Commit 4113cf64 authored by Manos Koukoutos, committed by V8 LUCI CQ

[wasm][turbofan] Immutable object operators

Design doc: bit.ly/36MfD6Y

We introduce the simplified operators LoadImmutableFromObject and
InitializeImmutableInObject. These are lowered to Loads and Stores just
like LoadFromObject and StoreToObject.
We split CsaLoadElimination::AbstractState into two HalfStates, which
represent the mutable and the immutable components of the state.
Immutable operators in the effect chain modify the immutable half-state,
and plain operators modify the mutable half-state. The immutable part is
maintained through write effects and loop headers. Immutable
initializations do not look up and kill previous overlapping stores,
since each offset cannot be initialized more than once.
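
As an illustrative sketch (hypothetical names, not code from this change),
the split state behaves roughly as follows:

  struct SplitStateSketch {
    HalfState mutable_state;    // LoadFromObject / StoreToObject
    HalfState immutable_state;  // LoadImmutableFromObject /
                                // InitializeImmutableInObject

    // The two halves never overlap, so each load consults exactly one.
    FieldInfo Lookup(Node* object, Node* offset, bool immutable) const {
      return immutable ? immutable_state.Lookup(object, offset)
                       : mutable_state.Lookup(object, offset);
    }

    // An unknown write effect (e.g. a call) wipes the mutable half but
    // leaves the immutable half intact.
    void KillMutable(Zone* zone) { mutable_state = HalfState(zone); }
  };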

Bug: v8:11510

Change-Id: I0f5feca3354fdd3bdc1f511cc5214ec51e1407ad
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3268728
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78325}
parent 002e39e9
@@ -62,9 +62,9 @@ class V8_EXPORT_PRIVATE CsaLoadElimination final
   };
 
   // Design doc: https://bit.ly/36MfD6Y
-  class AbstractState final : public ZoneObject {
+  class HalfState final : public ZoneObject {
    public:
-    explicit AbstractState(Zone* zone)
+    explicit HalfState(Zone* zone)
         : zone_(zone),
           fresh_entries_(zone, InnerMap(zone)),
           constant_entries_(zone, InnerMap(zone)),
@@ -73,7 +73,7 @@ class V8_EXPORT_PRIVATE CsaLoadElimination final
           constant_unknown_entries_(zone, InnerMap(zone)),
           arbitrary_unknown_entries_(zone, InnerMap(zone)) {}
 
-    bool Equals(AbstractState const* that) const {
+    bool Equals(HalfState const* that) const {
       return fresh_entries_ == that->fresh_entries_ &&
              constant_entries_ == that->constant_entries_ &&
             arbitrary_entries_ == that->arbitrary_entries_ &&
@@ -81,33 +81,22 @@ class V8_EXPORT_PRIVATE CsaLoadElimination final
              constant_unknown_entries_ == that->constant_unknown_entries_ &&
              arbitrary_unknown_entries_ == that->arbitrary_unknown_entries_;
     }
-    void IntersectWith(AbstractState const* that);
-    AbstractState const* KillField(Node* object, Node* offset,
-                                   MachineRepresentation repr) const;
-    AbstractState const* AddField(Node* object, Node* offset, Node* value,
-                                  MachineRepresentation repr) const;
+    void IntersectWith(HalfState const* that);
+    HalfState const* KillField(Node* object, Node* offset,
+                               MachineRepresentation repr) const;
+    HalfState const* AddField(Node* object, Node* offset, Node* value,
+                              MachineRepresentation repr) const;
     FieldInfo Lookup(Node* object, Node* offset) const;
     void Print() const;
 
    private:
-    Zone* zone_;
     using InnerMap = PersistentMap<Node*, FieldInfo>;
     template <typename OuterKey>
     using OuterMap = PersistentMap<OuterKey, InnerMap>;
     // offset -> object -> info
     using ConstantOffsetInfos = OuterMap<uint32_t>;
-    ConstantOffsetInfos fresh_entries_;
-    ConstantOffsetInfos constant_entries_;
-    ConstantOffsetInfos arbitrary_entries_;
     // object -> offset -> info
     using UnknownOffsetInfos = OuterMap<Node*>;
-    UnknownOffsetInfos fresh_unknown_entries_;
-    UnknownOffsetInfos constant_unknown_entries_;
-    UnknownOffsetInfos arbitrary_unknown_entries_;
     // Update {map} so that {map.Get(outer_key).Get(inner_key)} returns {info}.
     template <typename OuterKey>
@@ -123,12 +112,43 @@ class V8_EXPORT_PRIVATE CsaLoadElimination final
                     MachineRepresentation repr, Zone* zone);
     void KillOffsetInFresh(Node* object, uint32_t offset,
                            MachineRepresentation repr);
     template <typename OuterKey>
     static void IntersectWith(OuterMap<OuterKey>& to,
                               const OuterMap<OuterKey>& from);
     static void Print(const ConstantOffsetInfos& infos);
     static void Print(const UnknownOffsetInfos& infos);
 
+    Zone* zone_;
+    ConstantOffsetInfos fresh_entries_;
+    ConstantOffsetInfos constant_entries_;
+    ConstantOffsetInfos arbitrary_entries_;
+    UnknownOffsetInfos fresh_unknown_entries_;
+    UnknownOffsetInfos constant_unknown_entries_;
+    UnknownOffsetInfos arbitrary_unknown_entries_;
   };
 
+  // An {AbstractState} consists of two {HalfState}s, representing the mutable
+  // and immutable sets of known fields, respectively. These sets correspond to
+  // LoadFromObject/StoreToObject and LoadImmutableFromObject/
+  // InitializeImmutableInObject respectively. The two half-states should not
+  // overlap.
+  struct AbstractState : public ZoneObject {
+    explicit AbstractState(Zone* zone)
+        : mutable_state(zone), immutable_state(zone) {}
+    explicit AbstractState(HalfState mutable_state, HalfState immutable_state)
+        : mutable_state(mutable_state), immutable_state(immutable_state) {}
+
+    bool Equals(AbstractState const* that) const {
+      return this->immutable_state.Equals(&that->immutable_state) &&
+             this->mutable_state.Equals(&that->mutable_state);
+    }
+    void IntersectWith(AbstractState const* that) {
+      mutable_state.IntersectWith(&that->mutable_state);
+      immutable_state.IntersectWith(&that->immutable_state);
+    }
+
+    HalfState mutable_state;
+    HalfState immutable_state;
+  };
+
   Reduction ReduceLoadFromObject(Node* node, ObjectAccess const& access);
......
@@ -268,6 +268,13 @@ void Int64Lowering::LowerNode(Node* node) {
                             MachineType::Int32(), access.write_barrier_kind)));
       break;
     }
+    case IrOpcode::kLoadImmutableFromObject: {
+      ObjectAccess access = ObjectAccessOf(node->op());
+      LowerLoadOperator(node, access.machine_type.representation(),
+                        simplified()->LoadImmutableFromObject(ObjectAccess(
+                            MachineType::Int32(), access.write_barrier_kind)));
+      break;
+    }
     case IrOpcode::kStore: {
       StoreRepresentation store_rep = StoreRepresentationOf(node->op());
       LowerStoreOperator(
@@ -291,6 +298,13 @@ void Int64Lowering::LowerNode(Node* node) {
                             MachineType::Int32(), access.write_barrier_kind)));
       break;
     }
+    case IrOpcode::kInitializeImmutableInObject: {
+      ObjectAccess access = ObjectAccessOf(node->op());
+      LowerStoreOperator(node, access.machine_type.representation(),
+                         simplified()->InitializeImmutableInObject(ObjectAccess(
+                             MachineType::Int32(), access.write_barrier_kind)));
+      break;
+    }
     case IrOpcode::kStart: {
       int parameter_count = GetParameterCountAfterLowering(signature());
       // Only exchange the node if the parameter count actually changed.
......
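For context, why Int64Lowering needs these cases (a simplified sketch, not
code from this change): on 32-bit targets every Int64 memory access is split
into two Int32 accesses, so the immutable flavors must be rewritten exactly
like LoadFromObject/StoreToObject:

  // Conceptually, on a little-endian 32-bit platform,
  //   x = LoadImmutableFromObject<Int64>(base, offset)
  // is lowered to the pair
  //   low  = LoadImmutableFromObject<Int32>(base, offset)
  //   high = LoadImmutableFromObject<Int32>(base, offset + 4)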
@@ -84,12 +84,14 @@ Reduction MemoryLowering::Reduce(Node* node) {
     case IrOpcode::kAllocateRaw:
       return ReduceAllocateRaw(node);
     case IrOpcode::kLoadFromObject:
+    case IrOpcode::kLoadImmutableFromObject:
       return ReduceLoadFromObject(node);
     case IrOpcode::kLoadElement:
       return ReduceLoadElement(node);
     case IrOpcode::kLoadField:
       return ReduceLoadField(node);
     case IrOpcode::kStoreToObject:
+    case IrOpcode::kInitializeImmutableInObject:
       return ReduceStoreToObject(node);
     case IrOpcode::kStoreElement:
       return ReduceStoreElement(node);
@@ -372,7 +374,8 @@ Reduction MemoryLowering::ReduceAllocateRaw(
 }
 
 Reduction MemoryLowering::ReduceLoadFromObject(Node* node) {
-  DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
+  DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
+         node->opcode() == IrOpcode::kLoadImmutableFromObject);
   ObjectAccess const& access = ObjectAccessOf(node->op());
   MachineType machine_type = access.machine_type;
@@ -492,7 +495,8 @@ Reduction MemoryLowering::ReduceLoadField(Node* node) {
 Reduction MemoryLowering::ReduceStoreToObject(Node* node,
                                               AllocationState const* state) {
-  DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
+  DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
+         node->opcode() == IrOpcode::kInitializeImmutableInObject);
   ObjectAccess const& access = ObjectAccessOf(node->op());
   Node* object = node->InputAt(0);
   Node* value = node->InputAt(2);
......
@@ -37,6 +37,7 @@ bool CanAllocate(const Node* node) {
     case IrOpcode::kLoadElement:
     case IrOpcode::kLoadField:
     case IrOpcode::kLoadFromObject:
+    case IrOpcode::kLoadImmutableFromObject:
     case IrOpcode::kLoadLane:
     case IrOpcode::kLoadTransform:
     case IrOpcode::kMemoryBarrier:
@@ -53,6 +54,7 @@ bool CanAllocate(const Node* node) {
     case IrOpcode::kStoreField:
     case IrOpcode::kStoreLane:
     case IrOpcode::kStoreToObject:
+    case IrOpcode::kInitializeImmutableInObject:
     case IrOpcode::kUnalignedLoad:
     case IrOpcode::kUnalignedStore:
     case IrOpcode::kUnreachable:
@@ -217,12 +219,14 @@ void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
     case IrOpcode::kCall:
       return VisitCall(node, state);
     case IrOpcode::kLoadFromObject:
+    case IrOpcode::kLoadImmutableFromObject:
       return VisitLoadFromObject(node, state);
     case IrOpcode::kLoadElement:
       return VisitLoadElement(node, state);
     case IrOpcode::kLoadField:
       return VisitLoadField(node, state);
     case IrOpcode::kStoreToObject:
+    case IrOpcode::kInitializeImmutableInObject:
       return VisitStoreToObject(node, state);
     case IrOpcode::kStoreElement:
       return VisitStoreElement(node, state);
@@ -306,7 +310,8 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
 void MemoryOptimizer::VisitLoadFromObject(Node* node,
                                           AllocationState const* state) {
-  DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
+  DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
+         node->opcode() == IrOpcode::kLoadImmutableFromObject);
   Reduction reduction = memory_lowering()->ReduceLoadFromObject(node);
   EnqueueUses(node, state);
   if (V8_MAP_PACKING_BOOL && reduction.replacement() != node) {
@@ -316,7 +321,8 @@ void MemoryOptimizer::VisitLoadFromObject(Node* node,
 void MemoryOptimizer::VisitStoreToObject(Node* node,
                                          AllocationState const* state) {
-  DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
+  DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
+         node->opcode() == IrOpcode::kInitializeImmutableInObject);
   memory_lowering()->ReduceStoreToObject(node, state);
   EnqueueUses(node, state);
 }
......
@@ -426,11 +426,13 @@
   V(FastApiCall)                        \
   V(FindOrderedHashMapEntry)            \
   V(FindOrderedHashMapEntryForInt32Key) \
+  V(InitializeImmutableInObject)        \
   V(LoadDataViewElement)                \
   V(LoadElement)                        \
   V(LoadField)                          \
   V(LoadFieldByIndex)                   \
   V(LoadFromObject)                     \
+  V(LoadImmutableFromObject)            \
   V(LoadMessage)                        \
   V(LoadStackArgument)                  \
   V(LoadTypedElement)                   \
......
@@ -160,7 +160,9 @@ const ElementAccess& ElementAccessOf(const Operator* op) {
 const ObjectAccess& ObjectAccessOf(const Operator* op) {
   DCHECK_NOT_NULL(op);
   DCHECK(op->opcode() == IrOpcode::kLoadFromObject ||
-         op->opcode() == IrOpcode::kStoreToObject);
+         op->opcode() == IrOpcode::kLoadImmutableFromObject ||
+         op->opcode() == IrOpcode::kStoreToObject ||
+         op->opcode() == IrOpcode::kInitializeImmutableInObject);
   return OpParameter<ObjectAccess>(op);
 }
@@ -1878,16 +1880,18 @@ const Operator* SimplifiedOperatorBuilder::SpeculativeNumberEqual(
   UNREACHABLE();
 }
 
-#define ACCESS_OP_LIST(V)                                                \
-  V(LoadField, FieldAccess, Operator::kNoWrite, 1, 1, 1)                 \
-  V(StoreField, FieldAccess, Operator::kNoRead, 2, 1, 0)                 \
-  V(LoadElement, ElementAccess, Operator::kNoWrite, 2, 1, 1)             \
-  V(StoreElement, ElementAccess, Operator::kNoRead, 3, 1, 0)             \
-  V(LoadTypedElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1)    \
-  V(LoadFromObject, ObjectAccess, Operator::kNoWrite, 2, 1, 1)           \
-  V(StoreTypedElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)    \
-  V(StoreToObject, ObjectAccess, Operator::kNoRead, 3, 1, 0)             \
-  V(LoadDataViewElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1) \
+#define ACCESS_OP_LIST(V)                                                  \
+  V(LoadField, FieldAccess, Operator::kNoWrite, 1, 1, 1)                   \
+  V(StoreField, FieldAccess, Operator::kNoRead, 2, 1, 0)                   \
+  V(LoadElement, ElementAccess, Operator::kNoWrite, 2, 1, 1)               \
+  V(StoreElement, ElementAccess, Operator::kNoRead, 3, 1, 0)               \
+  V(LoadTypedElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1)      \
+  V(StoreTypedElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)      \
+  V(LoadFromObject, ObjectAccess, Operator::kNoWrite, 2, 1, 1)             \
+  V(StoreToObject, ObjectAccess, Operator::kNoRead, 3, 1, 0)               \
+  V(LoadImmutableFromObject, ObjectAccess, Operator::kNoWrite, 2, 1, 1)    \
+  V(InitializeImmutableInObject, ObjectAccess, Operator::kNoRead, 3, 1, 0) \
+  V(LoadDataViewElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1)   \
   V(StoreDataViewElement, ExternalArrayType, Operator::kNoRead, 5, 1, 0)
 
 #define ACCESS(Name, Type, properties, value_input_count, control_input_count, \
......
@@ -1068,10 +1068,22 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
                                Type value_type);
 
   // load-from-object [base + offset]
+  // This operator comes in two flavors: LoadImmutableFromObject guarantees that
+  // the underlying object field will be initialized at most once for the
+  // duration of the program. This enables more optimizations in
+  // CsaLoadElimination.
+  // Note: LoadImmutableFromObject is unrelated to LoadImmutable and is lowered
+  // into a regular Load.
   const Operator* LoadFromObject(ObjectAccess const&);
+  const Operator* LoadImmutableFromObject(ObjectAccess const&);
 
   // store-to-object [base + offset], value
+  // This operator comes in two flavors: InitializeImmutableInObject guarantees
+  // that the underlying object field has not and will not be initialized again
+  // for the duration of the program. This enables more optimizations in
+  // CsaLoadElimination.
   const Operator* StoreToObject(ObjectAccess const&);
+  const Operator* InitializeImmutableInObject(ObjectAccess const&);
 
   // load-typed-element buffer, [base + external + index]
   const Operator* LoadTypedElement(ExternalArrayType const&);
......
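For illustration (a hedged sketch, not part of this change; {graph}, {zone},
and the input nodes are assumed to exist), these operators are built exactly
like their mutable counterparts:

  // Create an immutable i32 field load. ObjectAccess pairs a MachineType
  // with a write barrier kind (kNoWriteBarrier for loads).
  SimplifiedOperatorBuilder simplified(zone);
  const Operator* op = simplified.LoadImmutableFromObject(
      ObjectAccess(MachineType::Int32(), kNoWriteBarrier));
  // Value inputs: base object and byte offset, plus effect/control inputs.
  Node* load = graph->NewNode(op, base, offset, effect, control);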
@@ -2174,6 +2174,7 @@ Type Typer::Visitor::TypeLoadStackArgument(Node* node) {
 }
 
 Type Typer::Visitor::TypeLoadFromObject(Node* node) { UNREACHABLE(); }
+Type Typer::Visitor::TypeLoadImmutableFromObject(Node* node) { UNREACHABLE(); }
 
 Type Typer::Visitor::TypeLoadTypedElement(Node* node) {
   switch (ExternalArrayTypeOf(node->op())) {
@@ -2204,6 +2205,9 @@ Type Typer::Visitor::TypeStoreMessage(Node* node) { UNREACHABLE(); }
 Type Typer::Visitor::TypeStoreElement(Node* node) { UNREACHABLE(); }
 
 Type Typer::Visitor::TypeStoreToObject(Node* node) { UNREACHABLE(); }
+Type Typer::Visitor::TypeInitializeImmutableInObject(Node* node) {
+  UNREACHABLE();
+}
 
 Type Typer::Visitor::TypeTransitionAndStoreElement(Node* node) {
   UNREACHABLE();
......
@@ -1562,6 +1562,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       //     CheckTypeIs(node, ElementAccessOf(node->op()).type));
       break;
     case IrOpcode::kLoadFromObject:
+    case IrOpcode::kLoadImmutableFromObject:
       CheckValueInputIs(node, 0, Type::Receiver());
       break;
     case IrOpcode::kLoadTypedElement:
@@ -1584,6 +1585,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
       CheckNotTyped(node);
       break;
     case IrOpcode::kStoreToObject:
+    case IrOpcode::kInitializeImmutableInObject:
       // TODO(gsps): Can we check some types here?
       break;
     case IrOpcode::kTransitionAndStoreElement:
......
@@ -29,8 +29,8 @@ Reduction WasmEscapeAnalysis::ReduceAllocateRaw(Node* node) {
   for (Edge edge : node->use_edges()) {
     if (NodeProperties::IsValueEdge(edge)) {
       if (edge.index() != 0 ||
-          edge.from()->opcode() != IrOpcode::kStoreToObject) {
-        // The allocated object is used for something other than storing into.
+          (edge.from()->opcode() != IrOpcode::kStoreToObject &&
+           edge.from()->opcode() != IrOpcode::kInitializeImmutableInObject)) {
         return NoChange();
       }
       value_edges.push_back(edge);
@@ -43,7 +43,8 @@ Reduction WasmEscapeAnalysis::ReduceAllocateRaw(Node* node) {
     DCHECK_EQ(edge.index(), 0);
     Node* use = edge.from();
     DCHECK(!use->IsDead());
-    DCHECK_EQ(use->opcode(), IrOpcode::kStoreToObject);
+    DCHECK(use->opcode() == IrOpcode::kStoreToObject ||
+           use->opcode() == IrOpcode::kInitializeImmutableInObject);
     // The value stored by this StoreToObject node might be another allocation
     // which has no more uses. Therefore we have to revisit it. Note that this
     // will not happen automatically: ReplaceWithValue does not trigger revisits
......
@@ -782,6 +782,11 @@ Map HeapObject::map(PtrComprCageBase cage_base) const {
 }
 
 void HeapObject::set_map(Map value) {
+#if V8_ENABLE_WEBASSEMBLY
+  // In {WasmGraphBuilder::SetMap} and {WasmGraphBuilder::LoadMap}, we treat
+  // maps as immutable. Therefore we are not allowed to mutate them here.
+  DCHECK(!value.IsWasmStructMap() && !value.IsWasmArrayMap());
+#endif
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !value.is_null()) {
     GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --experimental-wasm-gc --no-liftoff --experimental-wasm-nn-locals
d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js");
// Test that comparisons with array length in a loop get optimized away.
(function ArrayLoopOptimizationTest() {
var builder = new WasmModuleBuilder();
var array_index = builder.addArray(kWasmI32, true);
// Increase these parameters to measure performance.
let array_size = 10; // 100000000;
let iterations = 1; // 50;
builder.addFunction("array_inc", kSig_v_v)
.addLocals(wasmRefType(array_index), 1)
.addLocals(kWasmI32, 2)
// Locals: 0 -> array, 1 -> length, 2 -> index
.addBody([
...wasmI32Const(array_size),
kExprCallFunction, 1,
kExprLocalSet, 0,
// length = array.length
kExprLocalGet, 0,
kGCPrefix, kExprArrayLen, array_index,
kExprLocalSet, 1,
// while (true) {
kExprLoop, kWasmVoid,
// if (index < length) {
kExprLocalGet, 2,
kExprLocalGet, 1,
kExprI32LtU,
kExprIf, kWasmVoid,
// array[index] = array[index] + 5;
kExprLocalGet, 0,
kExprLocalGet, 2,
kExprLocalGet, 0,
kExprLocalGet, 2,
kGCPrefix, kExprArrayGet, array_index,
kExprI32Const, 5,
kExprI32Add,
kGCPrefix, kExprArraySet, array_index,
// index = index + 1;
kExprLocalGet, 2,
kExprI32Const, 1,
kExprI32Add,
kExprLocalSet, 2,
// continue;
kExprBr, 1,
// }
// break;
kExprEnd,
// }
kExprEnd])
.exportFunc();
builder.addFunction("make_array",
makeSig([kWasmI32], [wasmRefType(array_index)]))
.addBody([kExprLocalGet, 0, kGCPrefix, kExprArrayNewDefault, array_index])
var instance = builder.instantiate({});
let before = Date.now();
for (let i = 0; i < iterations; i++) {
instance.exports.array_inc();
}
let after = Date.now();
print(
"Average of " + iterations + " runs: " +
(after - before)/iterations + "ms");
})();
(function ImmutableLoadThroughEffect() {
var builder = new WasmModuleBuilder();
var struct = builder.addStructSubtype([
makeField(kWasmI32, false), makeField(kWasmI32, true)]);
let effect = builder.addImport('m', 'f', kSig_v_v);
builder.addFunction("main", kSig_i_i)
.addLocals(wasmRefType(struct), 1)
.addBody([
// Initialize an object
kExprLocalGet, 0,
kExprLocalGet, 0, kExprI32Const, 1, kExprI32Add,
kGCPrefix, kExprStructNew, struct,
kExprLocalSet, 1,
// Introduce unknown effect
kExprCallFunction, effect,
// TF should be able to eliminate this load...
kExprLocalGet, 1,
kGCPrefix, kExprStructGet, struct, 0,
// ... but not this one.
kExprLocalGet, 1,
kGCPrefix, kExprStructGet, struct, 1,
kExprI32Add
])
.exportFunc();
var instance = builder.instantiate({m : { f: function () {} }});
assertEquals(85, instance.exports.main(42));
})();
(function FunctionTypeCheckThroughEffect() {
var builder = new WasmModuleBuilder();
var sig = builder.addType(kSig_i_i);
let effect = builder.addImport('m', 'f', kSig_v_v);
builder.addFunction("input", sig)
.addBody([kExprLocalGet, 0])
.exportFunc();
builder.addFunction("main", makeSig([wasmRefType(kWasmFuncRef)], [kWasmI32]))
.addBody([
// Type check the function
kExprLocalGet, 0, kGCPrefix, kExprRttCanon, sig, kGCPrefix, kExprRefCast,
kExprDrop,
// Introduce unknown effect
kExprCallFunction, effect,
// TF should be able to eliminate the second type check, and return the
// constant 1.
kExprLocalGet, 0, kGCPrefix, kExprRttCanon, sig,
kGCPrefix, kExprRefTest])
.exportFunc();
var instance = builder.instantiate({m : { f: function () {} }});
assertEquals(1, instance.exports.main(instance.exports.input));
})();