Commit 863bc2b8 authored by Patrick Thier, committed by V8 LUCI CQ

[turbofan] Improve StoreStoreElimination

Previously, StoreStoreElimination treated allocations as
"can observe anything". This is quite conservative and prohibits
elimination of repeated double stores to the same field.
With this CL, allocations are changed to "observes initializing or
transitioning stores".
This guarantees that initializing stores to a freshly created object,
and stores that are part of a map transition, are not eliminated
before allocations (which can trigger GC), while still allowing
elimination of non-initializing, non-transitioning, unobservable
stores in the presence of allocations.

Bug: v8:12200
Change-Id: Ie1419696b9c8cb7c39aecf38d9f08102177b2c0f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3295449
Commit-Queue: Patrick Thier <pthier@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78230}
parent 719f9db3
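An illustrative sketch of the pattern this CL targets (hypothetical names, not part of the CL; whether TurboFan emits exactly this shape depends on feedback and field representations): the first store below is neither initializing nor transitioning, so the allocation between the two stores no longer blocks its elimination.

// Hypothetical example: a repeated store to the same field across an allocation.
function update(o) {
  o.x = 1.5;        // dead store: overwritten below, and neither initializing
                    // nor transitioning, so it can now be eliminated
  const tmp = {};   // allocation; may trigger GC, but GC only requires
                    // initializing/transitioning stores to be visible
  o.x = 2.5;        // the store that remains
  return tmp;
}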
@@ -342,6 +342,7 @@ class EffectControlLinearizer {
Zone* temp_zone_;
MaintainSchedule maintain_schedule_;
RegionObservability region_observability_ = RegionObservability::kObservable;
bool inside_region_ = false;
SourcePositionTable* source_positions_;
NodeOriginTable* node_origins_;
JSHeapBroker* broker_;
@@ -864,6 +865,7 @@ void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state) {
if (node->opcode() == IrOpcode::kFinishRegion) {
// Reset the current region observability.
region_observability_ = RegionObservability::kObservable;
inside_region_ = false;
// Update the value uses to the value input of the finish node and
// the effect uses to the effect input.
return RemoveRenameNode(node);
@@ -874,6 +876,7 @@ void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state) {
// StoreField and other operators).
DCHECK_NE(RegionObservability::kNotObservable, region_observability_);
region_observability_ = RegionObservabilityOf(node->op());
inside_region_ = true;
// Update the value uses to the value input of the finish node and
// the effect uses to the effect input.
return RemoveRenameNode(node);
@@ -891,6 +894,14 @@ void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state) {
return;
}
if (node->opcode() == IrOpcode::kStoreField) {
// Mark stores outside a region as non-initializing and non-transitioning.
if (!inside_region_) {
const FieldAccess access = FieldAccessOf(node->op());
NodeProperties::ChangeOp(node, simplified()->StoreField(access, false));
}
}
// The IfSuccess nodes should always start a basic block (and basic block
// start nodes are not handled in the ProcessNode method).
DCHECK_NE(IrOpcode::kIfSuccess, node->opcode());
@@ -91,6 +91,9 @@ std::ostream& operator<<(std::ostream& os, FieldAccess const& access) {
if (access.is_store_in_literal) {
os << " (store in literal)";
}
if (access.maybe_initializing_or_transitioning_store) {
os << " (initializing or transitioning store)";
}
os << "]";
return os;
}
@@ -1880,7 +1883,6 @@ const Operator* SimplifiedOperatorBuilder::SpeculativeNumberEqual(
#define ACCESS_OP_LIST(V) \
V(LoadField, FieldAccess, Operator::kNoWrite, 1, 1, 1) \
V(StoreField, FieldAccess, Operator::kNoRead, 2, 1, 0) \
V(LoadElement, ElementAccess, Operator::kNoWrite, 2, 1, 1) \
V(StoreElement, ElementAccess, Operator::kNoRead, 3, 1, 0) \
V(LoadTypedElement, ExternalArrayType, Operator::kNoWrite, 4, 1, 1) \
@@ -1902,6 +1904,17 @@ const Operator* SimplifiedOperatorBuilder::SpeculativeNumberEqual(
ACCESS_OP_LIST(ACCESS)
#undef ACCESS
const Operator* SimplifiedOperatorBuilder::StoreField(
const FieldAccess& access, bool maybe_initializing_or_transitioning) {
FieldAccess store_access = access;
store_access.maybe_initializing_or_transitioning_store =
maybe_initializing_or_transitioning;
return zone()->New<Operator1<FieldAccess>>(
IrOpcode::kStoreField,
Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoRead, "StoreField",
2, 1, 1, 0, 1, 0, store_access);
}
const Operator* SimplifiedOperatorBuilder::LoadMessage() {
return zone()->New<Operator>(IrOpcode::kLoadMessage, Operator::kEliminatable,
"LoadMessage", 1, 1, 1, 1, 1, 0);
@@ -84,6 +84,10 @@ struct FieldAccess {
#ifdef V8_HEAP_SANDBOX
ExternalPointerTag external_pointer_tag = kExternalPointerNullTag;
#endif
bool maybe_initializing_or_transitioning_store; // store is potentially
// initializing a newly
// allocated object or part
// of a map transition.
FieldAccess()
: base_is_tagged(kTaggedBase),
@@ -92,18 +96,18 @@ struct FieldAccess {
machine_type(MachineType::None()),
write_barrier_kind(kFullWriteBarrier),
const_field_info(ConstFieldInfo::None()),
is_store_in_literal(false) {}
is_store_in_literal(false),
maybe_initializing_or_transitioning_store(false) {}
FieldAccess(BaseTaggedness base_is_tagged, int offset, MaybeHandle<Name> name,
MaybeHandle<Map> map, Type type, MachineType machine_type,
WriteBarrierKind write_barrier_kind,
ConstFieldInfo const_field_info = ConstFieldInfo::None(),
bool is_store_in_literal = false
bool is_store_in_literal = false,
#ifdef V8_HEAP_SANDBOX
,
ExternalPointerTag external_pointer_tag = kExternalPointerNullTag
ExternalPointerTag external_pointer_tag = kExternalPointerNullTag,
#endif
)
bool maybe_initializing_or_transitioning_store = false)
: base_is_tagged(base_is_tagged),
offset(offset),
name(name),
@@ -112,12 +116,12 @@ struct FieldAccess {
machine_type(machine_type),
write_barrier_kind(write_barrier_kind),
const_field_info(const_field_info),
is_store_in_literal(is_store_in_literal)
is_store_in_literal(is_store_in_literal),
#ifdef V8_HEAP_SANDBOX
,
external_pointer_tag(external_pointer_tag)
external_pointer_tag(external_pointer_tag),
#endif
{
maybe_initializing_or_transitioning_store(
maybe_initializing_or_transitioning_store) {
DCHECK_GE(offset, 0);
DCHECK_IMPLIES(
machine_type.IsMapWord(),
@@ -1043,7 +1047,8 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
const Operator* LoadFieldByIndex();
const Operator* LoadField(FieldAccess const&);
const Operator* StoreField(FieldAccess const&);
const Operator* StoreField(FieldAccess const&,
bool maybe_initializing_or_transitioning = true);
// load-element [base + index]
const Operator* LoadElement(ElementAccess const&);
@@ -50,6 +50,7 @@ using StoreOffset = uint32_t;
struct UnobservableStore {
NodeId id_;
StoreOffset offset_;
bool maybe_gc_observable_ = false;
bool operator==(const UnobservableStore other) const {
return (id_ == other.id_) && (offset_ == other.offset_);
@@ -76,13 +77,15 @@ size_t hash_value(const UnobservableStore& p) {
// an UnobservablesSet allocates no memory.
class UnobservablesSet final {
private:
using KeyT = UnobservableStore;
using ValueT = bool; // Emulates set semantics in the map.
enum ObservableState {
kObservable = 0, // We haven't seen a store to this offset before.
kUnobservable = 1, // Stores to the same offset can be eliminated.
kGCObservable = 2 // Stores to the same offset can only be eliminated
// if they are not initializing or transitioning.
};
// The PersistentMap uses a special value to signify 'not present'. We use
// a boolean value to emulate set semantics.
static constexpr ValueT kNotPresent = false;
static constexpr ValueT kPresent = true;
using KeyT = UnobservableStore;
using ValueT = ObservableState;
public:
using SetT = PersistentMap<KeyT, ValueT>;
@@ -97,13 +100,17 @@ class UnobservablesSet final {
UnobservablesSet& operator=(const UnobservablesSet& other)
V8_NOEXCEPT = default;
// Computes the intersection of two states.
ObservableState Intersect(const ObservableState state1,
const ObservableState state2) const;
// Computes the intersection of two UnobservablesSets. If one of the sets is
// empty, will return empty.
UnobservablesSet Intersect(const UnobservablesSet& other,
const UnobservablesSet& empty, Zone* zone) const;
// Returns a set that is the current one, plus the observation obs passed
// as parameter. If said obs is already in the set, we don't have to
// as parameter. If said obs is already unobservable, we don't have to
// create a new one.
UnobservablesSet Add(UnobservableStore obs, Zone* zone) const;
@@ -113,18 +120,43 @@ class UnobservablesSet final {
// This can probably be done better if the observations are stored first by
// offset and then by node.
// We are removing all nodes with offset off since different nodes may
// alias one another, and we currently we don't have the means to know if
// alias one another, and currently we don't have the means to know if
// two nodes are definitely the same value.
UnobservablesSet RemoveSameOffset(StoreOffset off, Zone* zone) const;
// Returns a new set where all observations are marked as being observable
// by GC.
UnobservablesSet MarkGCObservable(Zone* zone) const;
const SetT* set() const { return set_; }
bool IsUnvisited() const { return set_ == nullptr; }
bool IsEmpty() const {
return set_ == nullptr || set_->begin() == set_->end();
}
bool Contains(UnobservableStore obs) const {
return set_ != nullptr && set_->Get(obs) != kNotPresent;
// We need to guarantee that objects are fully initialized and fields are in
// sync with their map when a GC is triggered (potentially by any allocation).
// Therefore initializing or transitioning stores are observable if they are
// observable by GC. All other stores are not relevant for correct GC
// behaviour and can be eliminated even if they are observable by GC.
bool IsUnobservable(UnobservableStore obs,
bool maybe_initializing_or_transitioning) const {
if (set_ == nullptr) return false;
ObservableState state = set_->Get(obs);
switch (state) {
case kUnobservable:
return true;
case kObservable:
return false;
case kGCObservable:
return !maybe_initializing_or_transitioning;
}
UNREACHABLE();
}
bool IsGCObservable(UnobservableStore obs) const {
return set_ != nullptr && set_->Get(obs) == kGCObservable;
}
bool operator==(const UnobservablesSet& other) const {
@@ -145,22 +177,22 @@ class UnobservablesSet final {
explicit UnobservablesSet(const SetT* set) : set_(set) {}
static SetT* NewSet(Zone* zone) {
return zone->New<UnobservablesSet::SetT>(zone, kNotPresent);
return zone->New<UnobservablesSet::SetT>(zone, kObservable);
}
static void SetAdd(SetT* set, const KeyT& key) { set->Set(key, kPresent); }
static void SetAdd(SetT* set, const KeyT& key) {
set->Set(key, kUnobservable);
}
static void SetErase(SetT* set, const KeyT& key) {
set->Set(key, kNotPresent);
set->Set(key, kObservable);
}
static void SetGCObservable(SetT* set, const KeyT& key) {
set->Set(key, kGCObservable);
}
const SetT* set_ = nullptr;
};
// These definitions are here in order to please the linker, which in debug mode
// sometimes requires static constants to be defined in .cc files.
constexpr UnobservablesSet::ValueT UnobservablesSet::kNotPresent;
constexpr UnobservablesSet::ValueT UnobservablesSet::kPresent;
class RedundantStoreFinder final {
public:
// Note that we Initialize unobservable_ with js_graph->graph->NodeCount()
@@ -286,7 +318,8 @@ UnobservablesSet RedundantStoreFinder::RecomputeSet(
StoreOffset offset = ToOffset(access);
UnobservableStore observation = {stored_to->id(), offset};
bool is_not_observable = uses.Contains(observation);
const bool is_not_observable = uses.IsUnobservable(
observation, access.maybe_initializing_or_transitioning_store);
if (is_not_observable) {
TRACE(" #%d is StoreField[+%d,%s](#%d), unobservable", node->id(),
@@ -295,10 +328,15 @@ UnobservablesSet RedundantStoreFinder::RecomputeSet(
to_remove().insert(node);
return uses;
} else {
TRACE(" #%d is StoreField[+%d,%s](#%d), observable, recording in set",
node->id(), offset,
MachineReprToString(access.machine_type.representation()),
stored_to->id());
TRACE(
" #%d is StoreField[+%d,%s](#%d), observable%s, recording in set",
node->id(), offset,
MachineReprToString(access.machine_type.representation()),
stored_to->id(),
(access.maybe_initializing_or_transitioning_store &&
uses.IsGCObservable(observation))
? " by GC"
: "");
return uses.Add(observation, temp_zone());
}
}
@@ -316,6 +354,17 @@ UnobservablesSet RedundantStoreFinder::RecomputeSet(
return uses.RemoveSameOffset(offset, temp_zone());
}
case IrOpcode::kAllocate:
case IrOpcode::kAllocateRaw: {
// Allocations can trigger a GC, therefore stores observable by allocation
// cannot be eliminated if they are initializing or transitioning stores.
TRACE(
" #%d is Allocate or AllocateRaw, marking recorded offsets as "
"observable by GC",
node->id());
return uses.MarkGCObservable(temp_zone());
}
default:
if (CannotObserveStoreField(node)) {
TRACE(" #%d:%s can observe nothing, set stays unchanged", node->id(),
@@ -444,6 +493,16 @@ UnobservablesSet UnobservablesSet::VisitedEmpty(Zone* zone) {
return UnobservablesSet(NewSet(zone));
}
UnobservablesSet::ObservableState UnobservablesSet::Intersect(
const ObservableState state1, const ObservableState state2) const {
if (state1 == state2) return state1;
if (state1 == kObservable || state2 == kObservable) return kObservable;
if (state1 == kGCObservable || state2 == kGCObservable) {
return kGCObservable;
}
UNREACHABLE();
}
UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
const UnobservablesSet& empty,
Zone* zone) const {
@@ -451,9 +510,9 @@ UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
UnobservablesSet::SetT* intersection = NewSet(zone);
for (auto triple : set()->Zip(*other.set())) {
if (std::get<1>(triple) && std::get<2>(triple)) {
intersection->Set(std::get<0>(triple), kPresent);
}
ObservableState new_state =
Intersect(std::get<1>(triple), std::get<2>(triple));
intersection->Set(std::get<0>(triple), new_state);
}
return UnobservablesSet(intersection);
@@ -461,7 +520,7 @@ UnobservablesSet UnobservablesSet::Intersect(const UnobservablesSet& other,
UnobservablesSet UnobservablesSet::Add(UnobservableStore obs,
Zone* zone) const {
if (set()->Get(obs) != kNotPresent) return *this;
if (set()->Get(obs) == kUnobservable) return *this;
UnobservablesSet::SetT* new_set = NewSet(zone);
*new_set = *set();
@@ -484,6 +543,19 @@ UnobservablesSet UnobservablesSet::RemoveSameOffset(StoreOffset offset,
return UnobservablesSet(new_set);
}
UnobservablesSet UnobservablesSet::MarkGCObservable(Zone* zone) const {
UnobservablesSet::SetT* new_set = NewSet(zone);
*new_set = *set();
// Mark all elements as observable by GC.
for (auto entry : *new_set) {
const UnobservableStore& obs = entry.first;
SetGCObservable(new_set, obs);
}
return UnobservablesSet(new_set);
}
} // namespace
// static
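Taken together, the changes above turn UnobservablesSet into a three-state lattice per (node, offset) pair. The added Intersect helper computes its meet at control-flow joins, worked out from the cases in the code above:

  Intersect(s, s)                         == s
  Intersect(kObservable,   anything)      == kObservable
  Intersect(kUnobservable, kGCObservable) == kGCObservable

so per-store information only ever becomes more conservative when branches merge.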
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --verify-heap --turbo-store-elimination
// Check that transitioning stores are not eliminated.
let obj = { a: 42 };
function foo() {
// Force GC on the next allocation to trigger heap verification.
%SimulateNewspaceFull();
// Transitioning store. Must not be eliminated.
this.f = obj;
this.f = {
a: 43
};
}
%PrepareFunctionForOptimization(foo);
var a;
a = new foo();
a = new foo();
%OptimizeFunctionOnNextCall(foo);
a = new foo();
assertEquals(43, a.f.a);
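Restating the test's scenario in plainer terms (constructor name hypothetical): the first store to this.f adds the property and therefore transitions the map of the freshly allocated receiver; even though it is immediately overwritten, it must survive, because allocating the object literal can trigger a GC, at which point --verify-heap expects the object's fields to be in sync with its (already transitioned) map.

// Hypothetical restatement of the test's scenario.
function Box(obj) {
  this.f = obj;       // transitioning store: adds 'f' to the new object's map;
                      // must not be eliminated
  this.f = { a: 43 }; // allocating this literal may trigger GC, and the heap
                      // verifier then requires 'f' to match the map
}
new Box({ a: 42 });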