Commit 7fd19228 authored by Georg Schmid, committed by Commit Bot

[turbofan] Relax double const store invariant in load elim. for literals

Even when a field is marked const, we may emit multiple consecutive in-literal stores to that field. That is, in 'JSNativeContextSpecialization::BuildPropertyStore', when the access mode is 'kStoreInLiteral' and we are accessing a const field, we may produce a StoreField node, even though another StoreField (that stores something other than 'Uninitialized') to the same const field dominates it. This appears to be sound, since earlier stores to literals cannot be observed anyways.

Unfortunately this behavior conflicts with the double const store invariant in load elimination: Roughly speaking, we assume that load elimination may never observe two consecutive const stores to the same field on the same object.

The apparent solution would be to treat 'kStoreInLiteral' accesses like regular 'kStore' accesses: For consecutive stores to const properties we don't emit StoreField, but instead emit code that checks whether the value about to be written is equivalent to the previously written one, and otherwise deopt ('DeoptimizeReason::kWrongValue'). Unfortunately this turns out impractical, since for 'kStoreInLiteral' accesses we can't easily decide whether we're dealing with the first such store or one of the consecutive ones. Also see this abandoned CL: https://chromium-review.googlesource.com/c/v8/v8/+/1762020.

This CL instead adds an exception to the invariant in load elimination. We track whether a store arose from a 'kStoreInLiteral' access, and use this information when visiting StoreField nodes in load elimination.

R=neis@chromium.org, tebbi@chromium.org

Bug: chromium:987205
Change-Id: I8829752aa0637e9599677d20aad2d706d40d7fe6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1763535
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Georg Schmid <gsps@google.com>
Cr-Commit-Position: refs/heads/master@{#63385}
parent 9866cb59
......@@ -2254,7 +2254,8 @@ JSNativeContextSpecialization::BuildPropertyStore(
MachineType::TypeForRepresentation(field_representation),
kFullWriteBarrier,
LoadSensitivity::kUnsafe,
access_info.GetConstFieldInfo()};
access_info.GetConstFieldInfo(),
access_mode == AccessMode::kStoreInLiteral};
switch (field_representation) {
case MachineRepresentation::kFloat64: {
......@@ -2289,7 +2290,8 @@ JSNativeContextSpecialization::BuildPropertyStore(
MachineType::TypeCompressedTaggedPointer(),
kPointerWriteBarrier,
LoadSensitivity::kUnsafe,
access_info.GetConstFieldInfo()};
access_info.GetConstFieldInfo(),
access_mode == AccessMode::kStoreInLiteral};
storage = effect =
graph()->NewNode(simplified()->LoadField(storage_access),
storage, effect, control);
......
......@@ -283,6 +283,28 @@ class LoadElimination::AliasStateInfo {
MaybeHandle<Map> map_;
};
// Removes all recorded field info for nodes that must alias with 'object'.
// Used when a const store into a literal may overwrite a previously recorded
// const store to the same field (see ReduceStoreField). Copy-on-write: only
// allocates a new AbstractField if at least one entry is actually removed;
// otherwise returns 'this' unchanged.
LoadElimination::AbstractField const* LoadElimination::AbstractField::KillConst(
    Node* object, Zone* zone) const {
  for (auto pair : this->info_for_node_) {
    if (pair.first->IsDead()) continue;
    // If we previously recorded information about a const store on the given
    // 'object', we might not have done it on the same node; e.g. we might now
    // identify the object by a FinishRegion node, whereas the initial const
    // store was performed on the Allocate node. We therefore remove information
    // on all nodes that must alias with 'object'.
    if (MustAlias(object, pair.first)) {
      AbstractField* that = new (zone) AbstractField(zone);
      // Rebuild the map, keeping only entries that cannot be the same object.
      // NOTE: renamed from 'pair' — the original shadowed the outer loop
      // variable, which is confusing and triggers -Wshadow.
      for (auto other : this->info_for_node_) {
        if (!MustAlias(object, other.first)) {
          that->info_for_node_.insert(other);
        }
      }
      return that;
    }
  }
  return this;
}
LoadElimination::AbstractField const* LoadElimination::AbstractField::Kill(
const AliasStateInfo& alias_info, MaybeHandle<Name> name,
Zone* zone) const {
......@@ -541,6 +563,24 @@ LoadElimination::AbstractState const* LoadElimination::AbstractState::AddField(
return that;
}
// Removes const-store information recorded for 'object' over the given range
// of field indices. Called from ReduceStoreField when a const store into a
// literal may legitimately follow an earlier const store to the same field.
// Copy-on-write: a new AbstractState is allocated only if some field's info
// actually changes; otherwise 'this' is returned unchanged.
LoadElimination::AbstractState const*
LoadElimination::AbstractState::KillConstField(Node* object,
                                               IndexRange index_range,
                                               Zone* zone) const {
  // NOTE(review): the original declared 'AliasStateInfo alias_info(this,
  // object);' here but never used it (aliasing is handled inside KillConst);
  // the dead local has been removed.
  AbstractState* that = nullptr;
  for (int index : index_range) {
    if (AbstractField const* this_field = this->const_fields_[index]) {
      this_field = this_field->KillConst(object, zone);
      if (this->const_fields_[index] != this_field) {
        // Lazily clone the state on the first actual change.
        if (!that) that = new (zone) AbstractState(*this);
        that->const_fields_[index] = this_field;
      }
    }
  }
  return that ? that : this;
}
LoadElimination::AbstractState const* LoadElimination::AbstractState::KillField(
Node* object, IndexRange index_range, MaybeHandle<Name> name,
Zone* zone) const {
......@@ -964,7 +1004,8 @@ Reduction LoadElimination::ReduceStoreField(Node* node,
// At runtime, we should never encounter
// - any store replacing existing info with a different, incompatible
// representation, nor
// - two consecutive const stores.
// - two consecutive const stores, unless the latter is a store into
// a literal.
// However, we may see such code statically, so we guard against
// executing it by emitting Unreachable.
// TODO(gsps): Re-enable the double const store check even for
......@@ -974,7 +1015,9 @@ Reduction LoadElimination::ReduceStoreField(Node* node,
bool incompatible_representation =
!lookup_result->name.is_null() &&
!IsCompatible(representation, lookup_result->representation);
if (incompatible_representation || is_const_store) {
bool illegal_double_const_store =
is_const_store && !access.is_store_in_literal;
if (incompatible_representation || illegal_double_const_store) {
Node* control = NodeProperties::GetControlInput(node);
Node* unreachable =
graph()->NewNode(common()->Unreachable(), effect, control);
......@@ -989,6 +1032,12 @@ Reduction LoadElimination::ReduceStoreField(Node* node,
// Kill all potentially aliasing fields and record the new value.
FieldInfo new_info(new_value, representation, access.name,
access.const_field_info);
if (is_const_store && access.is_store_in_literal) {
// We only kill const information when there is a chance that we
// previously stored information about the given const field (namely,
// when we observe const stores to literals).
state = state->KillConstField(object, field_index, zone());
}
state = state->KillField(object, field_index, access.name, zone());
state = state->AddField(object, field_index, new_info, zone());
if (is_const_store) {
......
......@@ -140,6 +140,7 @@ class V8_EXPORT_PRIVATE LoadElimination final
return that;
}
FieldInfo const* Lookup(Node* object) const;
AbstractField const* KillConst(Node* object, Zone* zone) const;
AbstractField const* Kill(const AliasStateInfo& alias_info,
MaybeHandle<Name> name, Zone* zone) const;
bool Equals(AbstractField const* that) const {
......@@ -241,6 +242,8 @@ class V8_EXPORT_PRIVATE LoadElimination final
AbstractState const* AddField(Node* object, IndexRange index,
FieldInfo info, Zone* zone) const;
AbstractState const* KillConstField(Node* object, IndexRange index_range,
Zone* zone) const;
AbstractState const* KillField(const AliasStateInfo& alias_info,
IndexRange index, MaybeHandle<Name> name,
Zone* zone) const;
......
......@@ -57,7 +57,8 @@ bool operator==(FieldAccess const& lhs, FieldAccess const& rhs) {
return lhs.base_is_tagged == rhs.base_is_tagged && lhs.offset == rhs.offset &&
lhs.map.address() == rhs.map.address() &&
lhs.machine_type == rhs.machine_type &&
lhs.const_field_info == rhs.const_field_info;
lhs.const_field_info == rhs.const_field_info &&
lhs.is_store_in_literal == rhs.is_store_in_literal;
}
size_t hash_value(FieldAccess const& access) {
......@@ -65,7 +66,8 @@ size_t hash_value(FieldAccess const& access) {
// really only relevant for eliminating loads and they don't care about the
// write barrier mode.
return base::hash_combine(access.base_is_tagged, access.offset,
access.machine_type, access.const_field_info);
access.machine_type, access.const_field_info,
access.is_store_in_literal);
}
size_t hash_value(LoadSensitivity load_sensitivity) {
......@@ -99,6 +101,9 @@ std::ostream& operator<<(std::ostream& os, FieldAccess const& access) {
#endif
os << access.type << ", " << access.machine_type << ", "
<< access.write_barrier_kind << ", " << access.const_field_info;
if (access.is_store_in_literal) {
os << " (store in literal)";
}
if (FLAG_untrusted_code_mitigations) {
os << ", " << access.load_sensitivity;
}
......
......@@ -79,6 +79,7 @@ struct FieldAccess {
LoadSensitivity load_sensitivity; // load safety for poisoning.
ConstFieldInfo const_field_info; // the constness of this access, and the
// field owner map, if the access is const
bool is_store_in_literal; // originates from a kStoreInLiteral access
FieldAccess()
: base_is_tagged(kTaggedBase),
......@@ -87,13 +88,15 @@ struct FieldAccess {
machine_type(MachineType::None()),
write_barrier_kind(kFullWriteBarrier),
load_sensitivity(LoadSensitivity::kUnsafe),
const_field_info(ConstFieldInfo::None()) {}
const_field_info(ConstFieldInfo::None()),
is_store_in_literal(false) {}
FieldAccess(BaseTaggedness base_is_tagged, int offset, MaybeHandle<Name> name,
MaybeHandle<Map> map, Type type, MachineType machine_type,
WriteBarrierKind write_barrier_kind,
LoadSensitivity load_sensitivity = LoadSensitivity::kUnsafe,
ConstFieldInfo const_field_info = ConstFieldInfo::None())
ConstFieldInfo const_field_info = ConstFieldInfo::None(),
bool is_store_in_literal = false)
: base_is_tagged(base_is_tagged),
offset(offset),
name(name),
......@@ -102,7 +105,8 @@ struct FieldAccess {
machine_type(machine_type),
write_barrier_kind(write_barrier_kind),
load_sensitivity(load_sensitivity),
const_field_info(const_field_info) {}
const_field_info(const_field_info),
is_store_in_literal(is_store_in_literal) {}
int tag() const { return base_is_tagged == kTaggedBase ? kHeapObjectTag : 0; }
};
......
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
// Regression test for chromium:987205: object literals that mix a computed
// key ([undefined]) with the equivalent string key ('undefined') emit two
// StoreField nodes for the same const field. Load elimination must not treat
// the second store as an illegal double const store.
function f(x) {
// This used to generate two distinct stores to #undefined, violating the load
// elimination invariant that there are no two stores to the same const field:
var obj1 = {
[undefined]: 1,
'undefined': 1
};
// This one cannot be discharged statically:
var obj2 = {
[undefined]: x,
'undefined': 1
};
// Some more variations that exercise AllocateFastLiteral:
var obj3 = {
'undefined': 1,
[undefined]: x
};
var obj4 = {
'undefined': x,
[undefined]: 1
};
// In each literal the later property definition wins.
assertEquals(obj1.undefined, 1);
assertEquals(obj2.undefined, 1);
assertEquals(obj3.undefined, x);
assertEquals(obj4.undefined, 1);
}
// Warm up with a consistent argument, then optimize and run with a new value
// so the optimized code (and its deopt paths) are actually exercised.
%PrepareFunctionForOptimization(f);
f(1);
f(1);
%OptimizeFunctionOnNextCall(f);
f(2);
// Same scenario as f, but using the computed key [undefined] for BOTH
// properties, covering every constant/non-constant value combination for the
// two consecutive stores to the same const field.
function g(x) {
var obj1 = {
[undefined]: 1,
[undefined]: 1
};
var obj2 = {
[undefined]: 1,
[undefined]: x
};
var obj3 = {
[undefined]: x,
[undefined]: 1
};
var obj4 = {
[undefined]: x,
[undefined]: x
};
// The later property definition always wins.
assertEquals(obj1.undefined, 1);
assertEquals(obj2.undefined, x);
assertEquals(obj3.undefined, 1);
assertEquals(obj4.undefined, x);
}
// Warm up, optimize, then run with a fresh value to exercise optimized code.
%PrepareFunctionForOptimization(g);
g(1);
g(1);
%OptimizeFunctionOnNextCall(g);
g(2);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment