Commit 258b146b authored by Shu-yu Guo, committed by V8 LUCI CQ

[shared-struct] Do not depend on shared objects for optimized code

It is an invariant that objects in the shared heap never point into
per-Isolate heaps. This is currently broken by DependentCode. At the
same time, shared maps and other holders of DependentCode are designed
to never invalidate optimized code. E.g., shared maps are effectively
immutable.

This CL does two things:

1. Prevent shared objects from being depended upon
2. DCHECK that shared objects never cause deoptimization

Bug: v8:12547, v8:12761
Change-Id: I0fedae9134a8f786a9200e70f99dba7b38cd2d80
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3704809
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81340}
parent fbb8efd2
......@@ -115,6 +115,12 @@ class PendingDependencies final {
void Register(Handle<HeapObject> object,
              DependentCode::DependencyGroup group) {
  // Code objects are per-local-Isolate and therefore must never record a
  // dependency on an object living in the shared heap. Shared-heap objects
  // are designed so that no assumption embedded in optimized code can be
  // invalidated (e.g. maps for shared structs have no transitions and never
  // change the shape of their fields); the matching DCHECK lives in
  // DependentCode::DeoptimizeDependencyGroups.
  if (!object->InSharedWritableHeap()) {
    deps_[object] |= group;
  }
}
......
......@@ -1367,8 +1367,8 @@ void Heap::DeoptMarkedAllocationSites() {
ForeachAllocationSite(allocation_sites_list(), [](AllocationSite site) {
if (site.deopt_dependent_code()) {
site.dependent_code().MarkCodeForDeoptimization(
DependentCode::kAllocationSiteTenuringChangedGroup);
DependentCode::MarkCodeForDeoptimization(
site, DependentCode::kAllocationSiteTenuringChangedGroup);
site.set_deopt_dependent_code(false);
}
});
......
......@@ -239,8 +239,9 @@ bool AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
}
CHECK_NE(to_kind, DICTIONARY_ELEMENTS);
JSObject::TransitionElementsKind(boilerplate, to_kind);
site->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *site,
DependentCode::kAllocationSiteTransitionChangedGroup);
result = true;
}
}
......@@ -259,8 +260,8 @@ bool AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
ElementsKindToString(to_kind));
}
site->SetElementsKind(to_kind);
site->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *site, DependentCode::kAllocationSiteTransitionChangedGroup);
result = true;
}
}
......
......@@ -1405,6 +1405,24 @@ inline void DeoptimizationLiteralArray::set(int index, Object value) {
Set(index, maybe);
}
// static
template <typename ObjectT>
void DependentCode::DeoptimizeDependencyGroups(Isolate* isolate, ObjectT object,
                                               DependencyGroups groups) {
  // Objects in the shared heap must never invalidate optimized code, so a
  // shared object reaching this deoptimization path is a bug.
  DCHECK(!object.InSharedHeap());
  DependentCode deps = object.dependent_code();
  deps.DeoptimizeDependencyGroups(isolate, groups);
}
// static
template <typename ObjectT>
bool DependentCode::MarkCodeForDeoptimization(ObjectT object,
DependencyGroups groups) {
// Shared objects are designed to never invalidate code.
DCHECK(!object.InSharedHeap());
return object.dependent_code().MarkCodeForDeoptimization(groups);
}
} // namespace internal
} // namespace v8
......
......@@ -728,13 +728,13 @@ bool BytecodeArray::IsOld() const {
return bytecode_age() >= kIsOldBytecodeAge;
}
DependentCode DependentCode::GetDependentCode(Handle<HeapObject> object) {
if (object->IsMap()) {
return Handle<Map>::cast(object)->dependent_code();
} else if (object->IsPropertyCell()) {
return Handle<PropertyCell>::cast(object)->dependent_code();
} else if (object->IsAllocationSite()) {
return Handle<AllocationSite>::cast(object)->dependent_code();
DependentCode DependentCode::GetDependentCode(HeapObject object) {
if (object.IsMap()) {
return Map::cast(object).dependent_code();
} else if (object.IsPropertyCell()) {
return PropertyCell::cast(object).dependent_code();
} else if (object.IsAllocationSite()) {
return AllocationSite::cast(object).dependent_code();
}
UNREACHABLE();
}
......@@ -775,7 +775,7 @@ void DependentCode::InstallDependency(Isolate* isolate, Handle<Code> code,
PrintDependencyGroups(groups);
StdoutStream{} << "]\n";
}
Handle<DependentCode> old_deps(DependentCode::GetDependentCode(object),
Handle<DependentCode> old_deps(DependentCode::GetDependentCode(*object),
isolate);
Handle<DependentCode> new_deps =
InsertWeakCode(isolate, old_deps, groups, code);
......@@ -881,7 +881,7 @@ int DependentCode::FillEntryFromBack(int index, int length) {
return index; // No non-cleared entry found.
}
void DependentCode::DeoptimizeDependentCodeGroup(
void DependentCode::DeoptimizeDependencyGroups(
Isolate* isolate, DependentCode::DependencyGroups groups) {
DisallowGarbageCollection no_gc_scope;
bool marked_something = MarkCodeForDeoptimization(groups);
......
......@@ -1031,9 +1031,13 @@ class DependentCode : public WeakArrayList {
Handle<HeapObject> object,
DependencyGroups groups);
void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroups groups);
template <typename ObjectT>
static void DeoptimizeDependencyGroups(Isolate* isolate, ObjectT object,
DependencyGroups groups);
bool MarkCodeForDeoptimization(DependencyGroups deopt_groups);
template <typename ObjectT>
static bool MarkCodeForDeoptimization(ObjectT object,
DependencyGroups groups);
V8_EXPORT_PRIVATE static DependentCode empty_dependent_code(
const ReadOnlyRoots& roots);
......@@ -1047,7 +1051,7 @@ class DependentCode : public WeakArrayList {
private:
// Get/Set {object}'s {DependentCode}.
static DependentCode GetDependentCode(Handle<HeapObject> object);
static DependentCode GetDependentCode(HeapObject object);
static void SetDependentCode(Handle<HeapObject> object,
Handle<DependentCode> dep);
......@@ -1058,6 +1062,10 @@ class DependentCode : public WeakArrayList {
DependencyGroups groups,
Handle<Code> code);
bool MarkCodeForDeoptimization(DependencyGroups deopt_groups);
void DeoptimizeDependencyGroups(Isolate* isolate, DependencyGroups groups);
// The callback is called for all non-cleared entries, and should return true
// iff the current entry should be cleared.
using IterateAndCompactFn = std::function<bool(CodeT, DependencyGroups)>;
......
......@@ -647,8 +647,8 @@ void SetInstancePrototype(Isolate* isolate, Handle<JSFunction> function,
}
// Deoptimize all code that embeds the previous initial map.
initial_map->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kInitialMapChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *initial_map, DependentCode::kInitialMapChangedGroup);
} else {
// Put the value in the initial map field until an initial map is
// needed. At that point, a new initial map is created and the
......
......@@ -4930,8 +4930,8 @@ void InvalidateOnePrototypeValidityCellInternal(Map map) {
if (V8_DICT_PROPERTY_CONST_TRACKING_BOOL && map.is_dictionary_map()) {
// TODO(11527): pass Isolate as an argument.
Isolate* isolate = GetIsolateFromWritableObject(map);
map.dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPrototypeCheckGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, map, DependentCode::kPrototypeCheckGroup);
}
}
......
......@@ -663,8 +663,8 @@ bool Map::CanBeDeprecated() const {
void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
if (is_stable()) {
mark_unstable();
dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPrototypeCheckGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *this, DependentCode::kPrototypeCheckGroup);
}
}
......
......@@ -1222,8 +1222,7 @@ void MapUpdater::GeneralizeField(Isolate* isolate, Handle<Map> map,
dep_groups |= DependentCode::kFieldRepresentationGroup;
}
field_owner->dependent_code().DeoptimizeDependentCodeGroup(isolate,
dep_groups);
DependentCode::DeoptimizeDependencyGroups(isolate, *field_owner, dep_groups);
if (FLAG_trace_generalization) {
PrintGeneralization(
......
......@@ -587,8 +587,8 @@ void Map::DeprecateTransitionTree(Isolate* isolate) {
if (FLAG_log_maps) {
LOG(isolate, MapEvent("Deprecate", handle(*this, isolate), Handle<Map>()));
}
dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kTransitionGroup);
DependentCode::DeoptimizeDependencyGroups(isolate, *this,
DependentCode::kTransitionGroup);
NotifyLeafMapLayoutChange(isolate);
}
......@@ -1892,8 +1892,8 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
JSFunction::SetInitialMap(isolate, constructor, result, prototype);
// Deoptimize all code that embeds the previous initial map.
initial_map->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kInitialMapChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *initial_map, DependentCode::kInitialMapChangedGroup);
if (!result->EquivalentToForNormalization(*map,
CLEAR_INOBJECT_PROPERTIES)) {
result =
......
......@@ -6613,8 +6613,8 @@ void PropertyCell::ClearAndInvalidate(ReadOnlyRoots roots) {
Transition(details, roots.the_hole_value_handle());
// TODO(11527): pass Isolate as an argument.
Isolate* isolate = GetIsolateFromWritableObject(*this);
dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *this, DependentCode::kPropertyCellChangedGroup);
}
// static
......@@ -6710,8 +6710,8 @@ Handle<PropertyCell> PropertyCell::PrepareForAndSetValue(
// forever.
if (original_details.cell_type() != new_type ||
(!original_details.IsReadOnly() && details.IsReadOnly())) {
cell->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *cell, DependentCode::kPropertyCellChangedGroup);
}
}
return cell;
......@@ -6724,8 +6724,8 @@ void PropertyCell::InvalidateProtector() {
set_value(Smi::FromInt(Protectors::kProtectorInvalid), kReleaseStore);
// TODO(11527): pass Isolate as an argument.
Isolate* isolate = GetIsolateFromWritableObject(*this);
dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *this, DependentCode::kPropertyCellChangedGroup);
}
}
......
......@@ -49,8 +49,8 @@ void PropertyCell::UpdatePropertyDetailsExceptCellType(
if (!old_details.IsReadOnly() && details.IsReadOnly()) {
// TODO(11527): pass Isolate as an argument.
Isolate* isolate = GetIsolateFromWritableObject(*this);
dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
DependentCode::DeoptimizeDependencyGroups(
isolate, *this, DependentCode::kPropertyCellChangedGroup);
}
}
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Flags: --shared-string-table --harmony-struct --allow-natives-syntax
// Flags: --verify-heap
"use strict";

// The Worker API is only available in d8 builds/shells that expose it;
// silently skip the test elsewhere.
if (this.Worker) {
(function TestSharedStructOptimizedCode() {
// Regression test: a worker optimizes writePayload, which repeatedly writes
// to a field of a shared struct. Per the CL this test accompanies, optimized
// code must not install dependencies on shared objects (e.g. the shared
// struct's map), since shared objects must never point into per-Isolate
// heaps.
let workerScript =
`onmessage = function(struct) {
%PrepareFunctionForOptimization(writePayload);
for (let i = 0; i < 5; i++) writePayload(struct);
%OptimizeFunctionOnNextCall(writePayload);
writePayload(struct);
postMessage("done");
};
function writePayload(struct) {
for (let i = 0; i < 100; i++) {
struct.payload = struct.payload + 1;
}
}
postMessage("started");`;
let worker = new Worker(workerScript, { type: 'string' });
assertEquals("started", worker.getMessage());
let MyStruct = new SharedStructType(['payload']);
let struct = new MyStruct();
struct.payload = 0;
worker.postMessage(struct);
// Block until the worker reports that it finished writing struct.payload.
assertEquals("done", worker.getMessage());
worker.terminate();
})();
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment