Commit da62220f authored by Dominik Inführ, committed by V8 LUCI CQ

[heap, objects] Check object layout changes happen on main thread

Verification code in HeapObject::set_map() is supposed to run on the
main thread since object layout change is only supported on the main
thread. There are some users of set_map() on background threads though,
which resulted in crashes. Since those users all perform a safe map
transition, we introduce a separate method for this purpose:
HeapObject::set_map_safe_transition(). This method behaves just like
set_map() but verifies that this is a safe map transition and not an
object layout change and therefore can be used on background threads
as well.

This CL also adds a DCHECK to HeapObject::set_map() to ensure we run
this method only on the main thread.

Bug: chromium:1293484
Change-Id: I25de6fda08de21b8b7a3645cf0ea5b1334e8a2f6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3439905
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78978}
parent a6843b13
......@@ -669,7 +669,8 @@ void ArrayLiteral::BuildBoilerplateDescription(IsolateT* isolate) {
// elements array to a copy-on-write array.
if (is_simple() && depth() == 1 && array_index > 0 &&
IsSmiOrObjectElementsKind(kind)) {
elements->set_map(ReadOnlyRoots(isolate).fixed_cow_array_map());
elements->set_map_safe_transition(
ReadOnlyRoots(isolate).fixed_cow_array_map());
}
boilerplate_description_ =
......
......@@ -3878,6 +3878,9 @@ class SlotCollectingVisitor final : public ObjectVisitor {
};
// Verifies that transitioning `object` to `new_map` is legitimate: either the
// layout change was announced upfront via NotifyObjectLayoutChange (recorded
// in pending_layout_change_object_), or the transition is "safe", i.e. does
// not change the object's slot layout at all.
//
// NOTE(review): the diff residue duplicating the safe-transition checks inline
// here was removed — those checks live in Heap::VerifySafeMapTransition below.
void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
  // Object layout changes are currently not supported on background threads.
  DCHECK_NULL(LocalHeap::Current());

  if (!FLAG_verify_heap) return;

  PtrComprCageBase cage_base(isolate());

  // If you see this check triggering for a freshly allocated object,
  // use object->set_map_after_allocation() to initialize its map.
  if (pending_layout_change_object_.is_null()) {
    // No layout change was announced, so this transition must be safe.
    VerifySafeMapTransition(object, new_map);
  } else {
    // A layout change was announced for exactly this object; consume it.
    DCHECK_EQ(pending_layout_change_object_, object);
    pending_layout_change_object_ = HeapObject();
  }
}
#endif
// Verifies that changing `object`'s map to `new_map` does not change the
// object's slot layout. Safe transitions do not need to be announced to the
// GC and (per the commit message) may also happen on background threads.
// Only invoked from verification paths guarded by FLAG_verify_heap.
void Heap::VerifySafeMapTransition(HeapObject object, Map new_map) {
PtrComprCageBase cage_base(isolate());
if (object.IsJSObject(cage_base)) {
// Without double unboxing all in-object fields of a JSObject are tagged.
return;
}
if (object.IsString(cage_base) &&
(new_map == ReadOnlyRoots(this).thin_string_map() ||
new_map == ReadOnlyRoots(this).thin_one_byte_string_map())) {
// When transitioning a string to ThinString,
// Heap::NotifyObjectLayoutChange doesn't need to be invoked because only
// tagged fields are introduced.
return;
}
if (FLAG_shared_string_table && object.IsString(cage_base) &&
InstanceTypeChecker::IsInternalizedString(new_map.instance_type())) {
// In-place internalization does not change a string's fields.
//
// When sharing the string table, the setting and re-setting of maps below
// can race when there are parallel internalization operations, causing
// DCHECKs to fail.
return;
}
// Check that the set of slots before and after the transition match.
SlotCollectingVisitor old_visitor;
object.IterateFast(cage_base, &old_visitor);
MapWord old_map_word = object.map_word(cage_base, kRelaxedLoad);
// Temporarily set the new map to iterate new slots.
object.set_map_word(MapWord::FromMap(new_map), kRelaxedStore);
SlotCollectingVisitor new_visitor;
object.IterateFast(cage_base, &new_visitor);
// Restore the old map.
object.set_map_word(old_map_word, kRelaxedStore);
// The transition is safe iff both iterations visited exactly the same slots.
DCHECK_EQ(new_visitor.number_of_slots(), old_visitor.number_of_slots());
for (int i = 0; i < new_visitor.number_of_slots(); i++) {
DCHECK_EQ(new_visitor.slot(i), old_visitor.slot(i));
}
#ifdef V8_EXTERNAL_CODE_SPACE
// Code slots are tracked separately when the external code space is enabled.
DCHECK_EQ(new_visitor.number_of_code_slots(),
old_visitor.number_of_code_slots());
for (int i = 0; i < new_visitor.number_of_code_slots(); i++) {
DCHECK_EQ(new_visitor.code_slot(i), old_visitor.code_slot(i));
}
#endif // V8_EXTERNAL_CODE_SPACE
}
#endif // VERIFY_HEAP
GCIdleTimeHeapState Heap::ComputeHeapState() {
GCIdleTimeHeapState heap_state;
......
......@@ -1171,6 +1171,9 @@ class Heap {
// - or it was communicated to GC using NotifyObjectLayoutChange.
V8_EXPORT_PRIVATE void VerifyObjectLayoutChange(HeapObject object,
Map new_map);
// Checks that this is a safe map transition.
V8_EXPORT_PRIVATE void VerifySafeMapTransition(HeapObject object,
Map new_map);
#endif
// ===========================================================================
......
......@@ -35,6 +35,11 @@ class HeapObject : public Object {
DECL_GETTER(map, Map)
inline void set_map(Map value);
// This method behaves the same as `set_map` but marks the map transition as
// safe for the concurrent marker (object layout doesn't change) during
// verification.
inline void set_map_safe_transition(Map value);
inline ObjectSlot map_slot() const;
// The no-write-barrier version. This is OK if the object is white and in
......@@ -47,6 +52,7 @@ class HeapObject : public Object {
// Access the map using acquire load and release store.
DECL_ACQUIRE_GETTER(map, Map)
inline void set_map(Map value, ReleaseStoreTag);
inline void set_map_safe_transition(Map value, ReleaseStoreTag);
// Compare-and-swaps map word using release store, returns true if the map
// word was actually swapped.
......@@ -233,6 +239,20 @@ class HeapObject : public Object {
inline HeapObject(Address ptr, AllowInlineSmiStorage allow_smi);
OBJECT_CONSTRUCTORS(HeapObject, Object);
private:
enum class VerificationMode {
kSafeMapTransition,
kPotentialLayoutChange,
};
enum class EmitWriteBarrier {
kYes,
kNo,
};
template <EmitWriteBarrier emit_write_barrier, typename MemoryOrder>
V8_INLINE void set_map(Map value, MemoryOrder order, VerificationMode mode);
};
OBJECT_CONSTRUCTORS_IMPL(HeapObject, Object)
......
......@@ -789,61 +789,70 @@ Map HeapObject::map(PtrComprCageBase cage_base) const {
}
// Relaxed-store map setter. Delegates to the private templated set_map(),
// which performs verification, the map-word store, and the marking write
// barrier. Uses kPotentialLayoutChange verification, so this variant is
// restricted to the main thread; background threads must use
// set_map_safe_transition() instead.
//
// NOTE(review): removed the stale pre-refactor inline body (verify + store +
// barrier) that the diff scrape left interleaved here — keeping it alongside
// the delegation would store the map word and emit the write barrier twice.
void HeapObject::set_map(Map value) {
  set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
                                  VerificationMode::kPotentialLayoutChange);
}
DEF_ACQUIRE_GETTER(HeapObject, map, Map) {
return map_word(cage_base, kAcquireLoad).ToMap();
void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
VerificationMode::kPotentialLayoutChange);
}
void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !value.is_null()) {
GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
}
#endif
set_map_word(MapWord::FromMap(value), tag);
#ifndef V8_DISABLE_WRITE_BARRIERS
if (!value.is_null()) {
WriteBarrier::Marking(*this, map_slot(), value);
}
#endif
// Like set_map(), but verifies (under FLAG_verify_heap) that this is a safe
// map transition — one that does not change the object layout — and therefore
// may also be used on background threads.
void HeapObject::set_map_safe_transition(Map value) {
set_map<EmitWriteBarrier::kYes>(value, kRelaxedStore,
VerificationMode::kSafeMapTransition);
}
// Release-store variant of set_map_safe_transition(); publishes the new map
// word with release ordering. Usable on background threads as well.
void HeapObject::set_map_safe_transition(Map value, ReleaseStoreTag tag) {
set_map<EmitWriteBarrier::kYes>(value, kReleaseStore,
VerificationMode::kSafeMapTransition);
}
// Unsafe accessor omitting write barrier. Only valid when no barrier is
// actually required for this store (the templated setter SLOW_DCHECKs
// !WriteBarrier::IsRequired).
//
// NOTE(review): removed the stale pre-refactor verify/store/SLOW_DCHECK body
// that the diff scrape left interleaved before the delegation — keeping both
// would store the map word twice.
void HeapObject::set_map_no_write_barrier(Map value, RelaxedStoreTag tag) {
  set_map<EmitWriteBarrier::kNo>(value, kRelaxedStore,
                                 VerificationMode::kPotentialLayoutChange);
}
// Release-store map setter without write barrier; the templated setter
// SLOW_DCHECKs that no barrier is actually required for this store.
void HeapObject::set_map_no_write_barrier(Map value, ReleaseStoreTag tag) {
set_map<EmitWriteBarrier::kNo>(value, kReleaseStore,
VerificationMode::kPotentialLayoutChange);
}
// Shared implementation behind all public map setters.
// `emit_write_barrier` selects whether the marking write barrier runs after
// the store; `order` is the memory order of the map-word store; `mode`
// selects verification: kSafeMapTransition transitions never change object
// layout (and are allowed on background threads), while
// kPotentialLayoutChange ones may, and are restricted to the main thread.
//
// NOTE(review): dropped the two trailing residue lines referencing an
// undeclared `tag` — leftovers of the removed pre-refactor setter that would
// duplicate the map-word store and the SLOW_DCHECK handled above.
template <HeapObject::EmitWriteBarrier emit_write_barrier, typename MemoryOrder>
void HeapObject::set_map(Map value, MemoryOrder order, VerificationMode mode) {
#if V8_ENABLE_WEBASSEMBLY
  // In {WasmGraphBuilder::SetMap} and {WasmGraphBuilder::LoadMap}, we treat
  // maps as immutable. Therefore we are not allowed to mutate them here.
  DCHECK(!value.IsWasmStructMap() && !value.IsWasmArrayMap());
#endif
  // Object layout changes are currently not supported on background threads.
  // This method might change object layout and therefore can't be used on
  // background threads.
  DCHECK_IMPLIES(mode != VerificationMode::kSafeMapTransition,
                 !LocalHeap::Current());
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && !value.is_null()) {
    Heap* heap = GetHeapFromWritableObject(*this);
    if (mode == VerificationMode::kSafeMapTransition) {
      heap->VerifySafeMapTransition(*this, value);
    } else {
      DCHECK_EQ(mode, VerificationMode::kPotentialLayoutChange);
      heap->VerifyObjectLayoutChange(*this, value);
    }
  }
#endif
  set_map_word(MapWord::FromMap(value), order);
#ifndef V8_DISABLE_WRITE_BARRIERS
  if (!value.is_null()) {
    if (emit_write_barrier == EmitWriteBarrier::kYes) {
      WriteBarrier::Marking(*this, map_slot(), value);
    } else {
      DCHECK_EQ(emit_write_barrier, EmitWriteBarrier::kNo);
      SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
    }
  }
#endif
}
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
......@@ -859,6 +868,10 @@ void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
#endif
}
// Acquire-load map getter, pairing with the release-store map setters above.
DEF_ACQUIRE_GETTER(HeapObject, map, Map) {
return map_word(cage_base, kAcquireLoad).ToMap();
}
// Returns the slot holding this object's map word.
ObjectSlot HeapObject::map_slot() const {
return ObjectSlot(MapField::address(*this));
}
......
......@@ -288,7 +288,7 @@ StringMigrationResult MigrateStringMapUnderLockIfNeeded(
CHECK(string.release_compare_and_swap_map_word(
MapWord::FromMap(sentinel_map), MapWord::FromMap(target_map)));
} else {
string.set_map(target_map, kReleaseStore);
string.set_map_safe_transition(target_map, kReleaseStore);
}
return StringMigrationResult::kThisThreadMigrated;
......
......@@ -321,7 +321,8 @@ void Deserializer<IsolateT>::WeakenDescriptorArrays() {
DisallowGarbageCollection no_gc;
for (Handle<DescriptorArray> descriptor_array : new_descriptor_arrays_) {
DCHECK(descriptor_array->IsStrongDescriptorArray());
descriptor_array->set_map(ReadOnlyRoots(isolate()).descriptor_array_map());
descriptor_array->set_map_safe_transition(
ReadOnlyRoots(isolate()).descriptor_array_map());
WriteBarrier::Marking(*descriptor_array,
descriptor_array->number_of_descriptors());
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment