Commit 45bb4498 authored by Shu-yu Guo, Committed by Commit Bot

[weakrefs] Hold unregister tokens weakly

Change unregister tokens to be held weakly instead of strongly. This
enables the use case of an object being used as its own unregister
token.

To avoid using an ephemeron table, FinalizationGroup's key_map is
changed to key off unregister tokens' identity hashes. Because hashes
may collide, a single key list may rarely contain multiple tokens. When
a FinalizationGroup WeakCell's token becomes unreachable during GC, the
cell is removed from the doubly linked key list, and from the key map
if its key was unique (see the sketch after the commit metadata below).

Bug: v8:8179
Change-Id: If88fd2ab196e3f9a287990ae345117a0abb2f04d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1970493
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65532}
parent 90b42052
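
A minimal standalone sketch of the key_map shape described in the commit
message, using plain C++ containers in place of SimpleNumberDictionary and
WeakCell (all names below are illustrative, not V8 API): the map is keyed on
identity hashes, so it never holds a token strongly, and one bucket can chain
cells for more than one token after a hash collision.

#include <cstdint>
#include <unordered_map>

// Stand-in for a WeakCell: it remembers its token and its key-list links.
struct CellNode {
  const void* token = nullptr;   // weakly held in V8; a plain pointer here
  CellNode* key_next = nullptr;
  CellNode* key_prev = nullptr;
};

// key_map analogue: identity hash -> head of a doubly linked list of cells.
using KeyMapSketch = std::unordered_map<uint32_t, CellNode*>;

// Register analogue: prepend the new cell to the list for the token's hash.
// Distinct tokens can share a hash, so one list may mix tokens.
void RegisterSketch(KeyMapSketch& key_map, uint32_t identity_hash,
                    CellNode* cell) {
  CellNode*& head = key_map[identity_hash];  // creates a null head if absent
  cell->key_next = head;
  if (head != nullptr) head->key_prev = cell;
  head = cell;
}
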
......@@ -2415,6 +2415,12 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThreadTask, &weak_cell)) {
auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
Object target) {
if (target.IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
};
HeapObject target = HeapObject::cast(weak_cell.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
DCHECK(!target.IsUndefined());
......@@ -2422,23 +2428,13 @@ void MarkCompactCollector::ClearJSWeakRefs() {
JSFinalizationGroup finalization_group =
JSFinalizationGroup::cast(weak_cell.finalization_group());
if (!finalization_group.scheduled_for_cleanup()) {
heap()->AddDirtyJSFinalizationGroup(
finalization_group,
[](HeapObject object, ObjectSlot slot, Object target) {
if (target.IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
});
heap()->AddDirtyJSFinalizationGroup(finalization_group,
gc_notify_updated_slot);
}
// We're modifying the pointers in WeakCell and JSFinalizationGroup during
// GC; thus we need to record the slots we write. The normal write barrier is
// not enough, since it's disabled before GC.
weak_cell.Nullify(isolate(),
[](HeapObject object, ObjectSlot slot, Object target) {
if (target.IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
});
weak_cell.Nullify(isolate(), gc_notify_updated_slot);
DCHECK(finalization_group.NeedsCleanup());
DCHECK(finalization_group.scheduled_for_cleanup());
} else {
......@@ -2446,6 +2442,29 @@ void MarkCompactCollector::ClearJSWeakRefs() {
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
}
HeapObject unregister_token =
HeapObject::cast(weak_cell.unregister_token());
if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) {
// The unregister token is dead. Remove any corresponding entries in the
// key map. Multiple WeakCells with the same token will all have their
// unregister_token fields set to undefined when the first such WeakCell is
// processed. Like above, we're modifying pointers during GC, so record the
// slots.
HeapObject undefined = ReadOnlyRoots(isolate()).undefined_value();
JSFinalizationGroup finalization_group =
JSFinalizationGroup::cast(weak_cell.finalization_group());
finalization_group.RemoveUnregisterToken(
JSReceiver::cast(unregister_token), isolate(),
[undefined](WeakCell matched_cell) {
matched_cell.set_unregister_token(undefined);
},
gc_notify_updated_slot);
} else {
// The unregister_token is alive.
ObjectSlot slot = weak_cell.RawField(WeakCell::kUnregisterTokenOffset);
RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
}
}
}
......
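
A rough sketch of the post-marking pass in the hunk above, with marking state
and slot recording reduced to booleans and a callback (illustrative only, not
V8 code):

#include <functional>
#include <vector>

struct SketchCell {
  bool target_live = false;
  bool token_live = false;
  bool target_cleared = false;
  bool token_cleared = false;
};

using NotifySlot = std::function<void(SketchCell*, const char*)>;

// For each cell deferred during marking: clear a dead target (scheduling the
// cleanup task is elided), and if the unregister token died, drop its key map
// entries and null the token field. Every pointer write is reported through
// notify_slot, playing the role of gc_notify_updated_slot while the normal
// write barrier is disabled.
void ClearWeakCellsSketch(std::vector<SketchCell>& deferred,
                          const NotifySlot& notify_slot) {
  for (SketchCell& cell : deferred) {
    if (!cell.target_live) {
      cell.target_cleared = true;             // WeakCell::Nullify analogue
      notify_slot(&cell, "target");
    }
    if (!cell.token_live) {
      cell.token_cleared = true;              // RemoveUnregisterToken analogue
      notify_slot(&cell, "unregister_token");
    }
  }
}
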
......@@ -324,15 +324,22 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
if (weak_cell.target().IsHeapObject()) {
HeapObject target = HeapObject::cast(weak_cell.target());
HeapObject unregister_token =
HeapObject::cast(weak_cell.unregister_token());
concrete_visitor()->SynchronizePageAccess(target);
if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody above
concrete_visitor()->SynchronizePageAccess(unregister_token);
if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
// Record the slots inside the WeakCell, since the IterateBody above
// didn't visit it.
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
concrete_visitor()->RecordSlot(weak_cell, slot, target);
slot = weak_cell.RawField(WeakCell::kUnregisterTokenOffset);
concrete_visitor()->RecordSlot(weak_cell, slot, unregister_token);
} else {
// WeakCell points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
// WeakCell points to a potentially dead object or a dead unregister
// token. We have to process them when we know the liveness of the whole
// transitive closure.
weak_objects_->weak_cells.Push(task_id_, weak_cell);
}
}
......
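
The marking-time decision above, condensed into a standalone sketch (assumed
types, not the real visitor): record the weak slots only when both weak fields
are already known live, otherwise defer the cell to the weak_cells worklist.

#include <vector>

struct MarkSketchCell {
  bool target_marked = false;
  bool token_marked = false;
  bool slots_recorded = false;
};

// VisitWeakCell analogue: a cell whose target and unregister token are both
// already marked can have its weak slots recorded immediately; any other cell
// is pushed onto a worklist and revisited once the full transitive closure of
// live objects is known.
void VisitWeakCellsSketch(std::vector<MarkSketchCell*>& cells,
                          std::vector<MarkSketchCell*>& deferred_worklist) {
  for (MarkSketchCell* cell : cells) {
    if (cell->target_marked && cell->token_marked) {
      cell->slots_recorded = true;        // RecordSlot for both weak fields
    } else {
      deferred_worklist.push_back(cell);  // processed in ClearJSWeakRefs
    }
  }
}
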
......@@ -290,8 +290,8 @@ class SimpleNumberDictionary
public:
DECL_CAST(SimpleNumberDictionary)
// Type specific at put (default NONE attributes is used when adding).
V8_WARN_UNUSED_RESULT static Handle<SimpleNumberDictionary> Set(
Isolate* isolate, Handle<SimpleNumberDictionary> dictionary, uint32_t key,
V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT static Handle<SimpleNumberDictionary>
Set(Isolate* isolate, Handle<SimpleNumberDictionary> dictionary, uint32_t key,
Handle<Object> value);
static const int kEntryValueIndex = 1;
......
......@@ -54,23 +54,26 @@ void JSFinalizationGroup::Register(
finalization_group->set_active_cells(*weak_cell);
if (!unregister_token->IsUndefined(isolate)) {
Handle<ObjectHashTable> key_map;
Handle<SimpleNumberDictionary> key_map;
if (finalization_group->key_map().IsUndefined(isolate)) {
key_map = ObjectHashTable::New(isolate, 1);
key_map = SimpleNumberDictionary::New(isolate, 1);
} else {
key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
key_map = handle(
SimpleNumberDictionary::cast(finalization_group->key_map()), isolate);
}
Object value = key_map->Lookup(unregister_token);
if (value.IsWeakCell()) {
// Unregister tokens are held weakly as objects are often their own
// unregister token. To avoid using an ephemeron map, the map for token
// lookup is keyed on the token's identity hash instead of the token itself.
uint32_t key = unregister_token->GetOrCreateHash(isolate).value();
InternalIndex entry = key_map->FindEntry(isolate, key);
if (entry.is_found()) {
Object value = key_map->ValueAt(entry);
WeakCell existing_weak_cell = WeakCell::cast(value);
existing_weak_cell.set_key_list_prev(*weak_cell);
weak_cell->set_key_list_next(existing_weak_cell);
} else {
DCHECK(value.IsTheHole(isolate));
}
key_map = ObjectHashTable::Put(key_map, unregister_token, weak_cell);
key_map = SimpleNumberDictionary::Set(isolate, key_map, key, weak_cell);
finalization_group->set_key_map(*key_map);
}
}
......@@ -81,26 +84,89 @@ bool JSFinalizationGroup::Unregister(
// Iterate through the doubly linked list of WeakCells associated with the
// key. Each WeakCell will be in the "active_cells" or "cleared_cells" list of
// its FinalizationGroup; remove it from there.
if (!finalization_group->key_map().IsUndefined(isolate)) {
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
Object value = key_map->Lookup(unregister_token);
HeapObject undefined = ReadOnlyRoots(isolate).undefined_value();
while (value.IsWeakCell()) {
WeakCell weak_cell = WeakCell::cast(value);
weak_cell.RemoveFromFinalizationGroupCells(isolate);
value = weak_cell.key_list_next();
return finalization_group->RemoveUnregisterToken(
*unregister_token, isolate,
[isolate](WeakCell matched_cell) {
matched_cell.RemoveFromFinalizationGroupCells(isolate);
},
[](HeapObject, ObjectSlot, Object) {});
}
template <typename MatchCallback, typename GCNotifyUpdatedSlotCallback>
bool JSFinalizationGroup::RemoveUnregisterToken(
JSReceiver unregister_token, Isolate* isolate, MatchCallback match_callback,
GCNotifyUpdatedSlotCallback gc_notify_updated_slot) {
// This method is called both from FinalizationGroup#unregister and when
// removing weakly-held dead unregister tokens. The latter happens during GC,
// so this function cannot GC.
DisallowHeapAllocation no_gc;
if (key_map().IsUndefined(isolate)) {
return false;
}
SimpleNumberDictionary key_map =
SimpleNumberDictionary::cast(this->key_map());
// If the token doesn't have a hash, it was not used as a key inside any hash
// tables.
Object hash = unregister_token.GetHash();
if (hash.IsUndefined(isolate)) {
return false;
}
uint32_t key = Smi::ToInt(hash);
InternalIndex entry = key_map.FindEntry(isolate, key);
if (entry.is_not_found()) {
return false;
}
Object value = key_map.ValueAt(entry);
bool was_present = false;
HeapObject undefined = ReadOnlyRoots(isolate).undefined_value();
HeapObject new_key_list_head = undefined;
HeapObject new_key_list_prev = undefined;
// Compute a new key list that doesn't have unregister_token. Because
// unregister tokens are held weakly, key_map is keyed using the tokens'
// identity hashes, and identity hashes may collide.
while (!value.IsUndefined(isolate)) {
WeakCell weak_cell = WeakCell::cast(value);
DCHECK(!ObjectInYoungGeneration(weak_cell));
value = weak_cell.key_list_next();
if (weak_cell.unregister_token() == unregister_token) {
// weak_cell has the same unregister token; remove it from the key list.
match_callback(weak_cell);
weak_cell.set_key_list_prev(undefined);
weak_cell.set_key_list_next(undefined);
was_present = true;
} else {
// weak_cell has a different unregister token with the same key (hash
// collision); fix up the list.
weak_cell.set_key_list_prev(new_key_list_prev);
gc_notify_updated_slot(weak_cell,
weak_cell.RawField(WeakCell::kKeyListPrevOffset),
new_key_list_prev);
weak_cell.set_key_list_next(undefined);
if (new_key_list_prev.IsUndefined(isolate)) {
new_key_list_head = weak_cell;
} else {
DCHECK(new_key_list_head.IsWeakCell());
WeakCell prev_cell = WeakCell::cast(new_key_list_prev);
prev_cell.set_key_list_next(weak_cell);
gc_notify_updated_slot(prev_cell,
prev_cell.RawField(WeakCell::kKeyListNextOffset),
weak_cell);
}
new_key_list_prev = weak_cell;
}
bool was_present;
key_map = ObjectHashTable::Remove(isolate, key_map, unregister_token,
&was_present);
finalization_group->set_key_map(*key_map);
return was_present;
}
return false;
if (new_key_list_head.IsUndefined(isolate)) {
DCHECK(was_present);
key_map.ClearEntry(isolate, entry);
key_map.ElementRemoved();
} else {
key_map.ValueAtPut(entry, new_key_list_head);
gc_notify_updated_slot(key_map, key_map.RawFieldOfValueAt(entry),
new_key_list_head);
}
return was_present;
}
bool JSFinalizationGroup::NeedsCleanup() const {
......@@ -136,16 +202,18 @@ Object JSFinalizationGroup::PopClearedCellHoldings(
// Also remove the WeakCell from the key_map (if it's there).
if (!weak_cell->unregister_token().IsUndefined(isolate)) {
if (weak_cell->key_list_prev().IsUndefined(isolate)) {
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
Handle<Object> key = handle(weak_cell->unregister_token(), isolate);
Handle<SimpleNumberDictionary> key_map = handle(
SimpleNumberDictionary::cast(finalization_group->key_map()), isolate);
Handle<Object> unregister_token =
handle(weak_cell->unregister_token(), isolate);
uint32_t key = Smi::ToInt(unregister_token->GetHash());
InternalIndex entry = key_map->FindEntry(isolate, key);
if (weak_cell->key_list_next().IsUndefined(isolate)) {
// weak_cell is the only one associated with its key; remove the key
// from the hash table.
bool was_present;
key_map = ObjectHashTable::Remove(isolate, key_map, key, &was_present);
DCHECK(was_present);
DCHECK(entry.is_found());
key_map = SimpleNumberDictionary::DeleteEntry(isolate, key_map, entry);
finalization_group->set_key_map(*key_map);
} else {
// weak_cell is the list head for its key; we need to change the value
......@@ -155,7 +223,7 @@ Object JSFinalizationGroup::PopClearedCellHoldings(
DCHECK_EQ(next->key_list_prev(), *weak_cell);
next->set_key_list_prev(ReadOnlyRoots(isolate).undefined_value());
weak_cell->set_key_list_next(ReadOnlyRoots(isolate).undefined_value());
key_map = ObjectHashTable::Put(key_map, key, next);
key_map = SimpleNumberDictionary::Set(isolate, key_map, key, next);
finalization_group->set_key_map(*key_map);
}
} else {
......
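
A standalone model of the RemoveUnregisterToken walk above, using a hand-rolled
linked list keyed by identity hash (plain C++ with made-up names, not V8
types): matching cells are unlinked, colliding cells are relinked into a new
list, and the map entry is dropped only when nothing remains.

#include <cstdint>
#include <unordered_map>

struct FakeCell {
  const void* token = nullptr;   // weakly held token (stand-in)
  FakeCell* key_next = nullptr;
  FakeCell* key_prev = nullptr;
};

using FakeKeyMap = std::unordered_map<uint32_t, FakeCell*>;

// Remove every cell whose token matches, relinking cells that merely collided
// on the same identity hash; drop the map entry if the list becomes empty.
bool RemoveUnregisterTokenSketch(FakeKeyMap& map, uint32_t hash,
                                 const void* token) {
  auto it = map.find(hash);
  if (it == map.end()) return false;   // token was never registered here
  bool was_present = false;
  FakeCell* new_head = nullptr;
  FakeCell* new_prev = nullptr;
  for (FakeCell* cell = it->second; cell != nullptr;) {
    FakeCell* next = cell->key_next;
    if (cell->token == token) {
      cell->key_prev = cell->key_next = nullptr;  // matched: unlink entirely
      was_present = true;
    } else {
      cell->key_prev = new_prev;                  // collision: keep the cell
      cell->key_next = nullptr;
      if (new_prev == nullptr) new_head = cell;
      else new_prev->key_next = cell;
      new_prev = cell;
    }
    cell = next;
  }
  if (new_head == nullptr) map.erase(it);  // unique key: remove the entry
  else it->second = new_head;              // collisions remain: store new head
  return was_present;
}
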
......@@ -45,6 +45,16 @@ class JSFinalizationGroup : public JSObject {
Handle<JSReceiver> unregister_token,
Isolate* isolate);
// RemoveUnregisterToken is called from both Unregister and during GC. Since
// it modifies slots in key_map and WeakCells and the normal write barrier is
// disabled during GC, we need to tell the GC about the modified slots via the
// gc_notify_updated_slot function.
template <typename MatchCallback, typename GCNotifyUpdatedSlotCallback>
inline bool RemoveUnregisterToken(
JSReceiver unregister_token, Isolate* isolate,
MatchCallback match_callback,
GCNotifyUpdatedSlotCallback gc_notify_updated_slot);
// Returns true if the cleared_cells list is non-empty.
inline bool NeedsCleanup() const;
......
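
The declaration above is templated on two callbacks; a stripped-down sketch of
that calling pattern (names invented for illustration): the mutator caller
passes a no-op slot callback, while the GC caller passes one that records every
rewritten slot.

#include <cstdio>

struct SlotRef { const void* holder; const char* field; };

// One removal routine, two kinds of callers. match fires for each cell whose
// token matched; notify fires for every pointer field the routine rewrites.
template <typename MatchCallback, typename GCNotifyUpdatedSlotCallback>
bool RemoveTokenPattern(MatchCallback match,
                        GCNotifyUpdatedSlotCallback notify) {
  int matched_cell = 0;                        // stand-in for a WeakCell
  match(&matched_cell);                        // e.g. null its token field
  notify(SlotRef{&matched_cell, "key_list_prev"});
  return true;
}

int main() {
  // FinalizationGroup#unregister analogue: no slot recording needed.
  RemoveTokenPattern([](int*) {}, [](SlotRef) {});
  // GC analogue: the write barrier is off, so record each updated slot.
  RemoveTokenPattern([](int*) {},
                     [](SlotRef s) { std::printf("record %s\n", s.field); });
}
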
......@@ -21,6 +21,7 @@ extern class JSFinalizationGroupCleanupIterator extends JSObject {
extern class WeakCell extends HeapObject {
finalization_group: Undefined|JSFinalizationGroup;
target: Undefined|JSReceiver;
unregister_token: Object;
holdings: Object;
// For storing doubly linked lists of WeakCells in JSFinalizationGroup's
......@@ -28,11 +29,12 @@ extern class WeakCell extends HeapObject {
prev: Undefined|WeakCell;
next: Undefined|WeakCell;
// For storing doubly linked lists of WeakCells per unregister token in
// JSFinalizationGroup's token-keyed hashmap. WeakCell also needs to know its
// token, so that we can remove it from the key_map when we remove the last
// WeakCell associated with it.
unregister_token: Object;
// For storing doubly linked lists of WeakCells per key in
// JSFinalizationGroup's key-based hashmap. The key is the identity hash of
// unregister_token. WeakCell also needs to know its token, so that we can
// remove its corresponding key from the key_map when we remove the last
// WeakCell associated with it or when the unregister_token dies. The
// unregister token is stored above, after target, as both are weak.
key_list_prev: Undefined|WeakCell;
key_list_next: Undefined|WeakCell;
}
......
......@@ -219,7 +219,8 @@ class WeakCell::BodyDescriptor final : public BodyDescriptorBase {
ObjectVisitor* v) {
IteratePointers(obj, HeapObject::kHeaderSize, kTargetOffset, v);
IterateCustomWeakPointer(obj, kTargetOffset, v);
IteratePointers(obj, kTargetOffset + kTaggedSize, object_size, v);
IterateCustomWeakPointer(obj, kUnregisterTokenOffset, v);
IteratePointers(obj, kUnregisterTokenOffset + kTaggedSize, object_size, v);
}
static inline int SizeOf(Map map, HeapObject object) {
......
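
A small sketch tying the Torque layout change to the BodyDescriptor change
above (offsets are invented; only the ordering matters): target and
unregister_token sit next to each other, are skipped by the strong pointer
ranges, and are each visited individually as custom weak pointers.

#include <cstddef>
#include <vector>

// Invented field offsets mirroring the new layout, with target and
// unregister_token adjacent.
constexpr std::ptrdiff_t kHeaderSize = 8;
constexpr std::ptrdiff_t kTargetOffset = 16;
constexpr std::ptrdiff_t kUnregisterTokenOffset = 24;
constexpr std::ptrdiff_t kTaggedSize = 8;
constexpr std::ptrdiff_t kObjectSize = 64;

// IterateBody analogue: strong pointer ranges on both sides, plus the two
// weak fields reported separately so the marker can treat them weakly.
void IterateBodySketch(std::vector<std::ptrdiff_t>& strong_slots,
                       std::vector<std::ptrdiff_t>& weak_slots) {
  for (auto off = kHeaderSize; off < kTargetOffset; off += kTaggedSize)
    strong_slots.push_back(off);
  weak_slots.push_back(kTargetOffset);
  weak_slots.push_back(kUnregisterTokenOffset);
  for (auto off = kUnregisterTokenOffset + kTaggedSize; off < kObjectSize;
       off += kTaggedSize)
    strong_slots.push_back(off);
}
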
......@@ -8090,6 +8090,15 @@ Maybe<bool> JSFinalizationGroup::Cleanup(
Isolate* isolate, Handle<JSFinalizationGroup> finalization_group,
Handle<Object> cleanup) {
DCHECK(cleanup->IsCallable());
// Attempt to shrink key_map now, as unregister tokens are held weakly and the
// map is not shrinkable when sweeping dead tokens during GC itself.
if (!finalization_group->key_map().IsUndefined(isolate)) {
Handle<SimpleNumberDictionary> key_map = handle(
SimpleNumberDictionary::cast(finalization_group->key_map()), isolate);
key_map = SimpleNumberDictionary::Shrink(isolate, key_map);
finalization_group->set_key_map(*key_map);
}
// It's possible that the cleared_cells list is empty, since
// FinalizationGroup.unregister() removed all its elements before this task
// ran. In that case, don't call the cleanup function.
......
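
The shrink-on-cleanup idea above in miniature (std::unordered_map stands in for
SimpleNumberDictionary; the threshold is arbitrary): removal during GC only
clears entries, so the table is compacted opportunistically once cleanup runs
and allocation is allowed again.

#include <cstdint>
#include <unordered_map>

// Rebuild the table into a right-sized one when it has become sparse,
// analogous to calling SimpleNumberDictionary::Shrink before running the
// cleanup callback.
template <typename K, typename V>
void ShrinkIfSparse(std::unordered_map<K, V>& map) {
  if (map.bucket_count() > 8 && map.size() * 4 < map.bucket_count()) {
    std::unordered_map<K, V> compact(map.begin(), map.end());
    map.swap(compact);
  }
}
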
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --harmony-weak-refs --expose-gc --noincremental-marking
var FG = new FinalizationGroup (function (iter) { globalThis.FGRan = true; });
{
let obj = {};
// obj is its own unregister token and becomes unreachable after this
// block. If the unregister token is held strongly this test will not
// terminate.
FG.register(obj, 42, obj);
}
function tryAgain() {
gc();
if (globalThis.FGRan || FG.cleanupSome()) {
return;
}
setTimeout(tryAgain, 0);
}
tryAgain();