Commit 3b48db40 authored by Shu-yu Guo, committed by Commit Bot

[weakrefs] Make the dirty FinalizationGroup list weak

A FinalizationGroup that needs cleanup should not artificially prolong
its lifetime by being on the dirty list.

R=ulan@chromium.org

Bug: v8:8179
Change-Id: I19f102d154a9ac43b549b7d833d0c3ca7e61c6d0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2051562
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66251}
parent 4fffe0b3
@@ -641,7 +641,7 @@ void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
 }

 bool Heap::HasDirtyJSFinalizationGroups() {
-  return !dirty_js_finalization_groups().IsUndefined(isolate());
+  return !dirty_js_finalization_groups_list().IsUndefined(isolate());
 }

 AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) {
...
@@ -206,6 +206,7 @@ Heap::Heap()
   set_native_contexts_list(Smi::zero());
   set_allocation_sites_list(Smi::zero());
+  set_dirty_js_finalization_groups_list(Smi::zero());

   // Put a dummy entry in the remembered pages so we can find the list the
   // minidump even if there are no real unmapped pages.
   RememberUnmappedPage(kNullAddress, false);
@@ -2651,6 +2652,7 @@ void Heap::UpdateReferencesInExternalStringTable(
 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
+  ProcessDirtyJSFinalizationGroups(retainer);
 }
@@ -2672,9 +2674,17 @@ void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   set_allocation_sites_list(allocation_site_obj);
 }

+void Heap::ProcessDirtyJSFinalizationGroups(WeakObjectRetainer* retainer) {
+  Object head = VisitWeakList<JSFinalizationGroup>(
+      this, dirty_js_finalization_groups_list(), retainer);
+  set_dirty_js_finalization_groups_list(head);
+}
+
 void Heap::ProcessWeakListRoots(WeakObjectRetainer* retainer) {
   set_native_contexts_list(retainer->RetainAs(native_contexts_list()));
   set_allocation_sites_list(retainer->RetainAs(allocation_sites_list()));
+  set_dirty_js_finalization_groups_list(
+      retainer->RetainAs(dirty_js_finalization_groups_list()));
 }

 void Heap::ForeachAllocationSite(
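For context, the two paths above assume the WeakObjectRetainer contract: RetainAs() returns the (possibly relocated) object if it survived the current GC cycle, or a cleared value if it did not. ProcessWeakListRoots fixes up only the list head that way, while the new ProcessDirtyJSFinalizationGroups walks the whole chain via VisitWeakList. Below is a minimal sketch of that contract using hypothetical stand-in types, not V8's tagged Object values:

```cpp
#include <iostream>

// Hypothetical stand-in for a heap object: "marked" means it survived this
// GC cycle, "forwarded" is its new address if the GC moved it.
struct HeapObjectStub {
  bool marked = false;
  HeapObjectStub* forwarded = nullptr;
};

// Sketch of the retainer contract: return the object to keep referring to,
// or nullptr to signal that the referring slot must be cleared.
struct WeakObjectRetainerStub {
  HeapObjectStub* RetainAs(HeapObjectStub* object) {
    if (object == nullptr || !object->marked) return nullptr;
    return object->forwarded != nullptr ? object->forwarded : object;
  }
};

int main() {
  HeapObjectStub live, dead, moved, new_location;
  live.marked = true;
  moved.marked = true;
  moved.forwarded = &new_location;

  WeakObjectRetainerStub retainer;
  std::cout << (retainer.RetainAs(&live) == &live) << "\n";           // 1: kept
  std::cout << (retainer.RetainAs(&dead) == nullptr) << "\n";         // 1: cleared
  std::cout << (retainer.RetainAs(&moved) == &new_location) << "\n";  // 1: updated
}
```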
@@ -6064,25 +6074,25 @@ void Heap::AddDirtyJSFinalizationGroup(
     std::function<void(HeapObject object, ObjectSlot slot, Object target)>
         gc_notify_updated_slot) {
   DCHECK(!HasDirtyJSFinalizationGroups() ||
-         dirty_js_finalization_groups().IsJSFinalizationGroup());
+         dirty_js_finalization_groups_list().IsJSFinalizationGroup());
   DCHECK(finalization_group.next().IsUndefined(isolate()));
   DCHECK(!finalization_group.scheduled_for_cleanup());
   finalization_group.set_scheduled_for_cleanup(true);
-  finalization_group.set_next(dirty_js_finalization_groups());
+  finalization_group.set_next(dirty_js_finalization_groups_list());
   gc_notify_updated_slot(
       finalization_group,
       finalization_group.RawField(JSFinalizationGroup::kNextOffset),
-      dirty_js_finalization_groups());
-  set_dirty_js_finalization_groups(finalization_group);
-  // Roots are rescanned after objects are moved, so no need to record a slot
-  // for the root pointing to the first JSFinalizationGroup.
+      dirty_js_finalization_groups_list());
+  set_dirty_js_finalization_groups_list(finalization_group);
+  // dirty_js_finalization_groups_list is rescanned by ProcessWeakListRoots.
 }

 MaybeHandle<JSFinalizationGroup> Heap::TakeOneDirtyJSFinalizationGroup() {
   if (HasDirtyJSFinalizationGroups()) {
     Handle<JSFinalizationGroup> finalization_group(
-        JSFinalizationGroup::cast(dirty_js_finalization_groups()), isolate());
-    set_dirty_js_finalization_groups(finalization_group->next());
+        JSFinalizationGroup::cast(dirty_js_finalization_groups_list()),
+        isolate());
+    set_dirty_js_finalization_groups_list(finalization_group->next());
     finalization_group->set_next(ReadOnlyRoots(isolate()).undefined_value());
     return finalization_group;
   }
@@ -6097,12 +6107,12 @@ void Heap::RemoveDirtyFinalizationGroupsOnContext(NativeContext context) {
   Isolate* isolate = this->isolate();
   Object prev = ReadOnlyRoots(isolate).undefined_value();
-  Object current = dirty_js_finalization_groups();
+  Object current = dirty_js_finalization_groups_list();
   while (!current.IsUndefined(isolate)) {
     JSFinalizationGroup finalization_group = JSFinalizationGroup::cast(current);
     if (finalization_group.native_context() == context) {
       if (prev.IsUndefined(isolate)) {
-        set_dirty_js_finalization_groups(finalization_group.next());
+        set_dirty_js_finalization_groups_list(finalization_group.next());
       } else {
         JSFinalizationGroup::cast(prev).set_next(finalization_group.next());
       }
...
@@ -502,6 +502,13 @@ class Heap {
   }
   Object allocation_sites_list() { return allocation_sites_list_; }

+  void set_dirty_js_finalization_groups_list(Object object) {
+    dirty_js_finalization_groups_list_ = object;
+  }
+  Object dirty_js_finalization_groups_list() {
+    return dirty_js_finalization_groups_list_;
+  }
+
   // Used in CreateAllocationSiteStub and the (de)serializer.
   Address allocation_sites_list_address() {
     return reinterpret_cast<Address>(&allocation_sites_list_);
@@ -1737,6 +1744,7 @@ class Heap {
   void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
   void ProcessNativeContexts(WeakObjectRetainer* retainer);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);
+  void ProcessDirtyJSFinalizationGroups(WeakObjectRetainer* retainer);
   void ProcessWeakListRoots(WeakObjectRetainer* retainer);

   // ===========================================================================
@@ -2041,6 +2049,7 @@ class Heap {
   // List heads are initialized lazily and contain the undefined_value at
   // start.
   Object native_contexts_list_;
   Object allocation_sites_list_;
+  Object dirty_js_finalization_groups_list_;

   std::vector<GCCallbackTuple> gc_epilogue_callbacks_;
   std::vector<GCCallbackTuple> gc_prologue_callbacks_;
...
@@ -185,10 +185,31 @@ struct WeakListVisitor<AllocationSite> {
   static void VisitPhantomObject(Heap*, AllocationSite) {}
 };

+template <>
+struct WeakListVisitor<JSFinalizationGroup> {
+  static void SetWeakNext(JSFinalizationGroup obj, Object next) {
+    obj.set_next(next, UPDATE_WEAK_WRITE_BARRIER);
+  }
+
+  static Object WeakNext(JSFinalizationGroup obj) { return obj.next(); }
+
+  static HeapObject WeakNextHolder(JSFinalizationGroup obj) { return obj; }
+
+  static int WeakNextOffset() { return JSFinalizationGroup::kNextOffset; }
+
+  static void VisitLiveObject(Heap*, JSFinalizationGroup,
+                              WeakObjectRetainer*) {}
+
+  static void VisitPhantomObject(Heap*, JSFinalizationGroup) {}
+};
+
 template Object VisitWeakList<Context>(Heap* heap, Object list,
                                        WeakObjectRetainer* retainer);
 template Object VisitWeakList<AllocationSite>(Heap* heap, Object list,
                                               WeakObjectRetainer* retainer);
+template Object VisitWeakList<JSFinalizationGroup>(
+    Heap* heap, Object list, WeakObjectRetainer* retainer);

 }  // namespace internal
 }  // namespace v8
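The WeakListVisitor specialization above is a traits struct: the generic VisitWeakList walker reads and rewrites the intrusive link only through WeakNext/SetWeakNext, so a new type joins the weak-list machinery without changing the walker. The following is a rough single-file sketch of the pattern, using simplified pointer-based types; V8's real code operates on tagged objects and applies a weak write barrier in SetWeakNext:

```cpp
#include <iostream>

// Each weak-listed type specializes WeakListVisitor so the generic walker
// can follow and rewrite its intrusive "next" link.
template <typename T>
struct WeakListVisitor;

struct FinalizationGroup {
  FinalizationGroup* next = nullptr;
  bool alive = true;  // stand-in for "the retainer kept this object"
};

template <>
struct WeakListVisitor<FinalizationGroup> {
  static FinalizationGroup* WeakNext(FinalizationGroup* obj) {
    return obj->next;
  }
  static void SetWeakNext(FinalizationGroup* obj, FinalizationGroup* next) {
    obj->next = next;  // the real version also applies a weak write barrier
  }
};

// Generic walker: keeps live entries, unlinks dead ones, returns the new head.
template <typename T>
T* VisitWeakList(T* list) {
  T* head = nullptr;
  T* tail = nullptr;
  for (T* cur = list; cur != nullptr; cur = WeakListVisitor<T>::WeakNext(cur)) {
    if (!cur->alive) continue;  // dead: drop it from the list
    if (tail == nullptr) {
      head = cur;  // first survivor becomes the new head
    } else {
      WeakListVisitor<T>::SetWeakNext(tail, cur);  // relink past dead entries
    }
    tail = cur;
  }
  if (tail != nullptr) WeakListVisitor<T>::SetWeakNext(tail, nullptr);
  return head;
}

int main() {
  FinalizationGroup a, b, c;
  a.next = &b;
  b.next = &c;
  b.alive = false;  // simulate b dying in this GC cycle
  FinalizationGroup* head = VisitWeakList(&a);
  std::cout << (head == &a && a.next == &c) << "\n";  // prints 1: a -> c
}
```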
@@ -67,6 +67,7 @@ bool Heap::CreateHeapObjects() {
   set_native_contexts_list(ReadOnlyRoots(this).undefined_value());
   set_allocation_sites_list(ReadOnlyRoots(this).undefined_value());
+  set_dirty_js_finalization_groups_list(ReadOnlyRoots(this).undefined_value());

   return true;
 }
@@ -618,7 +619,6 @@ void Heap::CreateInitialObjects() {
   // There's no "current microtask" in the beginning.
   set_current_microtask(roots.undefined_value());
-  set_dirty_js_finalization_groups(roots.undefined_value());
   set_weak_refs_keep_during_job(roots.undefined_value());

   // Allocate cache for single character one byte strings.
...
@@ -36,6 +36,8 @@ class JSFinalizationGroup : public JSObject {
   DECL_INT_ACCESSORS(flags)

+  class BodyDescriptor;
+
   inline static void Register(Handle<JSFinalizationGroup> finalization_group,
                               Handle<JSReceiver> target,
                               Handle<Object> holdings, Handle<Object> key,
...
@@ -8,6 +8,8 @@ extern class JSFinalizationGroup extends JSObject {
   active_cells: Undefined|WeakCell;
   cleared_cells: Undefined|WeakCell;
   key_map: Object;
+  // For the linked list of FinalizationGroups that need cleanup. This link
+  // is weak.
   next: Undefined|JSFinalizationGroup;
   flags: Smi;
 }
...
@@ -248,6 +248,27 @@ class JSWeakRef::BodyDescriptor final : public BodyDescriptorBase {
   }
 };

+class JSFinalizationGroup::BodyDescriptor final : public BodyDescriptorBase {
+ public:
+  static bool IsValidSlot(Map map, HeapObject obj, int offset) {
+    return IsValidJSObjectSlotImpl(map, obj, offset);
+  }
+
+  template <typename ObjectVisitor>
+  static inline void IterateBody(Map map, HeapObject obj, int object_size,
+                                 ObjectVisitor* v) {
+    IteratePointers(obj, JSObject::BodyDescriptor::kStartOffset, kNextOffset,
+                    v);
+    IterateCustomWeakPointer(obj, kNextOffset, v);
+    IterateJSObjectBodyImpl(map, obj, kNextOffset + kTaggedSize, object_size,
+                            v);
+  }
+
+  static inline int SizeOf(Map map, HeapObject object) {
+    return map.instance_size();
+  }
+};
+
 class SharedFunctionInfo::BodyDescriptor final : public BodyDescriptorBase {
  public:
   static bool IsValidSlot(Map map, HeapObject obj, int offset) {
...
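The new BodyDescriptor above is what actually makes the link weak during marking: every field is visited as a strong pointer except the slot at kNextOffset, which is reported through the custom-weak path. Here is a schematic sketch of that split iteration, with an invented visitor interface and offsets purely for illustration:

```cpp
#include <iostream>

// Invented visitor interface: a marker would trace strong slots and give
// weak slots special treatment instead of unconditionally keeping them alive.
struct Visitor {
  virtual void VisitPointers(int start_offset, int end_offset) = 0;
  virtual void VisitCustomWeakPointer(int offset) = 0;
  virtual ~Visitor() = default;
};

constexpr int kTaggedSize = 8;                 // illustrative slot size
constexpr int kStartOffset = 0;                // first pointer field
constexpr int kNextOffset = 4 * kTaggedSize;   // the weak "next" link
constexpr int kObjectSize = 6 * kTaggedSize;   // total object size

// Mirrors the shape of the IterateBody above: strong range, one weak slot,
// then the remaining strong range.
void IterateBody(Visitor* v) {
  v->VisitPointers(kStartOffset, kNextOffset);
  v->VisitCustomWeakPointer(kNextOffset);
  v->VisitPointers(kNextOffset + kTaggedSize, kObjectSize);
}

struct PrintingVisitor : Visitor {
  void VisitPointers(int start, int end) override {
    for (int off = start; off < end; off += kTaggedSize)
      std::cout << "strong slot at offset " << off << "\n";
  }
  void VisitCustomWeakPointer(int offset) override {
    std::cout << "weak slot at offset " << offset << "\n";
  }
};

int main() {
  PrintingVisitor v;
  IterateBody(&v);  // prints 5 strong slots and 1 weak slot
}
```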
@@ -251,8 +251,6 @@ class Symbol;
   V(TemplateList, message_listeners, MessageListeners) \
   /* Support for async stack traces */ \
   V(HeapObject, current_microtask, CurrentMicrotask) \
-  /* JSFinalizationGroup objects which need cleanup */ \
-  V(Object, dirty_js_finalization_groups, DirtyJSFinalizationGroups) \
   /* KeepDuringJob set for JS WeakRefs */ \
   V(HeapObject, weak_refs_keep_during_job, WeakRefsKeepDuringJob) \
   V(HeapObject, interpreter_entry_trampoline_for_profiling, \
...
@@ -54,6 +54,8 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
     isolate->heap()->set_allocation_sites_list(
         ReadOnlyRoots(isolate).undefined_value());
   }
+  isolate->heap()->set_dirty_js_finalization_groups_list(
+      ReadOnlyRoots(isolate).undefined_value());

   isolate->builtins()->MarkInitialized();
...
@@ -44,7 +44,6 @@ bool IsInitiallyMutable(Factory* factory, Address object_address) {
   V(builtins_constants_table) \
   V(current_microtask) \
   V(detached_contexts) \
-  V(dirty_js_finalization_groups) \
   V(feedback_vectors_for_profiling_tools) \
   V(shared_wasm_memories) \
   V(materialized_objects) \
...
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --harmony-weak-refs --expose-gc --noincremental-marking

let cleanup_called = false;
function cleanup(iter) {
  [...iter];
  cleanup_called = true;
};

(function() {
  let fg = new FinalizationGroup(cleanup);

  (function() {
    let x = {};
    fg.register(x, {});
    x = null;
  })();

  // Schedule fg for cleanup.
  gc();
})();

// Collect fg, which should result in cleanup not called.
gc();

setTimeout(function() { assertFalse(cleanup_called); }, 0);