Commit 16586b52 authored by Dominik Inführ, committed by Commit Bot

Keep track of EphemeronHashTables in Worklist

JSWeakCollections used to be chained in a singly-linked list. The GC now
stores each JSWeakCollection's EphemeronHashTable backing store in a
Worklist instead. This should make future parallelization easier.

Bug: chromium:844008
Change-Id: Icc0c1b537c23e8d24dc5b27452fb8106562c9a09
Reviewed-on: https://chromium-review.googlesource.com/1092496
Commit-Queue: Dominik Inführ <dinfuehr@google.com>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53619}
parent 5760586e
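
Editor's note: the patch replaces an intrusive singly-linked list, threaded
through each JSWeakCollection's next field, with a worklist holding the
backing EphemeronHashTables directly. Below is a minimal single-threaded
sketch of that shape; it assumes nothing about V8's real
Worklist<EntryType, kSegmentSize> beyond the Push/Pop/Clear operations this
patch uses (the real worklist is segmented per task so markers can run in
parallel).

    // Editor's sketch (not V8 code): the worklist shape this patch adopts,
    // reduced to one thread and an unsegmented vector.
    #include <vector>

    template <typename EntryType>
    class SimpleWorklist {
     public:
      void Push(EntryType entry) { entries_.push_back(entry); }
      // Returns false once the worklist is drained, like Worklist::Pop.
      bool Pop(EntryType* out) {
        if (entries_.empty()) return false;
        *out = entries_.back();
        entries_.pop_back();
        return true;
      }
      void Clear() { entries_.clear(); }

     private:
      std::vector<EntryType> entries_;
    };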
@@ -241,7 +241,6 @@ Heap::Heap()
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
   set_native_contexts_list(nullptr);
   set_allocation_sites_list(Smi::kZero);
-  set_encountered_weak_collections(Smi::kZero);
   // Put a dummy entry in the remembered pages so we can find the list the
   // minidump even if there are no real unmapped pages.
   RememberUnmappedPage(kNullAddress, false);
@@ -2181,11 +2180,6 @@ void Heap::Scavenge() {
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS);
     IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
   }
-  {
-    // Weak collections are held strongly by the Scavenger.
-    TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK);
-    IterateEncounteredWeakCollections(&root_scavenge_visitor);
-  }
   {
     // Parallel phase scavenging all copied and promoted objects.
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
@@ -3945,11 +3939,6 @@ void Heap::IterateSmiRoots(RootVisitor* v) {
   v->Synchronize(VisitorSynchronization::kSmiRootList);
 }
 
-void Heap::IterateEncounteredWeakCollections(RootVisitor* visitor) {
-  visitor->VisitRootPointer(Root::kWeakCollections, nullptr,
-                            &encountered_weak_collections_);
-}
-
 // We cannot avoid stale handles to left-trimmed objects, but can only make
 // sure all handles still needed are updated. Filter out a stale pointer
 // and clear the slot to allow post processing of handles (needed because
......
@@ -801,14 +801,6 @@ class Heap {
   // Used in CreateAllocationSiteStub and the (de)serializer.
   Object** allocation_sites_list_address() { return &allocation_sites_list_; }
 
-  void set_encountered_weak_collections(Object* weak_collection) {
-    encountered_weak_collections_ = weak_collection;
-  }
-  Object* encountered_weak_collections() const {
-    return encountered_weak_collections_;
-  }
-  void IterateEncounteredWeakCollections(RootVisitor* visitor);
-
   // Number of mark-sweeps.
   int ms_count() const { return ms_count_; }
@@ -2337,11 +2329,6 @@ class Heap {
   Object* native_contexts_list_;
   Object* allocation_sites_list_;
 
-  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
-  // marking. It is initialized during marking, destroyed after marking and
-  // contains Smi(0) while marking is not active.
-  Object* encountered_weak_collections_;
-
   std::vector<GCCallbackTuple> gc_epilogue_callbacks_;
   std::vector<GCCallbackTuple> gc_prologue_callbacks_;
......
@@ -691,6 +691,27 @@ void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
           // get scavenged). Drop references to it.
           return false;
         }
         *slot_out = slot_in;
         return true;
       });
+
+  weak_objects_->ephemeron_hash_tables.Update(
+      [heap](EphemeronHashTable* slot_in,
+             EphemeronHashTable** slot_out) -> bool {
+        HeapObject* heap_obj = slot_in;
+        MapWord map_word = heap_obj->map_word();
+
+        if (map_word.IsForwardingAddress()) {
+          *slot_out = EphemeronHashTable::cast(map_word.ToForwardingAddress());
+          return true;
+        }
+
+        if (heap->InNewSpace(heap_obj)) {
+          // An object could die in scavenge even though an earlier full GC's
+          // concurrent marking has already marked it. In the case of an
+          // EphemeronHashTable it would have already been added to the
+          // worklist. If that happens the table needs to be removed again.
+          return false;
+        }
+
+        *slot_out = slot_in;
+        return true;
+      });
......
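
Editor's note: the Update callbacks above follow a keep-or-drop contract.
Returning false drops the entry (the table died in the scavenge); returning
true keeps it, with *slot_out holding the possibly forwarded pointer. A
stand-alone sketch of that contract over a plain vector, purely illustrative
(UpdateEntries is not a V8 function; V8's Worklist::Update walks per-task
segments plus a global pool):

    #include <vector>

    // Editor's sketch of the Update contract assumed above.
    template <typename EntryType, typename Callback>
    void UpdateEntries(std::vector<EntryType>* entries, Callback callback) {
      size_t live = 0;
      for (EntryType& entry : *entries) {
        EntryType out;
        if (callback(entry, &out)) {
          // Kept, possibly redirected to a forwarded address.
          (*entries)[live++] = out;
        }
        // Entries whose callback returned false (dead objects) are dropped.
      }
      entries->resize(live);
    }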
@@ -86,26 +86,12 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
   return size;
 }
 
-template <FixedArrayVisitationMode fixed_array_mode,
-          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
-int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
-    VisitJSWeakCollection(Map* map, JSWeakCollection* weak_collection) {
-  // Enqueue weak collection in linked list of encountered weak collections.
-  if (weak_collection->next() == heap_->undefined_value()) {
-    weak_collection->set_next(heap_->encountered_weak_collections());
-    heap_->set_encountered_weak_collections(weak_collection);
-  }
-
-  int size = JSWeakCollection::BodyDescriptor::SizeOf(map, weak_collection);
-  JSWeakCollection::BodyDescriptor::IterateBody(map, weak_collection, size,
-                                                this);
-  return size;
-}
-
 template <FixedArrayVisitationMode fixed_array_mode,
           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
 int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
     VisitEphemeronHashTable(Map* map, EphemeronHashTable* table) {
+  collector_->AddEphemeronHashTable(table);
   // TODO(dinfuehr): Account size of the backing store.
   return 0;
 }
......
@@ -1854,71 +1854,43 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
 
 void MarkCompactCollector::ProcessWeakCollections() {
   MarkCompactMarkingVisitor visitor(this, marking_state());
-  Object* weak_collection_obj = heap()->encountered_weak_collections();
-  while (weak_collection_obj != Smi::kZero) {
-    JSWeakCollection* weak_collection =
-        reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
-    DCHECK(non_atomic_marking_state()->IsBlackOrGrey(weak_collection));
-    if (weak_collection->table()->IsEphemeronHashTable()) {
-      EphemeronHashTable* table =
-          EphemeronHashTable::cast(weak_collection->table());
-      for (int i = 0; i < table->Capacity(); i++) {
-        HeapObject* heap_object = HeapObject::cast(table->KeyAt(i));
-        if (non_atomic_marking_state()->IsBlackOrGrey(heap_object)) {
-          Object** key_slot =
-              table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
-          RecordSlot(table, key_slot, *key_slot);
-          Object** value_slot = table->RawFieldOfElementAt(
-              EphemeronHashTable::EntryToValueIndex(i));
-          if (V8_UNLIKELY(FLAG_track_retaining_path) &&
-              (*value_slot)->IsHeapObject()) {
-            heap()->AddEphemeralRetainer(heap_object,
-                                         HeapObject::cast(*value_slot));
-          }
-          visitor.VisitPointer(table, value_slot);
-        }
-      }
-    } else {
-      DCHECK(weak_collection->table()->IsUndefined(isolate()));
-    }
-    weak_collection_obj = weak_collection->next();
-  }
+  weak_objects_.ephemeron_hash_tables.Iterate([&](EphemeronHashTable* table) {
+    for (int i = 0; i < table->Capacity(); i++) {
+      HeapObject* heap_object = HeapObject::cast(table->KeyAt(i));
+      if (non_atomic_marking_state()->IsBlackOrGrey(heap_object)) {
+        Object** key_slot =
+            table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
+        RecordSlot(table, key_slot, *key_slot);
+        Object** value_slot = table->RawFieldOfElementAt(
+            EphemeronHashTable::EntryToValueIndex(i));
+        if (V8_UNLIKELY(FLAG_track_retaining_path) &&
+            (*value_slot)->IsHeapObject()) {
+          heap()->AddEphemeralRetainer(heap_object,
+                                       HeapObject::cast(*value_slot));
+        }
+        visitor.VisitPointer(table, value_slot);
+      }
+    }
+  });
 }
 
 void MarkCompactCollector::ClearWeakCollections() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
-  Object* weak_collection_obj = heap()->encountered_weak_collections();
-  while (weak_collection_obj != Smi::kZero) {
-    JSWeakCollection* weak_collection =
-        reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
-    DCHECK(non_atomic_marking_state()->IsBlackOrGrey(weak_collection));
-    if (weak_collection->table()->IsEphemeronHashTable()) {
-      EphemeronHashTable* table =
-          EphemeronHashTable::cast(weak_collection->table());
-      for (int i = 0; i < table->Capacity(); i++) {
-        HeapObject* key = HeapObject::cast(table->KeyAt(i));
-        if (!non_atomic_marking_state()->IsBlackOrGrey(key)) {
-          table->RemoveEntry(i);
-        }
+  EphemeronHashTable* table;
+  while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
+    for (int i = 0; i < table->Capacity(); i++) {
+      HeapObject* key = HeapObject::cast(table->KeyAt(i));
+      if (!non_atomic_marking_state()->IsBlackOrGrey(key)) {
+        table->RemoveEntry(i);
       }
-    } else {
-      DCHECK(weak_collection->table()->IsUndefined(isolate()));
     }
-    weak_collection_obj = weak_collection->next();
-    weak_collection->set_next(heap()->undefined_value());
   }
-  heap()->set_encountered_weak_collections(Smi::kZero);
 }
 
 void MarkCompactCollector::AbortWeakCollections() {
-  Object* weak_collection_obj = heap()->encountered_weak_collections();
-  while (weak_collection_obj != Smi::kZero) {
-    JSWeakCollection* weak_collection =
-        reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
-    weak_collection_obj = weak_collection->next();
-    weak_collection->set_next(heap()->undefined_value());
-  }
-  heap()->set_encountered_weak_collections(Smi::kZero);
+  weak_objects_.ephemeron_hash_tables.Clear();
 }
 
 void MarkCompactCollector::ClearWeakCells() {
@@ -3634,7 +3606,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
     // Update pointers from external string table.
     heap()->UpdateNewSpaceReferencesInExternalStringTable(
         &UpdateReferenceInExternalStringTableEntry);
-    heap()->IterateEncounteredWeakCollections(&updating_visitor);
   }
 }
@@ -4128,7 +4099,6 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
   // Mark rest on the main thread.
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
-    heap()->IterateEncounteredWeakCollections(&root_visitor);
     ProcessMarkingWorklist();
   }
......
@@ -415,6 +415,7 @@ class MajorNonAtomicMarkingState final
 struct WeakObjects {
   Worklist<WeakCell*, 64> weak_cells;
   Worklist<TransitionArray*, 64> transition_arrays;
+  Worklist<EphemeronHashTable*, 64> ephemeron_hash_tables;
   // TODO(marja): For old space, we only need the slot, not the host
   // object. Optimize this by adding a different storage for old space.
   Worklist<std::pair<HeapObject*, HeapObjectReference**>, 64> weak_references;
@@ -607,6 +608,10 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     weak_objects_.transition_arrays.Push(kMainThread, array);
   }
 
+  void AddEphemeronHashTable(EphemeronHashTable* table) {
+    weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
+  }
+
   void AddWeakReference(HeapObject* host, HeapObjectReference** slot) {
     weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
   }
@@ -826,7 +831,6 @@ class MarkingVisitor final
   V8_INLINE int VisitFixedArray(Map* map, FixedArray* object);
   V8_INLINE int VisitJSApiObject(Map* map, JSObject* object);
   V8_INLINE int VisitJSFunction(Map* map, JSFunction* object);
-  V8_INLINE int VisitJSWeakCollection(Map* map, JSWeakCollection* object);
   V8_INLINE int VisitMap(Map* map, Map* object);
   V8_INLINE int VisitNativeContext(Map* map, Context* object);
   V8_INLINE int VisitTransitionArray(Map* map, TransitionArray* object);
......
@@ -159,6 +159,20 @@ class Worklist {
     global_pool_.Update(callback);
   }
 
+  // Calls the specified callback on each element of the deques.
+  // The signature of the callback is:
+  //   void Callback(EntryType entry).
+  //
+  // Assumes that no other tasks are running.
+  template <typename Callback>
+  void Iterate(Callback callback) {
+    for (int i = 0; i < num_tasks_; i++) {
+      private_pop_segment(i)->Iterate(callback);
+      private_push_segment(i)->Iterate(callback);
+    }
+    global_pool_.Iterate(callback);
+  }
+
   template <typename Callback>
   void IterateGlobalPool(Callback callback) {
     global_pool_.Iterate(callback);
......
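
A usage sketch of the new Iterate helper (editor's illustration; only the
Push and Iterate signatures visible in this patch are assumed, with task
id 0 standing in for kMainThread):

    // Unlike Pop, Iterate visits queued entries without draining them,
    // which is why ProcessWeakCollections can walk the tables before
    // ClearWeakCollections pops the same worklist.
    Worklist<EphemeronHashTable*, 64> tables;
    tables.Push(0, table);  // 'table' is a hypothetical EphemeronHashTable*.
    int count = 0;
    tables.Iterate([&count](EphemeronHashTable* entry) {
      count++;  // Runs once per entry, across private segments and the
                // global pool.
    });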
@@ -17984,6 +17984,15 @@ Object* Dictionary<Derived, Shape>::SlowReverseLookup(Object* value) {
   return isolate->heap()->undefined_value();
 }
 
+template <typename Derived, typename Shape>
+void ObjectHashTableBase<Derived, Shape>::FillEntriesWithHoles(
+    Handle<Derived> table) {
+  int length = table->length();
+  for (int i = Derived::EntryToIndex(0); i < length; i++) {
+    table->set_the_hole(i);
+  }
+}
+
 template <typename Derived, typename Shape>
 Object* ObjectHashTableBase<Derived, Shape>::Lookup(Isolate* isolate,
                                                     Handle<Object> key,
@@ -18172,7 +18181,7 @@ void JSWeakCollection::Set(Handle<JSWeakCollection> weak_collection,
   weak_collection->set_table(*new_table);
   if (*table != *new_table) {
     // Zap the old table since we didn't record slots for its elements.
-    table->FillWithHoles(0, table->length());
+    EphemeronHashTable::FillEntriesWithHoles(table);
   }
 }
@@ -18189,7 +18198,7 @@ bool JSWeakCollection::Delete(Handle<JSWeakCollection> weak_collection,
   weak_collection->set_table(*new_table);
   if (*table != *new_table) {
     // Zap the old table since we didn't record slots for its elements.
-    table->FillWithHoles(0, table->length());
+    EphemeronHashTable::FillEntriesWithHoles(table);
   }
   return was_present;
 }
......
@@ -281,6 +281,9 @@ class ObjectHashTableBase : public HashTable<Derived, Shape> {
   // Returns the value at entry.
   Object* ValueAt(int entry);
 
+  // Overwrite all keys and values with the hole value.
+  static void FillEntriesWithHoles(Handle<Derived>);
+
   // Adds (or overwrites) the value associated with the given key.
   static Handle<Derived> Put(Handle<Derived> table, Handle<Object> key,
                              Handle<Object> value);
......