Commit 969cdfe6 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Convert WeakObjects to heap::base::Worklist

This splits WeakObjects into explicit global and local worklists.
The latter are defined in WeakObjects::Local and are thread-local.

The main thread local worklist is stored in
MarkCompactCollector::local_weak_objects and exists during marking
similar to local_marking_worklists. Concurrent markers create their
own local worklists that are published at the end.

Change-Id: I093fdc580b4609ce83455b860b90a5099085beac
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2440607
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70317}
parent defe1a0f
......@@ -60,6 +60,9 @@ class Worklist {
// marking worklist.
void Merge(Worklist<EntryType, SegmentSize>* other);
// Swaps the segments with the given marking worklist.
void Swap(Worklist<EntryType, SegmentSize>* other);
// These functions are not thread-safe. They should be called only
// if all local marking worklists that use the current worklist have
// been published and are empty.
......@@ -190,6 +193,17 @@ void Worklist<EntryType, SegmentSize>::Merge(
}
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Swap(
    Worklist<EntryType, SegmentSize>* other) {
  // Exchange the segment chains of the two worklists. Not thread-safe:
  // per the class comment, callers must ensure all local worklists using
  // either global worklist have been published and are empty.
  Segment* other_top = other->top_;
  other->set_top(top_);
  set_top(other_top);
  // Swap the cached element counts as well. Relaxed ordering matches the
  // original accesses; no cross-thread ordering is required here since
  // the operation is only valid in a quiescent state.
  size_t this_size = size_.load(std::memory_order_relaxed);
  size_.store(other->size_.load(std::memory_order_relaxed),
              std::memory_order_relaxed);
  other->size_.store(this_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase {
public:
......@@ -283,10 +297,12 @@ class Worklist<EntryType, SegmentSize>::Local {
bool IsGlobalEmpty() const;
void Publish();
void Merge(Worklist<EntryType, SegmentSize>::Local* other);
void Merge(Local* other);
size_t PushSegmentSize() const { return push_segment_->Size(); }
void Swap(Local* other);
private:
void PublishPushSegment();
void PublishPopSegment();
......@@ -419,6 +435,14 @@ void Worklist<EntryType, SegmentSize>::Local::Merge(
worklist_->Merge(other->worklist_);
}
// Swaps the global worklists backing this Local view and |other|'s.
// Only the shared (published) segments are exchanged, so both local
// views are required to be empty.
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Swap(
    Worklist<EntryType, SegmentSize>::Local* other) {
  // Entries still sitting in unpublished local segments would not take
  // part in the swap and could end up attributed to the wrong worklist.
  CHECK(IsLocalEmpty());
  CHECK(other->IsLocalEmpty());
  worklist_->Swap(other->worklist_);
}
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::PublishPushSegment() {
if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
......
......@@ -79,14 +79,13 @@ class ConcurrentMarkingVisitor final
: public MarkingVisitorBase<ConcurrentMarkingVisitor,
ConcurrentMarkingState> {
public:
ConcurrentMarkingVisitor(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
ConcurrentMarkingVisitor(MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc,
MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(task_id, local_marking_worklists, weak_objects, heap,
: MarkingVisitorBase(local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode,
embedder_tracing_enabled, is_forced_gc),
marking_state_(memory_chunk_data),
......@@ -151,7 +150,7 @@ class ConcurrentMarkingVisitor final
}
} else if (marking_state_.IsWhite(value)) {
weak_objects_->next_ephemerons.Push(task_id_, Ephemeron{key, value});
local_weak_objects_->next_ephemerons.Push(Ephemeron{key, value});
}
return false;
}
......@@ -388,8 +387,9 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
MarkingWorklists::Local local_marking_worklists(marking_worklists_);
WeakObjects::Local local_weak_objects(weak_objects_);
ConcurrentMarkingVisitor visitor(
task_id, &local_marking_worklists, weak_objects_, heap_,
&local_marking_worklists, &local_weak_objects, heap_,
task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
&task_state->memory_chunk_data);
......@@ -411,7 +411,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
{
Ephemeron ephemeron;
while (weak_objects_->current_ephemerons.Pop(task_id, &ephemeron)) {
while (local_weak_objects.current_ephemerons.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
......@@ -467,7 +467,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
if (done) {
Ephemeron ephemeron;
while (weak_objects_->discovered_ephemerons.Pop(task_id, &ephemeron)) {
while (local_weak_objects.discovered_ephemerons.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
......@@ -475,17 +475,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
}
local_marking_worklists.Publish();
weak_objects_->transition_arrays.FlushToGlobal(task_id);
weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
weak_objects_->current_ephemerons.FlushToGlobal(task_id);
weak_objects_->next_ephemerons.FlushToGlobal(task_id);
weak_objects_->discovered_ephemerons.FlushToGlobal(task_id);
weak_objects_->weak_references.FlushToGlobal(task_id);
weak_objects_->js_weak_refs.FlushToGlobal(task_id);
weak_objects_->weak_cells.FlushToGlobal(task_id);
weak_objects_->weak_objects_in_code.FlushToGlobal(task_id);
weak_objects_->bytecode_flushing_candidates.FlushToGlobal(task_id);
weak_objects_->flushed_js_functions.FlushToGlobal(task_id);
local_weak_objects.Publish();
base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0);
total_marked_bytes_ += marked_bytes;
......@@ -565,8 +555,8 @@ void ConcurrentMarking::RescheduleTasksIfNeeded() {
}
}
if (!marking_worklists_->shared()->IsEmpty() ||
!weak_objects_->current_ephemerons.IsGlobalPoolEmpty() ||
!weak_objects_->discovered_ephemerons.IsGlobalPoolEmpty()) {
!weak_objects_->current_ephemerons.IsEmpty() ||
!weak_objects_->discovered_ephemerons.IsEmpty()) {
ScheduleTasks();
}
}
......
......@@ -5149,8 +5149,7 @@ void Heap::SetUp() {
scavenger_collector_.reset(new ScavengerCollector(this));
incremental_marking_.reset(
new IncrementalMarking(this, mark_compact_collector_->weak_objects()));
incremental_marking_.reset(new IncrementalMarking(this));
if (FLAG_concurrent_marking || FLAG_parallel_marking) {
concurrent_marking_.reset(new ConcurrentMarking(
......
......@@ -49,11 +49,9 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
incremental_marking_->EnsureBlackAllocated(addr, size);
}
IncrementalMarking::IncrementalMarking(Heap* heap,
WeakObjects* weak_objects)
IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
collector_(heap->mark_compact_collector()),
weak_objects_(weak_objects),
new_generation_observer_(this, kYoungGenerationAllocatedThreshold),
old_generation_observer_(this, kOldGenerationAllocatedThreshold) {
SetState(STOPPED);
......@@ -501,7 +499,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
}
});
weak_objects_->UpdateAfterScavenge();
collector_->local_weak_objects()->Publish();
collector_->weak_objects()->UpdateAfterScavenge();
}
void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
......
......@@ -87,7 +87,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
#endif
IncrementalMarking(Heap* heap, WeakObjects* weak_objects);
explicit IncrementalMarking(Heap* heap);
MarkingState* marking_state() { return &marking_state_; }
......@@ -286,7 +286,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
Heap* const heap_;
MarkCompactCollector* const collector_;
WeakObjects* weak_objects_;
double start_time_ms_ = 0.0;
double time_to_force_completion_ = 0.0;
......
......@@ -86,7 +86,7 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
}
void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
weak_objects_.transition_arrays.Push(kMainThreadTask, array);
local_weak_objects()->transition_arrays.Push(array);
}
template <typename MarkingState>
......
......@@ -427,12 +427,12 @@ void MarkCompactCollector::SetUp() {
void MarkCompactCollector::TearDown() {
AbortCompaction();
AbortWeakObjects();
if (heap()->incremental_marking()->IsMarking()) {
local_marking_worklists()->Publish();
heap()->marking_barrier()->Publish();
// Marking barriers of LocalHeaps will be published in their destructors.
marking_worklists()->Clear();
AbortWeakObjects();
}
}
......@@ -496,8 +496,9 @@ void MarkCompactCollector::StartMarking() {
marking_worklists()->CreateContextWorklists(contexts);
local_marking_worklists_ =
std::make_unique<MarkingWorklists::Local>(marking_worklists());
local_weak_objects_ = std::make_unique<WeakObjects::Local>(weak_objects());
marking_visitor_ = std::make_unique<MarkingVisitor>(
marking_state(), local_marking_worklists(), weak_objects(), heap_,
marking_state(), local_marking_worklists(), local_weak_objects(), heap_,
epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced());
......@@ -894,6 +895,7 @@ void MarkCompactCollector::FinishConcurrentMarking(
void MarkCompactCollector::VerifyMarking() {
CHECK(local_marking_worklists()->IsEmpty());
CHECK(local_weak_objects()->IsLocalAndGlobalEmpty());
DCHECK(heap_->incremental_marking()->IsStopped());
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
......@@ -923,10 +925,7 @@ void MarkCompactCollector::Finish() {
local_marking_worklists_.reset();
marking_worklists_.ReleaseContextWorklists();
native_context_stats_.Clear();
CHECK(weak_objects_.current_ephemerons.IsEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
weak_objects_.next_ephemerons.Clear();
local_weak_objects_.reset();
sweeper()->StartSweeperTasks();
sweeper()->StartIterabilityTasks();
......@@ -1631,7 +1630,9 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
// Move ephemerons from next_ephemerons into current_ephemerons to
// drain them in this iteration.
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
DCHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
local_weak_objects()->current_ephemerons.Swap(
&local_weak_objects()->next_ephemerons);
heap()->concurrent_marking()->set_ephemeron_marked(false);
{
......@@ -1647,8 +1648,8 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
}
CHECK(weak_objects_.current_ephemerons.IsEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
CHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
work_to_do = work_to_do || !local_marking_worklists()->IsEmpty() ||
heap()->concurrent_marking()->ephemeron_marked() ||
......@@ -1656,10 +1657,12 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
++iterations;
}
local_weak_objects()->next_ephemerons.Publish();
weak_objects()->next_ephemerons.Clear();
CHECK(local_marking_worklists()->IsEmpty());
CHECK(weak_objects_.current_ephemerons.IsEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
CHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
}
bool MarkCompactCollector::ProcessEphemerons() {
......@@ -1668,7 +1671,7 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain current_ephemerons and push ephemerons where key and value are still
// unreachable into next_ephemerons.
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
while (local_weak_objects()->current_ephemerons.Pop(&ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
......@@ -1681,15 +1684,15 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase
// before) and push ephemerons where key and value are still unreachable into
// next_ephemerons.
while (weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
while (local_weak_objects()->discovered_ephemerons.Pop(&ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true;
}
}
// Flush local ephemerons for main task to global pool.
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThreadTask);
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
local_weak_objects()->ephemeron_hash_tables.Publish();
local_weak_objects()->next_ephemerons.Publish();
return ephemeron_marked;
}
......@@ -1701,10 +1704,11 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
std::unordered_multimap<HeapObject, HeapObject, Object::Hasher> key_to_values;
Ephemeron ephemeron;
DCHECK(weak_objects_.current_ephemerons.IsEmpty());
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons);
DCHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
local_weak_objects()->current_ephemerons.Swap(
&local_weak_objects()->next_ephemerons);
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
while (local_weak_objects()->current_ephemerons.Pop(&ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
......@@ -1731,8 +1735,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
kTrackNewlyDiscoveredObjects>(0);
}
while (
weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
while (local_weak_objects()->discovered_ephemerons.Pop(&ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
......@@ -1743,7 +1746,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
if (ephemeron_marking_.newly_discovered_overflowed) {
// If newly_discovered was overflowed just visit all ephemerons in
// next_ephemerons.
weak_objects_.next_ephemerons.Iterate([&](Ephemeron ephemeron) {
local_weak_objects()->next_ephemerons.Publish();
weak_objects()->next_ephemerons.Iterate([&](Ephemeron ephemeron) {
if (non_atomic_marking_state()->IsBlackOrGrey(ephemeron.key) &&
non_atomic_marking_state()->WhiteToGrey(ephemeron.value)) {
local_marking_worklists()->Push(ephemeron.value);
......@@ -1770,7 +1774,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
work_to_do = !local_marking_worklists()->IsEmpty() ||
!local_marking_worklists()->IsEmbedderEmpty() ||
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
}
ResetNewlyDiscovered();
......@@ -1863,7 +1867,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
}
} else if (marking_state()->IsWhite(value)) {
weak_objects_.next_ephemerons.Push(kMainThreadTask, Ephemeron{key, value});
local_weak_objects()->next_ephemerons.Push(Ephemeron{key, value});
}
return false;
......@@ -1874,7 +1878,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() {
// Incremental marking might leave ephemerons in main task's local
// buffer, flush it into global pool.
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask);
local_weak_objects()->next_ephemerons.Publish();
ProcessEphemeronsUntilFixpoint();
......@@ -2099,19 +2103,12 @@ void MarkCompactCollector::ClearNonLiveReferences() {
MarkDependentCodeForDeoptimization();
DCHECK(weak_objects_.transition_arrays.IsEmpty());
DCHECK(weak_objects_.weak_references.IsEmpty());
DCHECK(weak_objects_.weak_objects_in_code.IsEmpty());
DCHECK(weak_objects_.js_weak_refs.IsEmpty());
DCHECK(weak_objects_.weak_cells.IsEmpty());
DCHECK(weak_objects_.bytecode_flushing_candidates.IsEmpty());
DCHECK(weak_objects_.flushed_js_functions.IsEmpty());
DCHECK(local_weak_objects()->IsLocalAndGlobalEmpty());
}
void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
std::pair<HeapObject, Code> weak_object_in_code;
while (weak_objects_.weak_objects_in_code.Pop(kMainThreadTask,
&weak_object_in_code)) {
while (local_weak_objects()->weak_objects_in_code.Pop(&weak_object_in_code)) {
HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second;
if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
......@@ -2224,9 +2221,10 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
void MarkCompactCollector::ClearOldBytecodeCandidates() {
DCHECK(FLAG_flush_bytecode ||
weak_objects_.bytecode_flushing_candidates.IsEmpty());
local_weak_objects()
->bytecode_flushing_candidates.IsLocalAndGlobalEmpty());
SharedFunctionInfo flushing_candidate;
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThreadTask,
while (local_weak_objects()->bytecode_flushing_candidates.Pop(
&flushing_candidate)) {
// If the BytecodeArray is dead, flush it, which will replace the field with
// an uncompiled data object.
......@@ -2244,10 +2242,10 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
}
void MarkCompactCollector::ClearFlushedJsFunctions() {
DCHECK(FLAG_flush_bytecode || weak_objects_.flushed_js_functions.IsEmpty());
DCHECK(FLAG_flush_bytecode ||
local_weak_objects()->flushed_js_functions.IsLocalAndGlobalEmpty());
JSFunction flushed_js_function;
while (weak_objects_.flushed_js_functions.Pop(kMainThreadTask,
&flushed_js_function)) {
while (local_weak_objects()->flushed_js_functions.Pop(&flushed_js_function)) {
auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
Object target) {
RecordSlot(object, slot, HeapObject::cast(target));
......@@ -2258,7 +2256,7 @@ void MarkCompactCollector::ClearFlushedJsFunctions() {
void MarkCompactCollector::ClearFullMapTransitions() {
TransitionArray array;
while (weak_objects_.transition_arrays.Pop(kMainThreadTask, &array)) {
while (local_weak_objects()->transition_arrays.Pop(&array)) {
int num_transitions = array.number_of_entries();
if (num_transitions > 0) {
Map map;
......@@ -2409,7 +2407,7 @@ void MarkCompactCollector::ClearWeakCollections() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
EphemeronHashTable table;
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThreadTask, &table)) {
while (local_weak_objects()->ephemeron_hash_tables.Pop(&table)) {
for (InternalIndex i : table.IterateEntries()) {
HeapObject key = HeapObject::cast(table.KeyAt(i));
#ifdef VERIFY_HEAP
......@@ -2442,7 +2440,7 @@ void MarkCompactCollector::ClearWeakReferences() {
std::pair<HeapObject, HeapObjectSlot> slot;
HeapObjectReference cleared_weak_ref =
HeapObjectReference::ClearedValue(isolate());
while (weak_objects_.weak_references.Pop(kMainThreadTask, &slot)) {
while (local_weak_objects()->weak_references.Pop(&slot)) {
HeapObject value;
// The slot could have been overwritten, so we have to treat it
// as MaybeObjectSlot.
......@@ -2468,7 +2466,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
return;
}
JSWeakRef weak_ref;
while (weak_objects_.js_weak_refs.Pop(kMainThreadTask, &weak_ref)) {
while (local_weak_objects()->js_weak_refs.Pop(&weak_ref)) {
HeapObject target = HeapObject::cast(weak_ref.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
......@@ -2479,7 +2477,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
}
WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThreadTask, &weak_cell)) {
while (local_weak_objects()->weak_cells.Pop(&weak_cell)) {
auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
Object target) {
if (target.IsHeapObject()) {
......@@ -2540,17 +2538,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
}
void MarkCompactCollector::AbortWeakObjects() {
weak_objects_.transition_arrays.Clear();
weak_objects_.ephemeron_hash_tables.Clear();
weak_objects_.current_ephemerons.Clear();
weak_objects_.next_ephemerons.Clear();
weak_objects_.discovered_ephemerons.Clear();
weak_objects_.weak_references.Clear();
weak_objects_.weak_objects_in_code.Clear();
weak_objects_.js_weak_refs.Clear();
weak_objects_.weak_cells.Clear();
weak_objects_.bytecode_flushing_candidates.Clear();
weak_objects_.flushed_js_functions.Clear();
local_weak_objects()->Publish();
weak_objects()->Clear();
}
bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {
......
......@@ -378,12 +378,12 @@ class MainMarkingVisitor final
MainMarkingVisitor(MarkingState* marking_state,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc)
: MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
kMainThreadTask, local_marking_worklists, weak_objects, heap,
local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode, embedder_tracing_enabled,
is_forced_gc),
marking_state_(marking_state),
......@@ -533,6 +533,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
}
WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
inline void AddTransitionArray(TransitionArray array);
......@@ -758,6 +759,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
MarkingWorklists marking_worklists_;
WeakObjects weak_objects_;
std::unique_ptr<WeakObjects::Local> local_weak_objects_;
EphemeronMarking ephemeron_marking_;
std::unique_ptr<MarkingVisitor> marking_visitor_;
......
......@@ -58,7 +58,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
// If we do not know about liveness of the value, we have to process
// the reference when we know the liveness of the whole transitive
// closure.
weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot));
local_weak_objects_->weak_references.Push(std::make_pair(host, slot));
}
}
......@@ -91,7 +91,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
HeapObject object = rinfo->target_object();
if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
if (host.IsWeakObject(object)) {
weak_objects_->weak_objects_in_code.Push(task_id_,
local_weak_objects_->weak_objects_in_code.Push(
std::make_pair(object, host));
} else {
MarkObject(host, object);
......@@ -133,7 +133,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
// Check if the JSFunction needs reset due to bytecode being flushed.
if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode &&
object.NeedsResetDueToFlushedBytecode()) {
weak_objects_->flushed_js_functions.Push(task_id_, object);
local_weak_objects_->flushed_js_functions.Push(object);
}
return size;
}
......@@ -150,7 +150,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
// If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly.
if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) {
weak_objects_->bytecode_flushing_candidates.Push(task_id_, shared_info);
local_weak_objects_->bytecode_flushing_candidates.Push(shared_info);
} else {
VisitPointer(shared_info,
shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
......@@ -260,7 +260,7 @@ template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) {
if (!concrete_visitor()->ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table);
local_weak_objects_->ephemeron_hash_tables.Push(table);
for (InternalIndex i : table.IterateEntries()) {
ObjectSlot key_slot =
......@@ -286,7 +286,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
// Revisit ephemerons with both key and value unreachable at end
// of concurrent marking cycle.
if (concrete_visitor()->marking_state()->IsWhite(value)) {
weak_objects_->discovered_ephemerons.Push(task_id_,
local_weak_objects_->discovered_ephemerons.Push(
Ephemeron{key, value});
}
}
......@@ -311,7 +311,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
} else {
// JSWeakRef points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure.
weak_objects_->js_weak_refs.Push(task_id_, weak_ref);
local_weak_objects_->js_weak_refs.Push(weak_ref);
}
}
return size;
......@@ -341,7 +341,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
// WeakCell points to a potentially dead object or a dead unregister
// token. We have to process them when we know the liveness of the whole
// transitive closure.
weak_objects_->weak_cells.Push(task_id_, weak_cell);
local_weak_objects_->weak_cells.Push(weak_cell);
}
return size;
}
......@@ -459,7 +459,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
weak_objects_->transition_arrays.Push(task_id_, array);
local_weak_objects_->transition_arrays.Push(array);
return size;
}
......
......@@ -101,16 +101,14 @@ class MarkingStateBase {
template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public:
MarkingVisitorBase(int task_id,
MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap,
MarkingVisitorBase(MarkingWorklists::Local* local_marking_worklists,
WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode,
bool is_embedder_tracing_enabled, bool is_forced_gc)
: local_marking_worklists_(local_marking_worklists),
weak_objects_(weak_objects),
local_weak_objects_(local_weak_objects),
heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch),
bytecode_flush_mode_(bytecode_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
......@@ -189,9 +187,8 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
MarkingWorklists::Local* const local_marking_worklists_;
WeakObjects* const weak_objects_;
WeakObjects::Local* const local_weak_objects_;
Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_;
const BytecodeFlushMode bytecode_flush_mode_;
const bool is_embedder_tracing_enabled_;
......
......@@ -19,6 +19,36 @@ namespace v8 {
namespace internal {
// Constructs a thread-local view of |weak_objects|: each Local worklist
// field is bound to the corresponding global worklist. The fields are
// generated via the WEAK_OBJECT_WORKLISTS X-macro, so the initializer
// list is macro-expanded; each expansion emits a trailing comma, and
// end_of_initializer_list_ exists solely to terminate the list.
WeakObjects::Local::Local(WeakObjects* weak_objects)
    :
#define CONSTRUCT_FIELD(Type, name, _) name(&weak_objects->name),
      WEAK_OBJECT_WORKLISTS(CONSTRUCT_FIELD)
#undef CONSTRUCT_FIELD
      end_of_initializer_list_(false) {
  // Silence unused-member warnings for the dummy terminator field.
  USE(end_of_initializer_list_);
}
bool WeakObjects::Local::IsLocalAndGlobalEmpty() {
bool result = true;
#define INVOKE_PREDICATE(Type, name, _) \
result = result && name.IsLocalAndGlobalEmpty();
WEAK_OBJECT_WORKLISTS(INVOKE_PREDICATE)
#undef INVOKE_PREDICATE
return result;
}
void WeakObjects::Local::Publish() {
#define INVOKE_PUBLISH(Type, name, _) name.Publish();
WEAK_OBJECT_WORKLISTS(INVOKE_PUBLISH)
#undef INVOKE_PUBLISH
}
void WeakObjects::Clear() {
#define INVOKE_CLEAR(Type, name, _) name.Clear();
WEAK_OBJECT_WORKLISTS(INVOKE_CLEAR)
#undef INVOKE_CLEAR
}
void WeakObjects::UpdateAfterScavenge() {
#define INVOKE_UPDATE(_, name, Name) Update##Name(name);
WEAK_OBJECT_WORKLISTS(INVOKE_UPDATE)
......
......@@ -6,7 +6,7 @@
#define V8_HEAP_WEAK_OBJECT_WORKLISTS_H_
#include "src/common/globals.h"
#include "src/heap/worklist.h"
#include "src/heap/base/worklist.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-weak-refs.h"
......@@ -64,14 +64,31 @@ class TransitionArray;
class WeakObjects {
public:
template <typename Type>
using WeakObjectWorklist = Worklist<Type, 64>;
using WeakObjectWorklist = ::heap::base::Worklist<Type, 64>;
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name;
class Local {
public:
explicit Local(WeakObjects* weak_objects);
bool IsLocalAndGlobalEmpty();
void Publish();
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type>::Local name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
private:
// Dummy field used for terminating the initializer list
// in the constructor.
bool end_of_initializer_list_;
};
void Clear();
void UpdateAfterScavenge();
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
private:
#define DECLARE_UPDATE_METHODS(Type, _, Name) \
void Update##Name(WeakObjectWorklist<Type>&);
......
......@@ -911,17 +911,11 @@ TEST(JSWeakRefScavengedInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked.
CHECK(
heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
CHECK(!heap->mark_compact_collector()
->weak_objects()
->js_weak_refs.IsEmpty());
}
// Now collect both weak_ref and its target. The worklist should be empty.
CcTest::CollectGarbage(NEW_SPACE);
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor shouldn't see zapped WeakRefs in the worklist.
CcTest::CollectAllGarbage();
......@@ -956,22 +950,16 @@ TEST(JSWeakRefTenuredInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked.
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// Now collect weak_ref's target. We still have a Handle to weak_ref, so it is
// moved and remains on the worklist.
CcTest::CollectGarbage(NEW_SPACE);
JSWeakRef new_weak_ref_location = *weak_ref;
CHECK_NE(old_weak_ref_location, new_weak_ref_location);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor should see the moved WeakRef in the worklist.
CcTest::CollectAllGarbage();
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
CHECK(weak_ref->target().IsUndefined(isolate));
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment