Commit 969cdfe6 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Convert WeakObjects to heap::base::Worklist

This splits WeakObjects into explicit global and local worklists.
The latter are defined in WeakObjects::Local and are thread-local.

The main thread local worklist is stored in
MarkCompactCollector::local_weak_objects and exists during marking
similar to local_marking_worklists. Concurrent markers create their
own local worklists that are published at the end.

Change-Id: I093fdc580b4609ce83455b860b90a5099085beac
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2440607
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70317}
parent defe1a0f
...@@ -60,6 +60,9 @@ class Worklist { ...@@ -60,6 +60,9 @@ class Worklist {
// marking worklist. // marking worklist.
void Merge(Worklist<EntryType, SegmentSize>* other); void Merge(Worklist<EntryType, SegmentSize>* other);
// Swaps the segments with the given marking worklist.
void Swap(Worklist<EntryType, SegmentSize>* other);
// These functions are not thread-safe. They should be called only // These functions are not thread-safe. They should be called only
// if all local marking worklists that use the current worklist have // if all local marking worklists that use the current worklist have
// been published and are empty. // been published and are empty.
...@@ -190,6 +193,17 @@ void Worklist<EntryType, SegmentSize>::Merge( ...@@ -190,6 +193,17 @@ void Worklist<EntryType, SegmentSize>::Merge(
} }
} }
// Swaps the entire contents (segment chain and element count) of this
// worklist with |other|.
// NOTE(review): the swap of top_ and size_ is not one atomic operation and
// uses relaxed memory ordering throughout — presumably callers guarantee
// exclusive access to both worklists while swapping (as with the other
// non-thread-safe operations on this class); confirm at call sites.
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Swap(
    Worklist<EntryType, SegmentSize>* other) {
  // Exchange the segment chains via the top pointers.
  Segment* top = top_;
  set_top(other->top_);
  other->set_top(top);
  // Exchange the cached element counts to match the swapped chains.
  size_t other_size = other->size_.exchange(
      size_.load(std::memory_order_relaxed), std::memory_order_relaxed);
  size_.store(other_size, std::memory_order_relaxed);
}
template <typename EntryType, uint16_t SegmentSize> template <typename EntryType, uint16_t SegmentSize>
class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase { class Worklist<EntryType, SegmentSize>::Segment : public internal::SegmentBase {
public: public:
...@@ -283,10 +297,12 @@ class Worklist<EntryType, SegmentSize>::Local { ...@@ -283,10 +297,12 @@ class Worklist<EntryType, SegmentSize>::Local {
bool IsGlobalEmpty() const; bool IsGlobalEmpty() const;
void Publish(); void Publish();
void Merge(Worklist<EntryType, SegmentSize>::Local* other); void Merge(Local* other);
size_t PushSegmentSize() const { return push_segment_->Size(); } size_t PushSegmentSize() const { return push_segment_->Size(); }
void Swap(Local* other);
private: private:
void PublishPushSegment(); void PublishPushSegment();
void PublishPopSegment(); void PublishPopSegment();
...@@ -419,6 +435,14 @@ void Worklist<EntryType, SegmentSize>::Local::Merge( ...@@ -419,6 +435,14 @@ void Worklist<EntryType, SegmentSize>::Local::Merge(
worklist_->Merge(other->worklist_); worklist_->Merge(other->worklist_);
} }
// Swaps the global worklists backing this local view and |other|.
// Precondition (enforced by the CHECKs below): both local views must hold no
// unpublished segments, so that swapping only the global state cannot lose
// or misattribute entries.
template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::Swap(
    Worklist<EntryType, SegmentSize>::Local* other) {
  CHECK(IsLocalEmpty());
  CHECK(other->IsLocalEmpty());
  // Delegate to the global Worklist::Swap, which exchanges segments and size.
  worklist_->Swap(other->worklist_);
}
template <typename EntryType, uint16_t SegmentSize> template <typename EntryType, uint16_t SegmentSize>
void Worklist<EntryType, SegmentSize>::Local::PublishPushSegment() { void Worklist<EntryType, SegmentSize>::Local::PublishPushSegment() {
if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress()) if (push_segment_ != internal::SegmentBase::GetSentinelSegmentAddress())
......
...@@ -79,14 +79,13 @@ class ConcurrentMarkingVisitor final ...@@ -79,14 +79,13 @@ class ConcurrentMarkingVisitor final
: public MarkingVisitorBase<ConcurrentMarkingVisitor, : public MarkingVisitorBase<ConcurrentMarkingVisitor,
ConcurrentMarkingState> { ConcurrentMarkingState> {
public: public:
ConcurrentMarkingVisitor(int task_id, ConcurrentMarkingVisitor(MarkingWorklists::Local* local_marking_worklists,
MarkingWorklists::Local* local_marking_worklists, WeakObjects::Local* local_weak_objects, Heap* heap,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch, unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode, BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc, bool embedder_tracing_enabled, bool is_forced_gc,
MemoryChunkDataMap* memory_chunk_data) MemoryChunkDataMap* memory_chunk_data)
: MarkingVisitorBase(task_id, local_marking_worklists, weak_objects, heap, : MarkingVisitorBase(local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode, mark_compact_epoch, bytecode_flush_mode,
embedder_tracing_enabled, is_forced_gc), embedder_tracing_enabled, is_forced_gc),
marking_state_(memory_chunk_data), marking_state_(memory_chunk_data),
...@@ -151,7 +150,7 @@ class ConcurrentMarkingVisitor final ...@@ -151,7 +150,7 @@ class ConcurrentMarkingVisitor final
} }
} else if (marking_state_.IsWhite(value)) { } else if (marking_state_.IsWhite(value)) {
weak_objects_->next_ephemerons.Push(task_id_, Ephemeron{key, value}); local_weak_objects_->next_ephemerons.Push(Ephemeron{key, value});
} }
return false; return false;
} }
...@@ -388,8 +387,9 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) { ...@@ -388,8 +387,9 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
size_t kBytesUntilInterruptCheck = 64 * KB; size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000; int kObjectsUntilInterrupCheck = 1000;
MarkingWorklists::Local local_marking_worklists(marking_worklists_); MarkingWorklists::Local local_marking_worklists(marking_worklists_);
WeakObjects::Local local_weak_objects(weak_objects_);
ConcurrentMarkingVisitor visitor( ConcurrentMarkingVisitor visitor(
task_id, &local_marking_worklists, weak_objects_, heap_, &local_marking_worklists, &local_weak_objects, heap_,
task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(), task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc, heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
&task_state->memory_chunk_data); &task_state->memory_chunk_data);
...@@ -411,7 +411,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) { ...@@ -411,7 +411,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
{ {
Ephemeron ephemeron; Ephemeron ephemeron;
while (weak_objects_->current_ephemerons.Pop(task_id, &ephemeron)) { while (local_weak_objects.current_ephemerons.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) { if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true; ephemeron_marked = true;
} }
...@@ -467,7 +467,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) { ...@@ -467,7 +467,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
if (done) { if (done) {
Ephemeron ephemeron; Ephemeron ephemeron;
while (weak_objects_->discovered_ephemerons.Pop(task_id, &ephemeron)) { while (local_weak_objects.discovered_ephemerons.Pop(&ephemeron)) {
if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) { if (visitor.ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true; ephemeron_marked = true;
} }
...@@ -475,17 +475,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) { ...@@ -475,17 +475,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
} }
local_marking_worklists.Publish(); local_marking_worklists.Publish();
weak_objects_->transition_arrays.FlushToGlobal(task_id); local_weak_objects.Publish();
weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
weak_objects_->current_ephemerons.FlushToGlobal(task_id);
weak_objects_->next_ephemerons.FlushToGlobal(task_id);
weak_objects_->discovered_ephemerons.FlushToGlobal(task_id);
weak_objects_->weak_references.FlushToGlobal(task_id);
weak_objects_->js_weak_refs.FlushToGlobal(task_id);
weak_objects_->weak_cells.FlushToGlobal(task_id);
weak_objects_->weak_objects_in_code.FlushToGlobal(task_id);
weak_objects_->bytecode_flushing_candidates.FlushToGlobal(task_id);
weak_objects_->flushed_js_functions.FlushToGlobal(task_id);
base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0); base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0);
total_marked_bytes_ += marked_bytes; total_marked_bytes_ += marked_bytes;
...@@ -565,8 +555,8 @@ void ConcurrentMarking::RescheduleTasksIfNeeded() { ...@@ -565,8 +555,8 @@ void ConcurrentMarking::RescheduleTasksIfNeeded() {
} }
} }
if (!marking_worklists_->shared()->IsEmpty() || if (!marking_worklists_->shared()->IsEmpty() ||
!weak_objects_->current_ephemerons.IsGlobalPoolEmpty() || !weak_objects_->current_ephemerons.IsEmpty() ||
!weak_objects_->discovered_ephemerons.IsGlobalPoolEmpty()) { !weak_objects_->discovered_ephemerons.IsEmpty()) {
ScheduleTasks(); ScheduleTasks();
} }
} }
......
...@@ -5149,8 +5149,7 @@ void Heap::SetUp() { ...@@ -5149,8 +5149,7 @@ void Heap::SetUp() {
scavenger_collector_.reset(new ScavengerCollector(this)); scavenger_collector_.reset(new ScavengerCollector(this));
incremental_marking_.reset( incremental_marking_.reset(new IncrementalMarking(this));
new IncrementalMarking(this, mark_compact_collector_->weak_objects()));
if (FLAG_concurrent_marking || FLAG_parallel_marking) { if (FLAG_concurrent_marking || FLAG_parallel_marking) {
concurrent_marking_.reset(new ConcurrentMarking( concurrent_marking_.reset(new ConcurrentMarking(
......
...@@ -49,11 +49,9 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr, ...@@ -49,11 +49,9 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
incremental_marking_->EnsureBlackAllocated(addr, size); incremental_marking_->EnsureBlackAllocated(addr, size);
} }
IncrementalMarking::IncrementalMarking(Heap* heap, IncrementalMarking::IncrementalMarking(Heap* heap)
WeakObjects* weak_objects)
: heap_(heap), : heap_(heap),
collector_(heap->mark_compact_collector()), collector_(heap->mark_compact_collector()),
weak_objects_(weak_objects),
new_generation_observer_(this, kYoungGenerationAllocatedThreshold), new_generation_observer_(this, kYoungGenerationAllocatedThreshold),
old_generation_observer_(this, kOldGenerationAllocatedThreshold) { old_generation_observer_(this, kOldGenerationAllocatedThreshold) {
SetState(STOPPED); SetState(STOPPED);
...@@ -501,7 +499,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() { ...@@ -501,7 +499,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
} }
}); });
weak_objects_->UpdateAfterScavenge(); collector_->local_weak_objects()->Publish();
collector_->weak_objects()->UpdateAfterScavenge();
} }
void IncrementalMarking::UpdateMarkedBytesAfterScavenge( void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
......
...@@ -87,7 +87,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { ...@@ -87,7 +87,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
static const AccessMode kAtomicity = AccessMode::NON_ATOMIC; static const AccessMode kAtomicity = AccessMode::NON_ATOMIC;
#endif #endif
IncrementalMarking(Heap* heap, WeakObjects* weak_objects); explicit IncrementalMarking(Heap* heap);
MarkingState* marking_state() { return &marking_state_; } MarkingState* marking_state() { return &marking_state_; }
...@@ -286,7 +286,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { ...@@ -286,7 +286,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
Heap* const heap_; Heap* const heap_;
MarkCompactCollector* const collector_; MarkCompactCollector* const collector_;
WeakObjects* weak_objects_;
double start_time_ms_ = 0.0; double start_time_ms_ = 0.0;
double time_to_force_completion_ = 0.0; double time_to_force_completion_ = 0.0;
......
...@@ -86,7 +86,7 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page, ...@@ -86,7 +86,7 @@ void MarkCompactCollector::RecordSlot(MemoryChunk* source_page,
} }
void MarkCompactCollector::AddTransitionArray(TransitionArray array) { void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
weak_objects_.transition_arrays.Push(kMainThreadTask, array); local_weak_objects()->transition_arrays.Push(array);
} }
template <typename MarkingState> template <typename MarkingState>
......
...@@ -427,12 +427,12 @@ void MarkCompactCollector::SetUp() { ...@@ -427,12 +427,12 @@ void MarkCompactCollector::SetUp() {
void MarkCompactCollector::TearDown() { void MarkCompactCollector::TearDown() {
AbortCompaction(); AbortCompaction();
AbortWeakObjects();
if (heap()->incremental_marking()->IsMarking()) { if (heap()->incremental_marking()->IsMarking()) {
local_marking_worklists()->Publish(); local_marking_worklists()->Publish();
heap()->marking_barrier()->Publish(); heap()->marking_barrier()->Publish();
// Marking barriers of LocalHeaps will be published in their destructors. // Marking barriers of LocalHeaps will be published in their destructors.
marking_worklists()->Clear(); marking_worklists()->Clear();
AbortWeakObjects();
} }
} }
...@@ -496,8 +496,9 @@ void MarkCompactCollector::StartMarking() { ...@@ -496,8 +496,9 @@ void MarkCompactCollector::StartMarking() {
marking_worklists()->CreateContextWorklists(contexts); marking_worklists()->CreateContextWorklists(contexts);
local_marking_worklists_ = local_marking_worklists_ =
std::make_unique<MarkingWorklists::Local>(marking_worklists()); std::make_unique<MarkingWorklists::Local>(marking_worklists());
local_weak_objects_ = std::make_unique<WeakObjects::Local>(weak_objects());
marking_visitor_ = std::make_unique<MarkingVisitor>( marking_visitor_ = std::make_unique<MarkingVisitor>(
marking_state(), local_marking_worklists(), weak_objects(), heap_, marking_state(), local_marking_worklists(), local_weak_objects(), heap_,
epoch(), Heap::GetBytecodeFlushMode(), epoch(), Heap::GetBytecodeFlushMode(),
heap_->local_embedder_heap_tracer()->InUse(), heap_->local_embedder_heap_tracer()->InUse(),
heap_->is_current_gc_forced()); heap_->is_current_gc_forced());
...@@ -894,6 +895,7 @@ void MarkCompactCollector::FinishConcurrentMarking( ...@@ -894,6 +895,7 @@ void MarkCompactCollector::FinishConcurrentMarking(
void MarkCompactCollector::VerifyMarking() { void MarkCompactCollector::VerifyMarking() {
CHECK(local_marking_worklists()->IsEmpty()); CHECK(local_marking_worklists()->IsEmpty());
CHECK(local_weak_objects()->IsLocalAndGlobalEmpty());
DCHECK(heap_->incremental_marking()->IsStopped()); DCHECK(heap_->incremental_marking()->IsStopped());
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
if (FLAG_verify_heap) { if (FLAG_verify_heap) {
...@@ -923,10 +925,7 @@ void MarkCompactCollector::Finish() { ...@@ -923,10 +925,7 @@ void MarkCompactCollector::Finish() {
local_marking_worklists_.reset(); local_marking_worklists_.reset();
marking_worklists_.ReleaseContextWorklists(); marking_worklists_.ReleaseContextWorklists();
native_context_stats_.Clear(); native_context_stats_.Clear();
local_weak_objects_.reset();
CHECK(weak_objects_.current_ephemerons.IsEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
weak_objects_.next_ephemerons.Clear();
sweeper()->StartSweeperTasks(); sweeper()->StartSweeperTasks();
sweeper()->StartIterabilityTasks(); sweeper()->StartIterabilityTasks();
...@@ -1631,7 +1630,9 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() { ...@@ -1631,7 +1630,9 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
// Move ephemerons from next_ephemerons into current_ephemerons to // Move ephemerons from next_ephemerons into current_ephemerons to
// drain them in this iteration. // drain them in this iteration.
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons); DCHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
local_weak_objects()->current_ephemerons.Swap(
&local_weak_objects()->next_ephemerons);
heap()->concurrent_marking()->set_ephemeron_marked(false); heap()->concurrent_marking()->set_ephemeron_marked(false);
{ {
...@@ -1647,8 +1648,8 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() { ...@@ -1647,8 +1648,8 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS); ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
} }
CHECK(weak_objects_.current_ephemerons.IsEmpty()); CHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty()); CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
work_to_do = work_to_do || !local_marking_worklists()->IsEmpty() || work_to_do = work_to_do || !local_marking_worklists()->IsEmpty() ||
heap()->concurrent_marking()->ephemeron_marked() || heap()->concurrent_marking()->ephemeron_marked() ||
...@@ -1656,10 +1657,12 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() { ...@@ -1656,10 +1657,12 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone(); !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
++iterations; ++iterations;
} }
local_weak_objects()->next_ephemerons.Publish();
weak_objects()->next_ephemerons.Clear();
CHECK(local_marking_worklists()->IsEmpty()); CHECK(local_marking_worklists()->IsEmpty());
CHECK(weak_objects_.current_ephemerons.IsEmpty()); CHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
CHECK(weak_objects_.discovered_ephemerons.IsEmpty()); CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
} }
bool MarkCompactCollector::ProcessEphemerons() { bool MarkCompactCollector::ProcessEphemerons() {
...@@ -1668,7 +1671,7 @@ bool MarkCompactCollector::ProcessEphemerons() { ...@@ -1668,7 +1671,7 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain current_ephemerons and push ephemerons where key and value are still // Drain current_ephemerons and push ephemerons where key and value are still
// unreachable into next_ephemerons. // unreachable into next_ephemerons.
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) { while (local_weak_objects()->current_ephemerons.Pop(&ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) { if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true; ephemeron_marked = true;
} }
...@@ -1681,15 +1684,15 @@ bool MarkCompactCollector::ProcessEphemerons() { ...@@ -1681,15 +1684,15 @@ bool MarkCompactCollector::ProcessEphemerons() {
// Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase // Drain discovered_ephemerons (filled in the drain MarkingWorklist-phase
// before) and push ephemerons where key and value are still unreachable into // before) and push ephemerons where key and value are still unreachable into
// next_ephemerons. // next_ephemerons.
while (weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) { while (local_weak_objects()->discovered_ephemerons.Pop(&ephemeron)) {
if (ProcessEphemeron(ephemeron.key, ephemeron.value)) { if (ProcessEphemeron(ephemeron.key, ephemeron.value)) {
ephemeron_marked = true; ephemeron_marked = true;
} }
} }
// Flush local ephemerons for main task to global pool. // Flush local ephemerons for main task to global pool.
weak_objects_.ephemeron_hash_tables.FlushToGlobal(kMainThreadTask); local_weak_objects()->ephemeron_hash_tables.Publish();
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask); local_weak_objects()->next_ephemerons.Publish();
return ephemeron_marked; return ephemeron_marked;
} }
...@@ -1701,10 +1704,11 @@ void MarkCompactCollector::ProcessEphemeronsLinear() { ...@@ -1701,10 +1704,11 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
std::unordered_multimap<HeapObject, HeapObject, Object::Hasher> key_to_values; std::unordered_multimap<HeapObject, HeapObject, Object::Hasher> key_to_values;
Ephemeron ephemeron; Ephemeron ephemeron;
DCHECK(weak_objects_.current_ephemerons.IsEmpty()); DCHECK(local_weak_objects()->current_ephemerons.IsLocalAndGlobalEmpty());
weak_objects_.current_ephemerons.Swap(weak_objects_.next_ephemerons); local_weak_objects()->current_ephemerons.Swap(
&local_weak_objects()->next_ephemerons);
while (weak_objects_.current_ephemerons.Pop(kMainThreadTask, &ephemeron)) { while (local_weak_objects()->current_ephemerons.Pop(&ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value); ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) { if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
...@@ -1731,8 +1735,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() { ...@@ -1731,8 +1735,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
kTrackNewlyDiscoveredObjects>(0); kTrackNewlyDiscoveredObjects>(0);
} }
while ( while (local_weak_objects()->discovered_ephemerons.Pop(&ephemeron)) {
weak_objects_.discovered_ephemerons.Pop(kMainThreadTask, &ephemeron)) {
ProcessEphemeron(ephemeron.key, ephemeron.value); ProcessEphemeron(ephemeron.key, ephemeron.value);
if (non_atomic_marking_state()->IsWhite(ephemeron.value)) { if (non_atomic_marking_state()->IsWhite(ephemeron.value)) {
...@@ -1743,7 +1746,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() { ...@@ -1743,7 +1746,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
if (ephemeron_marking_.newly_discovered_overflowed) { if (ephemeron_marking_.newly_discovered_overflowed) {
// If newly_discovered was overflowed just visit all ephemerons in // If newly_discovered was overflowed just visit all ephemerons in
// next_ephemerons. // next_ephemerons.
weak_objects_.next_ephemerons.Iterate([&](Ephemeron ephemeron) { local_weak_objects()->next_ephemerons.Publish();
weak_objects()->next_ephemerons.Iterate([&](Ephemeron ephemeron) {
if (non_atomic_marking_state()->IsBlackOrGrey(ephemeron.key) && if (non_atomic_marking_state()->IsBlackOrGrey(ephemeron.key) &&
non_atomic_marking_state()->WhiteToGrey(ephemeron.value)) { non_atomic_marking_state()->WhiteToGrey(ephemeron.value)) {
local_marking_worklists()->Push(ephemeron.value); local_marking_worklists()->Push(ephemeron.value);
...@@ -1770,7 +1774,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() { ...@@ -1770,7 +1774,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
work_to_do = !local_marking_worklists()->IsEmpty() || work_to_do = !local_marking_worklists()->IsEmpty() ||
!local_marking_worklists()->IsEmbedderEmpty() || !local_marking_worklists()->IsEmbedderEmpty() ||
!heap()->local_embedder_heap_tracer()->IsRemoteTracingDone(); !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
CHECK(weak_objects_.discovered_ephemerons.IsEmpty()); CHECK(local_weak_objects()->discovered_ephemerons.IsLocalAndGlobalEmpty());
} }
ResetNewlyDiscovered(); ResetNewlyDiscovered();
...@@ -1863,7 +1867,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) { ...@@ -1863,7 +1867,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
} }
} else if (marking_state()->IsWhite(value)) { } else if (marking_state()->IsWhite(value)) {
weak_objects_.next_ephemerons.Push(kMainThreadTask, Ephemeron{key, value}); local_weak_objects()->next_ephemerons.Push(Ephemeron{key, value});
} }
return false; return false;
...@@ -1874,7 +1878,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() { ...@@ -1874,7 +1878,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() {
// Incremental marking might leave ephemerons in main task's local // Incremental marking might leave ephemerons in main task's local
// buffer, flush it into global pool. // buffer, flush it into global pool.
weak_objects_.next_ephemerons.FlushToGlobal(kMainThreadTask); local_weak_objects()->next_ephemerons.Publish();
ProcessEphemeronsUntilFixpoint(); ProcessEphemeronsUntilFixpoint();
...@@ -2099,19 +2103,12 @@ void MarkCompactCollector::ClearNonLiveReferences() { ...@@ -2099,19 +2103,12 @@ void MarkCompactCollector::ClearNonLiveReferences() {
MarkDependentCodeForDeoptimization(); MarkDependentCodeForDeoptimization();
DCHECK(weak_objects_.transition_arrays.IsEmpty()); DCHECK(local_weak_objects()->IsLocalAndGlobalEmpty());
DCHECK(weak_objects_.weak_references.IsEmpty());
DCHECK(weak_objects_.weak_objects_in_code.IsEmpty());
DCHECK(weak_objects_.js_weak_refs.IsEmpty());
DCHECK(weak_objects_.weak_cells.IsEmpty());
DCHECK(weak_objects_.bytecode_flushing_candidates.IsEmpty());
DCHECK(weak_objects_.flushed_js_functions.IsEmpty());
} }
void MarkCompactCollector::MarkDependentCodeForDeoptimization() { void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
std::pair<HeapObject, Code> weak_object_in_code; std::pair<HeapObject, Code> weak_object_in_code;
while (weak_objects_.weak_objects_in_code.Pop(kMainThreadTask, while (local_weak_objects()->weak_objects_in_code.Pop(&weak_object_in_code)) {
&weak_object_in_code)) {
HeapObject object = weak_object_in_code.first; HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second; Code code = weak_object_in_code.second;
if (!non_atomic_marking_state()->IsBlackOrGrey(object) && if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
...@@ -2224,10 +2221,11 @@ void MarkCompactCollector::FlushBytecodeFromSFI( ...@@ -2224,10 +2221,11 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
void MarkCompactCollector::ClearOldBytecodeCandidates() { void MarkCompactCollector::ClearOldBytecodeCandidates() {
DCHECK(FLAG_flush_bytecode || DCHECK(FLAG_flush_bytecode ||
weak_objects_.bytecode_flushing_candidates.IsEmpty()); local_weak_objects()
->bytecode_flushing_candidates.IsLocalAndGlobalEmpty());
SharedFunctionInfo flushing_candidate; SharedFunctionInfo flushing_candidate;
while (weak_objects_.bytecode_flushing_candidates.Pop(kMainThreadTask, while (local_weak_objects()->bytecode_flushing_candidates.Pop(
&flushing_candidate)) { &flushing_candidate)) {
// If the BytecodeArray is dead, flush it, which will replace the field with // If the BytecodeArray is dead, flush it, which will replace the field with
// an uncompiled data object. // an uncompiled data object.
if (!non_atomic_marking_state()->IsBlackOrGrey( if (!non_atomic_marking_state()->IsBlackOrGrey(
...@@ -2244,10 +2242,10 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() { ...@@ -2244,10 +2242,10 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
} }
void MarkCompactCollector::ClearFlushedJsFunctions() { void MarkCompactCollector::ClearFlushedJsFunctions() {
DCHECK(FLAG_flush_bytecode || weak_objects_.flushed_js_functions.IsEmpty()); DCHECK(FLAG_flush_bytecode ||
local_weak_objects()->flushed_js_functions.IsLocalAndGlobalEmpty());
JSFunction flushed_js_function; JSFunction flushed_js_function;
while (weak_objects_.flushed_js_functions.Pop(kMainThreadTask, while (local_weak_objects()->flushed_js_functions.Pop(&flushed_js_function)) {
&flushed_js_function)) {
auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot, auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
Object target) { Object target) {
RecordSlot(object, slot, HeapObject::cast(target)); RecordSlot(object, slot, HeapObject::cast(target));
...@@ -2258,7 +2256,7 @@ void MarkCompactCollector::ClearFlushedJsFunctions() { ...@@ -2258,7 +2256,7 @@ void MarkCompactCollector::ClearFlushedJsFunctions() {
void MarkCompactCollector::ClearFullMapTransitions() { void MarkCompactCollector::ClearFullMapTransitions() {
TransitionArray array; TransitionArray array;
while (weak_objects_.transition_arrays.Pop(kMainThreadTask, &array)) { while (local_weak_objects()->transition_arrays.Pop(&array)) {
int num_transitions = array.number_of_entries(); int num_transitions = array.number_of_entries();
if (num_transitions > 0) { if (num_transitions > 0) {
Map map; Map map;
...@@ -2409,7 +2407,7 @@ void MarkCompactCollector::ClearWeakCollections() { ...@@ -2409,7 +2407,7 @@ void MarkCompactCollector::ClearWeakCollections() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
EphemeronHashTable table; EphemeronHashTable table;
while (weak_objects_.ephemeron_hash_tables.Pop(kMainThreadTask, &table)) { while (local_weak_objects()->ephemeron_hash_tables.Pop(&table)) {
for (InternalIndex i : table.IterateEntries()) { for (InternalIndex i : table.IterateEntries()) {
HeapObject key = HeapObject::cast(table.KeyAt(i)); HeapObject key = HeapObject::cast(table.KeyAt(i));
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
...@@ -2442,7 +2440,7 @@ void MarkCompactCollector::ClearWeakReferences() { ...@@ -2442,7 +2440,7 @@ void MarkCompactCollector::ClearWeakReferences() {
std::pair<HeapObject, HeapObjectSlot> slot; std::pair<HeapObject, HeapObjectSlot> slot;
HeapObjectReference cleared_weak_ref = HeapObjectReference cleared_weak_ref =
HeapObjectReference::ClearedValue(isolate()); HeapObjectReference::ClearedValue(isolate());
while (weak_objects_.weak_references.Pop(kMainThreadTask, &slot)) { while (local_weak_objects()->weak_references.Pop(&slot)) {
HeapObject value; HeapObject value;
// The slot could have been overwritten, so we have to treat it // The slot could have been overwritten, so we have to treat it
// as MaybeObjectSlot. // as MaybeObjectSlot.
...@@ -2468,7 +2466,7 @@ void MarkCompactCollector::ClearJSWeakRefs() { ...@@ -2468,7 +2466,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
return; return;
} }
JSWeakRef weak_ref; JSWeakRef weak_ref;
while (weak_objects_.js_weak_refs.Pop(kMainThreadTask, &weak_ref)) { while (local_weak_objects()->js_weak_refs.Pop(&weak_ref)) {
HeapObject target = HeapObject::cast(weak_ref.target()); HeapObject target = HeapObject::cast(weak_ref.target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) { if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value()); weak_ref.set_target(ReadOnlyRoots(isolate()).undefined_value());
...@@ -2479,7 +2477,7 @@ void MarkCompactCollector::ClearJSWeakRefs() { ...@@ -2479,7 +2477,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
} }
} }
WeakCell weak_cell; WeakCell weak_cell;
while (weak_objects_.weak_cells.Pop(kMainThreadTask, &weak_cell)) { while (local_weak_objects()->weak_cells.Pop(&weak_cell)) {
auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot, auto gc_notify_updated_slot = [](HeapObject object, ObjectSlot slot,
Object target) { Object target) {
if (target.IsHeapObject()) { if (target.IsHeapObject()) {
...@@ -2540,17 +2538,8 @@ void MarkCompactCollector::ClearJSWeakRefs() { ...@@ -2540,17 +2538,8 @@ void MarkCompactCollector::ClearJSWeakRefs() {
} }
void MarkCompactCollector::AbortWeakObjects() { void MarkCompactCollector::AbortWeakObjects() {
weak_objects_.transition_arrays.Clear(); local_weak_objects()->Publish();
weak_objects_.ephemeron_hash_tables.Clear(); weak_objects()->Clear();
weak_objects_.current_ephemerons.Clear();
weak_objects_.next_ephemerons.Clear();
weak_objects_.discovered_ephemerons.Clear();
weak_objects_.weak_references.Clear();
weak_objects_.weak_objects_in_code.Clear();
weak_objects_.js_weak_refs.Clear();
weak_objects_.weak_cells.Clear();
weak_objects_.bytecode_flushing_candidates.Clear();
weak_objects_.flushed_js_functions.Clear();
} }
bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) { bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {
......
...@@ -378,12 +378,12 @@ class MainMarkingVisitor final ...@@ -378,12 +378,12 @@ class MainMarkingVisitor final
MainMarkingVisitor(MarkingState* marking_state, MainMarkingVisitor(MarkingState* marking_state,
MarkingWorklists::Local* local_marking_worklists, MarkingWorklists::Local* local_marking_worklists,
WeakObjects* weak_objects, Heap* heap, WeakObjects::Local* local_weak_objects, Heap* heap,
unsigned mark_compact_epoch, unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode, BytecodeFlushMode bytecode_flush_mode,
bool embedder_tracing_enabled, bool is_forced_gc) bool embedder_tracing_enabled, bool is_forced_gc)
: MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>( : MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
kMainThreadTask, local_marking_worklists, weak_objects, heap, local_marking_worklists, local_weak_objects, heap,
mark_compact_epoch, bytecode_flush_mode, embedder_tracing_enabled, mark_compact_epoch, bytecode_flush_mode, embedder_tracing_enabled,
is_forced_gc), is_forced_gc),
marking_state_(marking_state), marking_state_(marking_state),
...@@ -533,6 +533,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -533,6 +533,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
} }
WeakObjects* weak_objects() { return &weak_objects_; } WeakObjects* weak_objects() { return &weak_objects_; }
WeakObjects::Local* local_weak_objects() { return local_weak_objects_.get(); }
inline void AddTransitionArray(TransitionArray array); inline void AddTransitionArray(TransitionArray array);
...@@ -758,6 +759,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -758,6 +759,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
MarkingWorklists marking_worklists_; MarkingWorklists marking_worklists_;
WeakObjects weak_objects_; WeakObjects weak_objects_;
std::unique_ptr<WeakObjects::Local> local_weak_objects_;
EphemeronMarking ephemeron_marking_; EphemeronMarking ephemeron_marking_;
std::unique_ptr<MarkingVisitor> marking_visitor_; std::unique_ptr<MarkingVisitor> marking_visitor_;
......
...@@ -58,7 +58,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject( ...@@ -58,7 +58,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
// If we do not know about liveness of the value, we have to process // If we do not know about liveness of the value, we have to process
// the reference when we know the liveness of the whole transitive // the reference when we know the liveness of the whole transitive
// closure. // closure.
weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot)); local_weak_objects_->weak_references.Push(std::make_pair(host, slot));
} }
} }
...@@ -91,8 +91,8 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer( ...@@ -91,8 +91,8 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
HeapObject object = rinfo->target_object(); HeapObject object = rinfo->target_object();
if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) { if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
if (host.IsWeakObject(object)) { if (host.IsWeakObject(object)) {
weak_objects_->weak_objects_in_code.Push(task_id_, local_weak_objects_->weak_objects_in_code.Push(
std::make_pair(object, host)); std::make_pair(object, host));
} else { } else {
MarkObject(host, object); MarkObject(host, object);
} }
...@@ -133,7 +133,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction( ...@@ -133,7 +133,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
// Check if the JSFunction needs reset due to bytecode being flushed. // Check if the JSFunction needs reset due to bytecode being flushed.
if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode && if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode &&
object.NeedsResetDueToFlushedBytecode()) { object.NeedsResetDueToFlushedBytecode()) {
weak_objects_->flushed_js_functions.Push(task_id_, object); local_weak_objects_->flushed_js_functions.Push(object);
} }
return size; return size;
} }
...@@ -150,7 +150,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo( ...@@ -150,7 +150,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
// If the SharedFunctionInfo has old bytecode, mark it as flushable, // If the SharedFunctionInfo has old bytecode, mark it as flushable,
// otherwise visit the function data field strongly. // otherwise visit the function data field strongly.
if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) { if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) {
weak_objects_->bytecode_flushing_candidates.Push(task_id_, shared_info); local_weak_objects_->bytecode_flushing_candidates.Push(shared_info);
} else { } else {
VisitPointer(shared_info, VisitPointer(shared_info,
shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset)); shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
...@@ -260,7 +260,7 @@ template <typename ConcreteVisitor, typename MarkingState> ...@@ -260,7 +260,7 @@ template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable( int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
Map map, EphemeronHashTable table) { Map map, EphemeronHashTable table) {
if (!concrete_visitor()->ShouldVisit(table)) return 0; if (!concrete_visitor()->ShouldVisit(table)) return 0;
weak_objects_->ephemeron_hash_tables.Push(task_id_, table); local_weak_objects_->ephemeron_hash_tables.Push(table);
for (InternalIndex i : table.IterateEntries()) { for (InternalIndex i : table.IterateEntries()) {
ObjectSlot key_slot = ObjectSlot key_slot =
...@@ -286,8 +286,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable( ...@@ -286,8 +286,8 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
// Revisit ephemerons with both key and value unreachable at end // Revisit ephemerons with both key and value unreachable at end
// of concurrent marking cycle. // of concurrent marking cycle.
if (concrete_visitor()->marking_state()->IsWhite(value)) { if (concrete_visitor()->marking_state()->IsWhite(value)) {
weak_objects_->discovered_ephemerons.Push(task_id_, local_weak_objects_->discovered_ephemerons.Push(
Ephemeron{key, value}); Ephemeron{key, value});
} }
} }
} }
...@@ -311,7 +311,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef( ...@@ -311,7 +311,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
} else { } else {
// JSWeakRef points to a potentially dead object. We have to process // JSWeakRef points to a potentially dead object. We have to process
// them when we know the liveness of the whole transitive closure. // them when we know the liveness of the whole transitive closure.
weak_objects_->js_weak_refs.Push(task_id_, weak_ref); local_weak_objects_->js_weak_refs.Push(weak_ref);
} }
} }
return size; return size;
...@@ -341,7 +341,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell( ...@@ -341,7 +341,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
// WeakCell points to a potentially dead object or a dead unregister // WeakCell points to a potentially dead object or a dead unregister
// token. We have to process them when we know the liveness of the whole // token. We have to process them when we know the liveness of the whole
// transitive closure. // transitive closure.
weak_objects_->weak_cells.Push(task_id_, weak_cell); local_weak_objects_->weak_cells.Push(weak_cell);
} }
return size; return size;
} }
...@@ -459,7 +459,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray( ...@@ -459,7 +459,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
this->VisitMapPointer(array); this->VisitMapPointer(array);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array); int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this); TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
weak_objects_->transition_arrays.Push(task_id_, array); local_weak_objects_->transition_arrays.Push(array);
return size; return size;
} }
......
...@@ -101,16 +101,14 @@ class MarkingStateBase { ...@@ -101,16 +101,14 @@ class MarkingStateBase {
template <typename ConcreteVisitor, typename MarkingState> template <typename ConcreteVisitor, typename MarkingState>
class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> { class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
public: public:
MarkingVisitorBase(int task_id, MarkingVisitorBase(MarkingWorklists::Local* local_marking_worklists,
MarkingWorklists::Local* local_marking_worklists, WeakObjects::Local* local_weak_objects, Heap* heap,
WeakObjects* weak_objects, Heap* heap,
unsigned mark_compact_epoch, unsigned mark_compact_epoch,
BytecodeFlushMode bytecode_flush_mode, BytecodeFlushMode bytecode_flush_mode,
bool is_embedder_tracing_enabled, bool is_forced_gc) bool is_embedder_tracing_enabled, bool is_forced_gc)
: local_marking_worklists_(local_marking_worklists), : local_marking_worklists_(local_marking_worklists),
weak_objects_(weak_objects), local_weak_objects_(local_weak_objects),
heap_(heap), heap_(heap),
task_id_(task_id),
mark_compact_epoch_(mark_compact_epoch), mark_compact_epoch_(mark_compact_epoch),
bytecode_flush_mode_(bytecode_flush_mode), bytecode_flush_mode_(bytecode_flush_mode),
is_embedder_tracing_enabled_(is_embedder_tracing_enabled), is_embedder_tracing_enabled_(is_embedder_tracing_enabled),
...@@ -189,9 +187,8 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> { ...@@ -189,9 +187,8 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
V8_INLINE void MarkObject(HeapObject host, HeapObject obj); V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
MarkingWorklists::Local* const local_marking_worklists_; MarkingWorklists::Local* const local_marking_worklists_;
WeakObjects* const weak_objects_; WeakObjects::Local* const local_weak_objects_;
Heap* const heap_; Heap* const heap_;
const int task_id_;
const unsigned mark_compact_epoch_; const unsigned mark_compact_epoch_;
const BytecodeFlushMode bytecode_flush_mode_; const BytecodeFlushMode bytecode_flush_mode_;
const bool is_embedder_tracing_enabled_; const bool is_embedder_tracing_enabled_;
......
...@@ -19,6 +19,36 @@ namespace v8 { ...@@ -19,6 +19,36 @@ namespace v8 {
namespace internal { namespace internal {
WeakObjects::Local::Local(WeakObjects* weak_objects)
:
#define CONSTRUCT_FIELD(Type, name, _) name(&weak_objects->name),
WEAK_OBJECT_WORKLISTS(CONSTRUCT_FIELD)
#undef CONSTRUCT_FIELD
end_of_initializer_list_(false) {
USE(end_of_initializer_list_);
}
bool WeakObjects::Local::IsLocalAndGlobalEmpty() {
bool result = true;
#define INVOKE_PREDICATE(Type, name, _) \
result = result && name.IsLocalAndGlobalEmpty();
WEAK_OBJECT_WORKLISTS(INVOKE_PREDICATE)
#undef INVOKE_PREDICATE
return result;
}
void WeakObjects::Local::Publish() {
#define INVOKE_PUBLISH(Type, name, _) name.Publish();
WEAK_OBJECT_WORKLISTS(INVOKE_PUBLISH)
#undef INVOKE_PUBLISH
}
void WeakObjects::Clear() {
#define INVOKE_CLEAR(Type, name, _) name.Clear();
WEAK_OBJECT_WORKLISTS(INVOKE_CLEAR)
#undef INVOKE_CLEAR
}
void WeakObjects::UpdateAfterScavenge() { void WeakObjects::UpdateAfterScavenge() {
#define INVOKE_UPDATE(_, name, Name) Update##Name(name); #define INVOKE_UPDATE(_, name, Name) Update##Name(name);
WEAK_OBJECT_WORKLISTS(INVOKE_UPDATE) WEAK_OBJECT_WORKLISTS(INVOKE_UPDATE)
......
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
#define V8_HEAP_WEAK_OBJECT_WORKLISTS_H_ #define V8_HEAP_WEAK_OBJECT_WORKLISTS_H_
#include "src/common/globals.h" #include "src/common/globals.h"
#include "src/heap/worklist.h" #include "src/heap/base/worklist.h"
#include "src/objects/heap-object.h" #include "src/objects/heap-object.h"
#include "src/objects/js-weak-refs.h" #include "src/objects/js-weak-refs.h"
...@@ -64,14 +64,31 @@ class TransitionArray; ...@@ -64,14 +64,31 @@ class TransitionArray;
class WeakObjects { class WeakObjects {
public: public:
template <typename Type> template <typename Type>
using WeakObjectWorklist = Worklist<Type, 64>; using WeakObjectWorklist = ::heap::base::Worklist<Type, 64>;
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name; class Local {
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST) public:
explicit Local(WeakObjects* weak_objects);
bool IsLocalAndGlobalEmpty();
void Publish();
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type>::Local name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST #undef DECLARE_WORKLIST
private:
// Dummy field used for terminating the initializer list
// in the constructor.
bool end_of_initializer_list_;
};
void Clear();
void UpdateAfterScavenge(); void UpdateAfterScavenge();
#define DECLARE_WORKLIST(Type, name, _) WeakObjectWorklist<Type> name;
WEAK_OBJECT_WORKLISTS(DECLARE_WORKLIST)
#undef DECLARE_WORKLIST
private: private:
#define DECLARE_UPDATE_METHODS(Type, _, Name) \ #define DECLARE_UPDATE_METHODS(Type, _, Name) \
void Update##Name(WeakObjectWorklist<Type>&); void Update##Name(WeakObjectWorklist<Type>&);
......
...@@ -911,17 +911,11 @@ TEST(JSWeakRefScavengedInWorklist) { ...@@ -911,17 +911,11 @@ TEST(JSWeakRefScavengedInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist // Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked. // since its target isn't marked.
CHECK(
heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true); heap::SimulateIncrementalMarking(heap, true);
CHECK(!heap->mark_compact_collector()
->weak_objects()
->js_weak_refs.IsEmpty());
} }
// Now collect both weak_ref and its target. The worklist should be empty. // Now collect both weak_ref and its target. The worklist should be empty.
CcTest::CollectGarbage(NEW_SPACE); CcTest::CollectGarbage(NEW_SPACE);
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor shouldn't see zapped WeakRefs in the worklist. // The mark-compactor shouldn't see zapped WeakRefs in the worklist.
CcTest::CollectAllGarbage(); CcTest::CollectAllGarbage();
...@@ -956,22 +950,16 @@ TEST(JSWeakRefTenuredInWorklist) { ...@@ -956,22 +950,16 @@ TEST(JSWeakRefTenuredInWorklist) {
// Do marking. This puts the WeakRef above into the js_weak_refs worklist // Do marking. This puts the WeakRef above into the js_weak_refs worklist
// since its target isn't marked. // since its target isn't marked.
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
heap::SimulateIncrementalMarking(heap, true); heap::SimulateIncrementalMarking(heap, true);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// Now collect weak_ref's target. We still have a Handle to weak_ref, so it is // Now collect weak_ref's target. We still have a Handle to weak_ref, so it is
// moved and remains on the worklist. // moved and remains on the worklist.
CcTest::CollectGarbage(NEW_SPACE); CcTest::CollectGarbage(NEW_SPACE);
JSWeakRef new_weak_ref_location = *weak_ref; JSWeakRef new_weak_ref_location = *weak_ref;
CHECK_NE(old_weak_ref_location, new_weak_ref_location); CHECK_NE(old_weak_ref_location, new_weak_ref_location);
CHECK(
!heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
// The mark-compactor should see the moved WeakRef in the worklist. // The mark-compactor should see the moved WeakRef in the worklist.
CcTest::CollectAllGarbage(); CcTest::CollectAllGarbage();
CHECK(heap->mark_compact_collector()->weak_objects()->js_weak_refs.IsEmpty());
CHECK(weak_ref->target().IsUndefined(isolate)); CHECK(weak_ref->target().IsUndefined(isolate));
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment