Commit 28133adc authored by Ulan Degenbaev, committed by Commit Bot

[heap] Split marking worklist into global worklist and local worklists

This is the first step in refactoring Worklist to allow arbitrary
number of local worklists with private segments:
- Introduce MarkingWorklistImpl<> which will eventually replace
  (and will be renamed to) Worklist.
- MarkingWorklistImpl<> owns the global pool of segments but does not
  keep track of private segments.
- MarkingWorklistImpl<>::Local owns private segments and can be
  constructed dynamically on background threads.
- Rename the existing MarkingWorklistsHolder to MarkingWorklists.
- Rename the existing MarkingWorklists to MarkingWorklists::Local.
- Rename the existing marking_worklists_holder to marking_worklists.
- Rename the existing marking_worklists to local_marking_worklists.

Design doc: https://bit.ly/2XMtjLi
Bug: v8:10315

Change-Id: I9da34883ad34f4572fccd40c51e51eaf50c617bc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2343330
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69330}
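
The split described in the commit message can be illustrated with a small, standalone sketch: a global worklist owns a mutex-guarded pool of published segments, while each thread constructs a local view on demand that buffers work in private segments and publishes them back. The class names, segment capacity, and locking below are simplified placeholders for MarkingWorklistImpl<> and MarkingWorklistImpl<>::Local, not V8's actual implementation.

// Simplified, standalone sketch of the global-pool / local-segments pattern.
// Names, segment size, and locking are illustrative only.
#include <cstddef>
#include <mutex>
#include <utility>
#include <vector>

struct HeapObject { void* ptr = nullptr; };

class GlobalWorklist {
 public:
  using Segment = std::vector<HeapObject>;
  static constexpr size_t kSegmentCapacity = 64;  // illustrative

  // Called by a local view when a private segment fills up or is published.
  void PushSegment(Segment segment) {
    std::lock_guard<std::mutex> guard(mutex_);
    pool_.push_back(std::move(segment));
  }

  // Called by a local view when its private pop segment runs dry.
  bool PopSegment(Segment* out) {
    std::lock_guard<std::mutex> guard(mutex_);
    if (pool_.empty()) return false;
    *out = std::move(pool_.back());
    pool_.pop_back();
    return true;
  }

  bool IsEmpty() {
    std::lock_guard<std::mutex> guard(mutex_);
    return pool_.empty();
  }

 private:
  std::mutex mutex_;
  std::vector<Segment> pool_;  // global pool of published segments
};

// Owns private push/pop segments; can be created on any thread without
// pre-registering a task id, which is the point of the refactoring.
class LocalWorklist {
 public:
  explicit LocalWorklist(GlobalWorklist* global) : global_(global) {}

  void Push(HeapObject object) {
    push_segment_.push_back(object);
    if (push_segment_.size() == GlobalWorklist::kSegmentCapacity) {
      global_->PushSegment(std::move(push_segment_));
      push_segment_.clear();
    }
  }

  bool Pop(HeapObject* object) {
    if (pop_segment_.empty() && !TakeSegment()) return false;
    *object = pop_segment_.back();
    pop_segment_.pop_back();
    return true;
  }

  // Makes all locally buffered work visible to other threads.
  void Publish() {
    if (!push_segment_.empty()) {
      global_->PushSegment(std::move(push_segment_));
      push_segment_.clear();
    }
    if (!pop_segment_.empty()) {
      global_->PushSegment(std::move(pop_segment_));
      pop_segment_.clear();
    }
  }

 private:
  bool TakeSegment() {
    // First reuse the private push segment, then fall back to the global pool.
    if (!push_segment_.empty()) {
      pop_segment_ = std::move(push_segment_);
      push_segment_.clear();
      return true;
    }
    return global_->PopSegment(&pop_segment_);
  }

  GlobalWorklist* const global_;
  GlobalWorklist::Segment push_segment_;
  GlobalWorklist::Segment pop_segment_;
};

A background marking task would construct such a local view when it starts, Push/Pop objects through it, and call Publish() at a safepoint; this mirrors how ConcurrentMarking::Run below creates a MarkingWorklists::Local instead of being handed a pre-registered task id.
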
parent 45928320
@@ -2587,6 +2587,7 @@ v8_source_set("v8_base_without_compiler") {
     "src/heap/marking-barrier.h",
     "src/heap/marking-visitor-inl.h",
     "src/heap/marking-visitor.h",
+    "src/heap/marking-worklist-inl.h",
     "src/heap/marking-worklist.cc",
     "src/heap/marking-worklist.h",
     "src/heap/marking.cc",
...
@@ -79,13 +79,14 @@ class ConcurrentMarkingVisitor final
     : public MarkingVisitorBase<ConcurrentMarkingVisitor,
                                 ConcurrentMarkingState> {
  public:
-  ConcurrentMarkingVisitor(int task_id, MarkingWorklists* marking_worklists,
+  ConcurrentMarkingVisitor(int task_id,
+                           MarkingWorklists::Local* local_marking_worklists,
                            WeakObjects* weak_objects, Heap* heap,
                            unsigned mark_compact_epoch,
                            BytecodeFlushMode bytecode_flush_mode,
                            bool embedder_tracing_enabled, bool is_forced_gc,
                            MemoryChunkDataMap* memory_chunk_data)
-      : MarkingVisitorBase(task_id, marking_worklists, weak_objects, heap,
+      : MarkingVisitorBase(task_id, local_marking_worklists, weak_objects, heap,
                            mark_compact_epoch, bytecode_flush_mode,
                            embedder_tracing_enabled, is_forced_gc),
         marking_state_(memory_chunk_data),
@@ -145,7 +146,7 @@ class ConcurrentMarkingVisitor final
   bool ProcessEphemeron(HeapObject key, HeapObject value) {
     if (marking_state_.IsBlackOrGrey(key)) {
       if (marking_state_.WhiteToGrey(value)) {
-        marking_worklists_->Push(value);
+        local_marking_worklists_->Push(value);
         return true;
       }
@@ -369,11 +370,11 @@ class ConcurrentMarking::Task : public CancelableTask {
   DISALLOW_COPY_AND_ASSIGN(Task);
 };
-ConcurrentMarking::ConcurrentMarking(
-    Heap* heap, MarkingWorklistsHolder* marking_worklists_holder,
-    WeakObjects* weak_objects)
+ConcurrentMarking::ConcurrentMarking(Heap* heap,
+                                     MarkingWorklists* marking_worklists,
+                                     WeakObjects* weak_objects)
     : heap_(heap),
-      marking_worklists_holder_(marking_worklists_holder),
+      marking_worklists_(marking_worklists),
       weak_objects_(weak_objects) {
 // The runtime flag should be set only if the compile time flag was set.
 #ifndef V8_CONCURRENT_MARKING
@@ -386,9 +387,9 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       GCTracer::BackgroundScope::MC_BACKGROUND_MARKING);
   size_t kBytesUntilInterruptCheck = 64 * KB;
   int kObjectsUntilInterrupCheck = 1000;
-  MarkingWorklists marking_worklists(task_id, marking_worklists_holder_);
+  MarkingWorklists::Local local_marking_worklists(marking_worklists_);
   ConcurrentMarkingVisitor visitor(
-      task_id, &marking_worklists, weak_objects_, heap_,
+      task_id, &local_marking_worklists, weak_objects_, heap_,
       task_state->mark_compact_epoch, Heap::GetBytecodeFlushMode(),
       heap_->local_embedder_heap_tracer()->InUse(), task_state->is_forced_gc,
       &task_state->memory_chunk_data);
@@ -416,7 +417,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       }
     }
   }
-  bool is_per_context_mode = marking_worklists.IsPerContextMode();
+  bool is_per_context_mode = local_marking_worklists.IsPerContextMode();
   bool done = false;
   while (!done) {
     size_t current_marked_bytes = 0;
@@ -424,7 +425,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
     while (current_marked_bytes < kBytesUntilInterruptCheck &&
            objects_processed < kObjectsUntilInterrupCheck) {
       HeapObject object;
-      if (!marking_worklists.Pop(&object)) {
+      if (!local_marking_worklists.Pop(&object)) {
         done = true;
         break;
       }
@@ -436,19 +437,19 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       Address addr = object.address();
       if ((new_space_top <= addr && addr < new_space_limit) ||
           addr == new_large_object) {
-        marking_worklists.PushOnHold(object);
+        local_marking_worklists.PushOnHold(object);
       } else {
         Map map = object.synchronized_map(isolate);
         if (is_per_context_mode) {
           Address context;
           if (native_context_inferrer.Infer(isolate, map, object, &context)) {
-            marking_worklists.SwitchToContext(context);
+            local_marking_worklists.SwitchToContext(context);
           }
         }
         size_t visited_size = visitor.Visit(map, object);
         if (is_per_context_mode) {
-          native_context_stats.IncrementSize(marking_worklists.Context(), map,
-                                             object, visited_size);
+          native_context_stats.IncrementSize(
+              local_marking_worklists.Context(), map, object, visited_size);
         }
         current_marked_bytes += visited_size;
       }
@@ -473,7 +474,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       }
     }
-    marking_worklists.FlushToGlobal();
+    local_marking_worklists.Publish();
     weak_objects_->transition_arrays.FlushToGlobal(task_id);
     weak_objects_->ephemeron_hash_tables.FlushToGlobal(task_id);
     weak_objects_->current_ephemerons.FlushToGlobal(task_id);
@@ -529,8 +530,6 @@ void ConcurrentMarking::ScheduleTasks() {
     total_task_count_ = Max(1, Min(kMaxTasks, (num_cores / 2) - 1));
   }
   DCHECK_LE(total_task_count_, kMaxTasks);
-  // One task is for the main thread.
-  STATIC_ASSERT(kMaxTasks + 1 <= MarkingWorklist::kMaxNumTasks);
   }
   // Task id 0 is for the main thread.
   for (int i = 1; i <= total_task_count_; i++) {
@@ -565,7 +564,7 @@ void ConcurrentMarking::RescheduleTasksIfNeeded() {
       return;
     }
   }
-  if (!marking_worklists_holder_->shared()->IsGlobalPoolEmpty() ||
+  if (!marking_worklists_->shared()->IsEmpty() ||
       !weak_objects_->current_ephemerons.IsGlobalPoolEmpty() ||
       !weak_objects_->discovered_ephemerons.IsGlobalPoolEmpty()) {
     ScheduleTasks();
...
@@ -70,8 +70,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarking {
   // task 0, reserved for the main thread).
   static constexpr int kMaxTasks = 7;
-  ConcurrentMarking(Heap* heap,
-                    MarkingWorklistsHolder* marking_worklists_holder,
+  ConcurrentMarking(Heap* heap, MarkingWorklists* marking_worklists,
                     WeakObjects* weak_objects);
   // Schedules asynchronous tasks to perform concurrent marking. Objects in the
@@ -118,7 +117,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarking {
   class Task;
   void Run(int task_id, TaskState* task_state);
   Heap* const heap_;
-  MarkingWorklistsHolder* const marking_worklists_holder_;
+  MarkingWorklists* const marking_worklists_;
   WeakObjects* const weak_objects_;
   TaskState task_state_[kMaxTasks + 1];
   std::atomic<size_t> total_marked_bytes_{0};
...
@@ -1259,7 +1259,7 @@ TimedHistogram* Heap::GCTypeTimer(GarbageCollector collector) {
     return isolate_->counters()->gc_finalize_reduce_memory();
   }
   if (incremental_marking()->IsMarking() &&
-      incremental_marking()->marking_worklists()->IsPerContextMode()) {
+      incremental_marking()->local_marking_worklists()->IsPerContextMode()) {
     return isolate_->counters()->gc_finalize_measure_memory();
   }
   return isolate_->counters()->gc_finalize();
@@ -3489,12 +3489,12 @@ void Heap::FinalizeIncrementalMarkingIfComplete(
   if (incremental_marking()->IsMarking() &&
       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
        (!incremental_marking()->finalize_marking_completed() &&
-        mark_compact_collector()->marking_worklists()->IsEmpty() &&
+        mark_compact_collector()->local_marking_worklists()->IsEmpty() &&
         local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
     FinalizeIncrementalMarkingIncrementally(gc_reason);
   } else if (incremental_marking()->IsComplete() ||
              (incremental_marking()->IsMarking() &&
-              mark_compact_collector()->marking_worklists()->IsEmpty() &&
+              mark_compact_collector()->local_marking_worklists()->IsEmpty() &&
              local_embedder_heap_tracer()
                   ->ShouldFinalizeIncrementalMarking())) {
     CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
@@ -5277,7 +5277,7 @@ void Heap::SetUp() {
   if (FLAG_concurrent_marking || FLAG_parallel_marking) {
     concurrent_marking_.reset(new ConcurrentMarking(
-        this, mark_compact_collector_->marking_worklists_holder(),
+        this, mark_compact_collector_->marking_worklists(),
         mark_compact_collector_->weak_objects()));
   } else {
     concurrent_marking_.reset(new ConcurrentMarking(this, nullptr, nullptr));
...
@@ -35,7 +35,7 @@ void IncrementalMarking::TransferColor(HeapObject from, HeapObject to) {
 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
   if (marking_state()->WhiteToGrey(obj)) {
-    marking_worklists()->Push(obj);
+    local_marking_worklists()->Push(obj);
     return true;
   }
   return false;
...
@@ -432,7 +432,8 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
       heap()->minor_mark_compact_collector()->marking_state();
 #endif  // ENABLE_MINOR_MC
-  collector_->marking_worklists_holder()->Update(
+  collector_->local_marking_worklists()->Publish();
+  collector_->marking_worklists()->Update(
       [
 #ifdef DEBUG
           // this is referred inside DCHECK.
@@ -633,7 +634,7 @@ StepResult IncrementalMarking::EmbedderStep(double expected_duration_ms,
     HeapObject object;
     size_t cnt = 0;
     empty_worklist = true;
-    while (marking_worklists()->PopEmbedder(&object)) {
+    while (local_marking_worklists()->PopEmbedder(&object)) {
       scope.TracePossibleWrapper(JSObject::cast(object));
       if (++cnt == kObjectsToProcessBeforeDeadlineCheck) {
         if (deadline <= heap_->MonotonicallyIncreasingTimeInMs()) {
@@ -658,7 +659,7 @@ StepResult IncrementalMarking::EmbedderStep(double expected_duration_ms,
 }
 void IncrementalMarking::Hurry() {
-  if (!marking_worklists()->IsEmpty()) {
+  if (!local_marking_worklists()->IsEmpty()) {
     double start = 0.0;
     if (FLAG_trace_incremental_marking) {
       start = heap_->MonotonicallyIncreasingTimeInMs();
@@ -1042,14 +1043,14 @@ StepResult IncrementalMarking::Step(double max_step_size_in_ms,
     // It is safe to merge back all objects that were on hold to the shared
     // work list at Step because we are at a safepoint where all objects
     // are properly initialized.
-    marking_worklists()->MergeOnHold();
+    local_marking_worklists()->MergeOnHold();
   }
 // Only print marking worklist in debug mode to save ~40KB of code size.
 #ifdef DEBUG
   if (FLAG_trace_incremental_marking && FLAG_trace_concurrent_marking &&
       FLAG_trace_gc_verbose) {
-    collector_->marking_worklists_holder()->Print();
+    collector_->marking_worklists()->Print();
   }
 #endif
   if (FLAG_trace_incremental_marking) {
@@ -1073,7 +1074,7 @@ StepResult IncrementalMarking::Step(double max_step_size_in_ms,
     // assumption is that large graphs are well connected and can mostly be
     // processed on their own. For small graphs, helping is not necessary.
     v8_bytes_processed = collector_->ProcessMarkingWorklist(bytes_to_process);
-    StepResult v8_result = marking_worklists()->IsEmpty()
+    StepResult v8_result = local_marking_worklists()->IsEmpty()
                                ? StepResult::kNoImmediateWork
                                : StepResult::kMoreWorkRemaining;
     StepResult embedder_result = StepResult::kNoImmediateWork;
@@ -1098,7 +1099,7 @@ StepResult IncrementalMarking::Step(double max_step_size_in_ms,
       }
     }
     if (FLAG_concurrent_marking) {
-      marking_worklists()->ShareWorkIfGlobalPoolIsEmpty();
+      local_marking_worklists()->ShareWork();
       heap_->concurrent_marking()->RescheduleTasksIfNeeded();
     }
   }
...
@@ -209,8 +209,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
     }
   }
-  MarkingWorklists* marking_worklists() const {
-    return collector_->marking_worklists();
+  MarkingWorklists::Local* local_marking_worklists() const {
+    return collector_->local_marking_worklists();
   }
   void Deactivate();
...
@@ -10,6 +10,8 @@
 #include "src/heap/heap-inl.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact.h"
+#include "src/heap/marking-worklist-inl.h"
+#include "src/heap/marking-worklist.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/remembered-set-inl.h"
 #include "src/objects/js-collection-inl.h"
@@ -22,7 +24,7 @@ namespace internal {
 void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
   if (marking_state()->WhiteToGrey(obj)) {
-    marking_worklists()->Push(obj);
+    local_marking_worklists()->Push(obj);
     if (V8_UNLIKELY(FLAG_track_retaining_path)) {
       heap_->AddRetainer(host, obj);
     }
@@ -31,7 +33,7 @@ void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
 void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
   if (marking_state()->WhiteToGrey(obj)) {
-    marking_worklists()->Push(obj);
+    local_marking_worklists()->Push(obj);
     if (V8_UNLIKELY(FLAG_track_retaining_path)) {
       heap_->AddRetainingRoot(root, obj);
     }
@@ -51,7 +53,7 @@ void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
 void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
   if (marking_state()->WhiteToGrey(obj)) {
-    marking_worklists()->Push(obj);
+    local_marking_worklists()->Push(obj);
     if (V8_UNLIKELY(FLAG_track_retaining_path)) {
       heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
     }
...
@@ -455,7 +455,8 @@ void MarkCompactCollector::TearDown() {
   AbortCompaction();
   AbortWeakObjects();
   if (heap()->incremental_marking()->IsMarking()) {
-    marking_worklists_holder()->Clear();
+    local_marking_worklists()->Publish();
+    marking_worklists()->Clear();
   }
 }
@@ -520,12 +521,12 @@ void MarkCompactCollector::StartMarking() {
       contexts.push_back(context->ptr());
     }
   }
-  marking_worklists_holder()->CreateContextWorklists(contexts);
-  marking_worklists_ = std::make_unique<MarkingWorklists>(
-      kMainThreadTask, marking_worklists_holder());
+  marking_worklists()->CreateContextWorklists(contexts);
+  local_marking_worklists_ =
+      std::make_unique<MarkingWorklists::Local>(marking_worklists());
   marking_visitor_ = std::make_unique<MarkingVisitor>(
-      marking_state(), marking_worklists(), weak_objects(), heap_, epoch(),
-      Heap::GetBytecodeFlushMode(),
+      marking_state(), local_marking_worklists(), weak_objects(), heap_,
+      epoch(), Heap::GetBytecodeFlushMode(),
       heap_->local_embedder_heap_tracer()->InUse(),
       heap_->is_current_gc_forced());
   // Marking bits are cleared by the sweeper.
@@ -912,7 +913,7 @@ void MarkCompactCollector::FinishConcurrentMarking(
 }
 void MarkCompactCollector::VerifyMarking() {
-  CHECK(marking_worklists()->IsEmpty());
+  CHECK(local_marking_worklists()->IsEmpty());
   DCHECK(heap_->incremental_marking()->IsStopped());
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
@@ -939,8 +940,8 @@ void MarkCompactCollector::Finish() {
 #endif
   marking_visitor_.reset();
-  marking_worklists_.reset();
-  marking_worklists_holder_.ReleaseContextWorklists();
+  local_marking_worklists_.reset();
+  marking_worklists_.ReleaseContextWorklists();
   native_context_stats_.Clear();
   CHECK(weak_objects_.current_ephemerons.IsEmpty());
@@ -1663,14 +1664,14 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
     CHECK(weak_objects_.current_ephemerons.IsEmpty());
     CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
-    work_to_do = work_to_do || !marking_worklists()->IsEmpty() ||
+    work_to_do = work_to_do || !local_marking_worklists()->IsEmpty() ||
                  heap()->concurrent_marking()->ephemeron_marked() ||
-                 !marking_worklists()->IsEmbedderEmpty() ||
+                 !local_marking_worklists()->IsEmbedderEmpty() ||
                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
     ++iterations;
   }
-  CHECK(marking_worklists()->IsEmpty());
+  CHECK(local_marking_worklists()->IsEmpty());
   CHECK(weak_objects_.current_ephemerons.IsEmpty());
   CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
 }
@@ -1759,7 +1760,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
   weak_objects_.next_ephemerons.Iterate([&](Ephemeron ephemeron) {
     if (non_atomic_marking_state()->IsBlackOrGrey(ephemeron.key) &&
         non_atomic_marking_state()->WhiteToGrey(ephemeron.value)) {
-      marking_worklists()->Push(ephemeron.value);
+      local_marking_worklists()->Push(ephemeron.value);
     }
   });
@@ -1780,8 +1781,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
     // for work_to_do are not sufficient for determining if another iteration
    // is necessary.
-    work_to_do = !marking_worklists()->IsEmpty() ||
-                 !marking_worklists()->IsEmbedderEmpty() ||
+    work_to_do = !local_marking_worklists()->IsEmpty() ||
+                 !local_marking_worklists()->IsEmbedderEmpty() ||
                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
     CHECK(weak_objects_.discovered_ephemerons.IsEmpty());
   }
@@ -1789,7 +1790,7 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
   ResetNewlyDiscovered();
   ephemeron_marking_.newly_discovered.shrink_to_fit();
-  CHECK(marking_worklists()->IsEmpty());
+  CHECK(local_marking_worklists()->IsEmpty());
 }
 void MarkCompactCollector::PerformWrapperTracing() {
@@ -1799,7 +1800,7 @@ void MarkCompactCollector::PerformWrapperTracing() {
     LocalEmbedderHeapTracer::ProcessingScope scope(
         heap_->local_embedder_heap_tracer());
     HeapObject object;
-    while (marking_worklists()->PopEmbedder(&object)) {
+    while (local_marking_worklists()->PopEmbedder(&object)) {
       scope.TracePossibleWrapper(JSObject::cast(object));
     }
   }
@@ -1814,10 +1815,10 @@ template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
 size_t MarkCompactCollector::ProcessMarkingWorklist(size_t bytes_to_process) {
   HeapObject object;
   size_t bytes_processed = 0;
-  bool is_per_context_mode = marking_worklists()->IsPerContextMode();
+  bool is_per_context_mode = local_marking_worklists()->IsPerContextMode();
   Isolate* isolate = heap()->isolate();
-  while (marking_worklists()->Pop(&object) ||
-         marking_worklists()->PopOnHold(&object)) {
+  while (local_marking_worklists()->Pop(&object) ||
+         local_marking_worklists()->PopOnHold(&object)) {
     // Left trimming may result in grey or black filler objects on the marking
     // worklist. Ignore these objects.
     if (object.IsFreeSpaceOrFiller()) {
@@ -1844,13 +1845,13 @@ size_t MarkCompactCollector::ProcessMarkingWorklist(size_t bytes_to_process) {
     if (is_per_context_mode) {
       Address context;
       if (native_context_inferrer_.Infer(isolate, map, object, &context)) {
-        marking_worklists()->SwitchToContext(context);
+        local_marking_worklists()->SwitchToContext(context);
       }
     }
     size_t visited_size = marking_visitor_->Visit(map, object);
     if (is_per_context_mode) {
-      native_context_stats_.IncrementSize(marking_worklists()->Context(), map,
-                                          object, visited_size);
+      native_context_stats_.IncrementSize(local_marking_worklists()->Context(),
+                                          map, object, visited_size);
     }
     bytes_processed += visited_size;
     if (bytes_to_process && bytes_processed >= bytes_to_process) {
@@ -1871,7 +1872,7 @@ template size_t MarkCompactCollector::ProcessMarkingWorklist<
 bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
   if (marking_state()->IsBlackOrGrey(key)) {
     if (marking_state()->WhiteToGrey(value)) {
-      marking_worklists()->Push(value);
+      local_marking_worklists()->Push(value);
       return true;
     }
@@ -1883,7 +1884,7 @@ bool MarkCompactCollector::ProcessEphemeron(HeapObject key, HeapObject value) {
 }
 void MarkCompactCollector::ProcessEphemeronMarking() {
-  DCHECK(marking_worklists()->IsEmpty());
+  DCHECK(local_marking_worklists()->IsEmpty());
   // Incremental marking might leave ephemerons in main task's local
   // buffer, flush it into global pool.
@@ -1891,7 +1892,7 @@ void MarkCompactCollector::ProcessEphemeronMarking() {
   ProcessEphemeronsUntilFixpoint();
-  CHECK(marking_worklists()->IsEmpty());
+  CHECK(local_marking_worklists()->IsEmpty());
   CHECK(heap()->local_embedder_heap_tracer()->IsRemoteTracingDone());
 }
@@ -1983,7 +1984,7 @@ void MarkCompactCollector::MarkLiveObjects() {
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
-    DCHECK(marking_worklists()->IsEmpty());
+    DCHECK(local_marking_worklists()->IsEmpty());
     // Mark objects reachable through the embedder heap. This phase is
     // opportunistic as it may not discover graphs that are only reachable
@@ -1998,9 +1999,9 @@ void MarkCompactCollector::MarkLiveObjects() {
       PerformWrapperTracing();
       DrainMarkingWorklist();
     } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
-             !marking_worklists()->IsEmbedderEmpty());
-    DCHECK(marking_worklists()->IsEmbedderEmpty());
-    DCHECK(marking_worklists()->IsEmpty());
+             !local_marking_worklists()->IsEmbedderEmpty());
+    DCHECK(local_marking_worklists()->IsEmbedderEmpty());
+    DCHECK(local_marking_worklists()->IsEmpty());
   }
   // The objects reachable from the roots are marked, yet unreachable objects
@@ -2010,7 +2011,7 @@ void MarkCompactCollector::MarkLiveObjects() {
     TRACE_GC(heap()->tracer(),
              GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON);
     ProcessEphemeronMarking();
-    DCHECK(marking_worklists()->IsEmpty());
+    DCHECK(local_marking_worklists()->IsEmpty());
   }
   // The objects reachable from the roots, weak maps, and embedder heap
@@ -2042,8 +2043,8 @@ void MarkCompactCollector::MarkLiveObjects() {
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
     ProcessEphemeronMarking();
-    DCHECK(marking_worklists()->IsEmbedderEmpty());
-    DCHECK(marking_worklists()->IsEmpty());
+    DCHECK(local_marking_worklists()->IsEmbedderEmpty());
+    DCHECK(local_marking_worklists()->IsEmpty());
   }
   {
...
@@ -377,13 +377,13 @@ class MainMarkingVisitor final
   };
   MainMarkingVisitor(MarkingState* marking_state,
-                     MarkingWorklists* marking_worklists,
+                     MarkingWorklists::Local* local_marking_worklists,
                      WeakObjects* weak_objects, Heap* heap,
                      unsigned mark_compact_epoch,
                      BytecodeFlushMode bytecode_flush_mode,
                      bool embedder_tracing_enabled, bool is_forced_gc)
       : MarkingVisitorBase<MainMarkingVisitor<MarkingState>, MarkingState>(
-            kMainThreadTask, marking_worklists, weak_objects, heap,
+            kMainThreadTask, local_marking_worklists, weak_objects, heap,
             mark_compact_epoch, bytecode_flush_mode, embedder_tracing_enabled,
             is_forced_gc),
         marking_state_(marking_state),
@@ -527,10 +527,11 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   bool evacuation() const { return evacuation_; }
-  MarkingWorklistsHolder* marking_worklists_holder() {
-    return &marking_worklists_holder_;
+  MarkingWorklists* marking_worklists() { return &marking_worklists_; }
+  MarkingWorklists::Local* local_marking_worklists() {
+    return local_marking_worklists_.get();
   }
-  MarkingWorklists* marking_worklists() { return marking_worklists_.get(); }
   WeakObjects* weak_objects() { return &weak_objects_; }
@@ -758,13 +759,13 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   bool have_code_to_deoptimize_;
-  MarkingWorklistsHolder marking_worklists_holder_;
+  MarkingWorklists marking_worklists_;
   WeakObjects weak_objects_;
   EphemeronMarking ephemeron_marking_;
   std::unique_ptr<MarkingVisitor> marking_visitor_;
-  std::unique_ptr<MarkingWorklists> marking_worklists_;
+  std::unique_ptr<MarkingWorklists::Local> local_marking_worklists_;
   NativeContextInferrer native_context_inferrer_;
   NativeContextStats native_context_stats_;
...
@@ -35,7 +35,7 @@ bool MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
 bool MarkingBarrier::WhiteToGreyAndPush(HeapObject obj) {
   if (marking_state_.WhiteToGrey(obj)) {
-    collector_->marking_worklists()->Push(obj);
+    collector_->local_marking_worklists()->Push(obj);
     return true;
   }
   return false;
...
@@ -22,7 +22,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
     HeapObject host, HeapObject object) {
   concrete_visitor()->SynchronizePageAccess(object);
   if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
-    marking_worklists_->Push(object);
+    local_marking_worklists_->Push(object);
     if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
                     TraceRetainingPathMode::kEnabled)) {
       heap_->AddRetainer(host, object);
@@ -183,7 +183,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
     if (end < size) {
       // The object can be pushed back onto the marking worklist only after
       // progress bar was updated.
-      marking_worklists_->Push(object);
+      local_marking_worklists_->Push(object);
     }
   }
   return end - start;
@@ -220,7 +220,7 @@ int MarkingVisitorBase<ConcreteVisitor,
   if (size && is_embedder_tracing_enabled_) {
     // Success: The object needs to be processed for embedder references on
     // the main thread.
-    marking_worklists_->PushEmbedder(object);
+    local_marking_worklists_->PushEmbedder(object);
   }
   return size;
 }
...
@@ -147,12 +147,13 @@ class MarkingStateBase {
 template <typename ConcreteVisitor, typename MarkingState>
 class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
  public:
-  MarkingVisitorBase(int task_id, MarkingWorklists* marking_worklists,
+  MarkingVisitorBase(int task_id,
+                     MarkingWorklists::Local* local_marking_worklists,
                      WeakObjects* weak_objects, Heap* heap,
                      unsigned mark_compact_epoch,
                      BytecodeFlushMode bytecode_flush_mode,
                      bool is_embedder_tracing_enabled, bool is_forced_gc)
-      : marking_worklists_(marking_worklists),
+      : local_marking_worklists_(local_marking_worklists),
         weak_objects_(weak_objects),
         heap_(heap),
         task_id_(task_id),
@@ -231,7 +232,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> {
   // Marks the object grey and pushes it on the marking work list.
   V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
-  MarkingWorklists* const marking_worklists_;
+  MarkingWorklists::Local* const local_marking_worklists_;
   WeakObjects* const weak_objects_;
   Heap* const heap_;
   const int task_id_;
...
@@ -7,6 +7,7 @@
 #include <algorithm>
 #include <map>
+#include "src/heap/marking-worklist-inl.h"
 #include "src/objects/heap-object-inl.h"
 #include "src/objects/heap-object.h"
 #include "src/objects/instance-type-inl.h"
@@ -17,27 +18,35 @@
 namespace v8 {
 namespace internal {
-MarkingWorklistsHolder::~MarkingWorklistsHolder() {
+MarkingWorklists::~MarkingWorklists() {
+  DCHECK(shared_.IsEmpty());
+  DCHECK(on_hold_.IsEmpty());
+  DCHECK(other_.IsEmpty());
   DCHECK(worklists_.empty());
   DCHECK(context_worklists_.empty());
 }
-void MarkingWorklistsHolder::Clear() {
+void MarkingWorklists::Clear() {
   shared_.Clear();
   on_hold_.Clear();
   embedder_.Clear();
+  other_.Clear();
   for (auto cw : context_worklists_) {
+    if (cw.context == kSharedContext || cw.context == kOtherContext) {
+      // These contexts were cleared above.
+      continue;
+    }
     cw.worklist->Clear();
   }
   ReleaseContextWorklists();
 }
-void MarkingWorklistsHolder::Print() {
+void MarkingWorklists::Print() {
   PrintWorklist("shared", &shared_);
   PrintWorklist("on_hold", &on_hold_);
 }
-void MarkingWorklistsHolder::CreateContextWorklists(
+void MarkingWorklists::CreateContextWorklists(
     const std::vector<Address>& contexts) {
   DCHECK(worklists_.empty());
   DCHECK(context_worklists_.empty());
@@ -53,17 +62,17 @@ void MarkingWorklistsHolder::CreateContextWorklists(
   }
 }
-void MarkingWorklistsHolder::ReleaseContextWorklists() {
+void MarkingWorklists::ReleaseContextWorklists() {
   context_worklists_.clear();
   worklists_.clear();
 }
-void MarkingWorklistsHolder::PrintWorklist(const char* worklist_name,
-                                           MarkingWorklist* worklist) {
+void MarkingWorklists::PrintWorklist(const char* worklist_name,
                                      MarkingWorklist* worklist) {
 #ifdef DEBUG
   std::map<InstanceType, int> count;
   int total_count = 0;
-  worklist->IterateGlobalPool([&count, &total_count](HeapObject obj) {
+  worklist->Iterate([&count, &total_count](HeapObject obj) {
     ++total_count;
     count[obj.map().instance_type()]++;
   });
@@ -85,111 +94,128 @@ void MarkingWorklistsHolder::PrintWorklist(const char* worklist_name,
 #endif
 }
-MarkingWorklists::MarkingWorklists(int task_id, MarkingWorklistsHolder* holder)
-    : shared_(holder->shared()),
-      on_hold_(holder->on_hold()),
-      embedder_(holder->embedder()),
-      active_(shared_),
-      active_context_(kSharedContext),
-      task_id_(task_id),
-      is_per_context_mode_(false),
-      context_worklists_(holder->context_worklists()) {
-  if (!context_worklists_.empty()) {
+const Address MarkingWorklists::Local::kSharedContext;
+const Address MarkingWorklists::Local::kOtherContext;
+
+MarkingWorklists::Local::Local(MarkingWorklists* global)
+    : on_hold_(global->on_hold()),
+      embedder_(global->embedder()),
+      is_per_context_mode_(false) {
+  if (global->context_worklists().empty()) {
+    MarkingWorklist::Local shared(global->shared());
+    active_ = std::move(shared);
+    active_context_ = kSharedContext;
+    active_owner_ = nullptr;
+  } else {
     is_per_context_mode_ = true;
-    worklist_by_context_.reserve(context_worklists_.size());
-    for (auto& cw : context_worklists_) {
-      worklist_by_context_[cw.context] = cw.worklist;
+    worklist_by_context_.reserve(global->context_worklists().size());
+    for (auto& cw : global->context_worklists()) {
+      worklist_by_context_[cw.context] =
+          std::make_unique<MarkingWorklist::Local>(cw.worklist);
+    }
+    active_owner_ = worklist_by_context_[kSharedContext].get();
+    active_ = std::move(*active_owner_);
+    active_context_ = kSharedContext;
+  }
+}
+
+MarkingWorklists::Local::~Local() {
+  DCHECK(active_.IsLocalEmpty());
+  if (is_per_context_mode_) {
+    for (auto& cw : worklist_by_context_) {
+      if (cw.first != active_context_) {
+        DCHECK(cw.second->IsLocalEmpty());
+      }
     }
   }
 }
-void MarkingWorklists::FlushToGlobal() {
-  shared_->FlushToGlobal(task_id_);
-  on_hold_->FlushToGlobal(task_id_);
-  embedder_->FlushToGlobal(task_id_);
+void MarkingWorklists::Local::Publish() {
+  active_.Publish();
+  on_hold_.Publish();
+  embedder_.Publish();
   if (is_per_context_mode_) {
-    for (auto& cw : context_worklists_) {
-      cw.worklist->FlushToGlobal(task_id_);
+    for (auto& cw : worklist_by_context_) {
+      if (cw.first != active_context_) {
+        cw.second->Publish();
+      }
     }
   }
 }
-bool MarkingWorklists::IsEmpty() {
+bool MarkingWorklists::Local::IsEmpty() {
   // This function checks the on_hold_ worklist, so it works only for the main
   // thread.
-  DCHECK_EQ(kMainThreadTask, task_id_);
-  if (!active_->IsLocalEmpty(task_id_) || !on_hold_->IsLocalEmpty(task_id_) ||
-      !active_->IsGlobalPoolEmpty() || !on_hold_->IsGlobalPoolEmpty()) {
+  if (!active_.IsLocalEmpty() || !on_hold_.IsLocalEmpty() ||
+      !active_.IsGlobalEmpty() || !on_hold_.IsGlobalEmpty()) {
     return false;
   }
   if (!is_per_context_mode_) {
-    DCHECK_EQ(active_, shared_);
     return true;
   }
-  for (auto& cw : context_worklists_) {
-    if (!cw.worklist->IsLocalEmpty(task_id_) ||
-        !cw.worklist->IsGlobalPoolEmpty()) {
-      active_ = cw.worklist;
-      active_context_ = cw.context;
+  for (auto& cw : worklist_by_context_) {
+    if (cw.first != active_context_ &&
        !(cw.second->IsLocalEmpty() && cw.second->IsGlobalEmpty())) {
+      SwitchToContext(cw.first, cw.second.get());
      return false;
     }
   }
   return true;
 }
-bool MarkingWorklists::IsEmbedderEmpty() {
-  return embedder_->IsLocalEmpty(task_id_) && embedder_->IsGlobalPoolEmpty();
+bool MarkingWorklists::Local::IsEmbedderEmpty() const {
+  return embedder_.IsLocalEmpty() && embedder_.IsGlobalEmpty();
 }
-void MarkingWorklists::ShareWorkIfGlobalPoolIsEmpty() {
-  if (!shared_->IsLocalEmpty(task_id_) && shared_->IsGlobalPoolEmpty()) {
-    shared_->FlushToGlobal(task_id_);
+void MarkingWorklists::Local::ShareWork() {
+  if (!active_.IsLocalEmpty() && active_.IsGlobalEmpty()) {
+    active_.Publish();
   }
-  if (is_per_context_mode_ && shared_ != active_) {
-    if (!active_->IsLocalEmpty(task_id_) && active_->IsGlobalPoolEmpty()) {
-      active_->FlushToGlobal(task_id_);
+  if (is_per_context_mode_ && active_context_ != kSharedContext) {
+    MarkingWorklist::Local* shared = worklist_by_context_[kSharedContext].get();
+    if (!shared->IsLocalEmpty() && shared->IsGlobalEmpty()) {
+      shared->Publish();
     }
   }
 }
-void MarkingWorklists::MergeOnHold() {
-  DCHECK_EQ(kMainThreadTask, task_id_);
-  shared_->MergeGlobalPool(on_hold_);
+void MarkingWorklists::Local::MergeOnHold() {
+  MarkingWorklist::Local* shared =
+      active_context_ == kSharedContext
+          ? &active_
+          : worklist_by_context_[kSharedContext].get();
+  shared->Merge(&on_hold_);
 }
-bool MarkingWorklists::PopContext(HeapObject* object) {
+bool MarkingWorklists::Local::PopContext(HeapObject* object) {
   DCHECK(is_per_context_mode_);
   // As an optimization we first check only the local segments to avoid locks.
-  for (auto& cw : context_worklists_) {
-    if (!cw.worklist->IsLocalEmpty(task_id_)) {
-      active_ = cw.worklist;
-      active_context_ = cw.context;
-      return active_->Pop(task_id_, object);
+  for (auto& cw : worklist_by_context_) {
+    if (cw.first != active_context_ && !cw.second->IsLocalEmpty()) {
+      SwitchToContext(cw.first, cw.second.get());
+      return active_.Pop(object);
     }
   }
   // All local segments are empty. Check global segments.
-  for (auto& cw : context_worklists_) {
-    if (cw.worklist->Pop(task_id_, object)) {
-      active_ = cw.worklist;
-      active_context_ = cw.context;
+  for (auto& cw : worklist_by_context_) {
    if (cw.first != active_context_ && cw.second->Pop(object)) {
+      SwitchToContext(cw.first, cw.second.get());
      return true;
     }
   }
   // All worklists are empty. Switch to the default shared worklist.
-  SwitchToShared();
+  SwitchToContext(kSharedContext);
   return false;
 }
-Address MarkingWorklists::SwitchToContextSlow(Address context) {
+Address MarkingWorklists::Local::SwitchToContextSlow(Address context) {
   const auto& it = worklist_by_context_.find(context);
   if (V8_UNLIKELY(it == worklist_by_context_.end())) {
     // This context was created during marking or is not being measured,
     // so we don't have a specific worklist for it.
-    active_context_ = kOtherContext;
-    active_ = worklist_by_context_[active_context_];
+    SwitchToContext(kOtherContext, worklist_by_context_[kOtherContext].get());
   } else {
-    active_ = it->second;
-    active_context_ = context;
+    SwitchToContext(it->first, it->second.get());
   }
   return active_context_;
 }
...
@@ -4,13 +4,14 @@
 #include <stdlib.h>
-#include "src/init/v8.h"
 #include "src/heap/concurrent-marking.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/heap.h"
 #include "src/heap/mark-compact.h"
+#include "src/heap/marking-worklist-inl.h"
+#include "src/heap/marking-worklist.h"
 #include "src/heap/worklist.h"
+#include "src/init/v8.h"
 #include "test/cctest/cctest.h"
 #include "test/cctest/heap/heap-utils.h"
@@ -19,10 +20,11 @@ namespace internal {
 namespace heap {
 void PublishSegment(MarkingWorklist* worklist, HeapObject object) {
-  for (size_t i = 0; i <= MarkingWorklist::kSegmentCapacity; i++) {
-    worklist->Push(0, object);
+  MarkingWorklist::Local local(worklist);
+  for (size_t i = 0; i <= MarkingWorklist::kSegmentSize; i++) {
+    local.Push(object);
   }
-  CHECK(worklist->Pop(0, &object));
+  CHECK(local.Pop(&object));
 }
 TEST(ConcurrentMarking) {
@@ -36,11 +38,11 @@ TEST(ConcurrentMarking) {
     collector->EnsureSweepingCompleted();
   }
-  MarkingWorklistsHolder marking_worklists_holder;
+  MarkingWorklists marking_worklists;
   WeakObjects weak_objects;
   ConcurrentMarking* concurrent_marking =
-      new ConcurrentMarking(heap, &marking_worklists_holder, &weak_objects);
-  PublishSegment(marking_worklists_holder.shared(),
+      new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
+  PublishSegment(marking_worklists.shared(),
                  ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleTasks();
   concurrent_marking->Stop(
@@ -59,16 +61,16 @@ TEST(ConcurrentMarkingReschedule) {
     collector->EnsureSweepingCompleted();
   }
-  MarkingWorklistsHolder marking_worklists_holder;
+  MarkingWorklists marking_worklists;
   WeakObjects weak_objects;
   ConcurrentMarking* concurrent_marking =
-      new ConcurrentMarking(heap, &marking_worklists_holder, &weak_objects);
-  PublishSegment(marking_worklists_holder.shared(),
+      new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
+  PublishSegment(marking_worklists.shared(),
                  ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleTasks();
   concurrent_marking->Stop(
       ConcurrentMarking::StopRequest::COMPLETE_ONGOING_TASKS);
-  PublishSegment(marking_worklists_holder.shared(),
+  PublishSegment(marking_worklists.shared(),
                  ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->RescheduleTasksIfNeeded();
   concurrent_marking->Stop(
@@ -87,17 +89,17 @@ TEST(ConcurrentMarkingPreemptAndReschedule) {
     collector->EnsureSweepingCompleted();
   }
-  MarkingWorklistsHolder marking_worklists_holder;
+  MarkingWorklists marking_worklists;
   WeakObjects weak_objects;
   ConcurrentMarking* concurrent_marking =
-      new ConcurrentMarking(heap, &marking_worklists_holder, &weak_objects);
+      new ConcurrentMarking(heap, &marking_worklists, &weak_objects);
   for (int i = 0; i < 5000; i++)
-    PublishSegment(marking_worklists_holder.shared(),
+    PublishSegment(marking_worklists.shared(),
                    ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->ScheduleTasks();
   concurrent_marking->Stop(ConcurrentMarking::StopRequest::PREEMPT_TASKS);
   for (int i = 0; i < 5000; i++)
-    PublishSegment(marking_worklists_holder.shared(),
+    PublishSegment(marking_worklists.shared(),
                    ReadOnlyRoots(heap).undefined_value());
   concurrent_marking->RescheduleTasksIfNeeded();
   concurrent_marking->Stop(
...
@@ -2365,7 +2365,7 @@ TEST(IdleNotificationFinishMarking) {
                    StepOrigin::kV8);
   } while (!CcTest::heap()
                 ->mark_compact_collector()
-                ->marking_worklists()
+                ->local_marking_worklists()
                 ->IsEmpty());
   marking->SetWeakClosureWasOverApproximatedForTesting(true);
...
...@@ -2,12 +2,14 @@ ...@@ -2,12 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be // Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. // found in the LICENSE file.
#include "src/heap/marking-worklist.h"
#include <cmath> #include <cmath>
#include <limits> #include <limits>
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/heap/heap.h" #include "src/heap/heap.h"
#include "src/heap/marking-worklist.h" #include "src/heap/marking-worklist-inl.h"
#include "test/unittests/test-utils.h" #include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h" #include "testing/gtest/include/gtest/gtest.h"
@@ -17,8 +19,8 @@ namespace internal {
using MarkingWorklistTest = TestWithContext;
TEST_F(MarkingWorklistTest, PushPop) {
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
- MarkingWorklists worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worklists.Push(pushed_object);
@@ -28,8 +30,8 @@ TEST_F(MarkingWorklistTest, PushPop) {
}
TEST_F(MarkingWorklistTest, PushPopOnHold) {
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
- MarkingWorklists worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worklists.PushOnHold(pushed_object);
@@ -39,8 +41,8 @@ TEST_F(MarkingWorklistTest, PushPopOnHold) {
}
TEST_F(MarkingWorklistTest, PushPopEmbedder) {
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
- MarkingWorklists worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worklists.PushEmbedder(pushed_object);
@@ -50,13 +52,13 @@ TEST_F(MarkingWorklistTest, PushPopEmbedder) {
}
TEST_F(MarkingWorklistTest, MergeOnHold) {
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
- MarkingWorklists main_worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local main_worklists(&holder);
- MarkingWorklists worker_worklists(kMainThreadTask + 1, &holder);
+ MarkingWorklists::Local worker_worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worker_worklists.PushOnHold(pushed_object);
- worker_worklists.FlushToGlobal();
+ worker_worklists.Publish();
  main_worklists.MergeOnHold();
  HeapObject popped_object;
  EXPECT_TRUE(main_worklists.Pop(&popped_object));
@@ -64,13 +66,13 @@ TEST_F(MarkingWorklistTest, MergeOnHold) {
}
TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
- MarkingWorklists main_worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local main_worklists(&holder);
- MarkingWorklists worker_worklists(kMainThreadTask + 1, &holder);
+ MarkingWorklists::Local worker_worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  main_worklists.Push(pushed_object);
- main_worklists.ShareWorkIfGlobalPoolIsEmpty();
+ main_worklists.ShareWork();
  HeapObject popped_object;
  EXPECT_TRUE(worker_worklists.Pop(&popped_object));
  EXPECT_EQ(popped_object, pushed_object);
@@ -78,9 +80,9 @@ TEST_F(MarkingWorklistTest, ShareWorkIfGlobalPoolIsEmpty) {
TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
  const Address context = 0xabcdef;
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
  holder.CreateContextWorklists({context});
- MarkingWorklists worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worklists.SwitchToContext(context);
@@ -94,9 +96,9 @@ TEST_F(MarkingWorklistTest, ContextWorklistsPushPop) {
TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
  const Address context = 0xabcdef;
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
  holder.CreateContextWorklists({context});
- MarkingWorklists worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  worklists.SwitchToContext(context);
@@ -114,15 +116,15 @@ TEST_F(MarkingWorklistTest, ContextWorklistsEmpty) {
TEST_F(MarkingWorklistTest, ContextWorklistCrossTask) {
  const Address context1 = 0x1abcdef;
  const Address context2 = 0x2abcdef;
- MarkingWorklistsHolder holder;
+ MarkingWorklists holder;
  holder.CreateContextWorklists({context1, context2});
- MarkingWorklists main_worklists(kMainThreadTask, &holder);
+ MarkingWorklists::Local main_worklists(&holder);
- MarkingWorklists worker_worklists(kMainThreadTask + 1, &holder);
+ MarkingWorklists::Local worker_worklists(&holder);
  HeapObject pushed_object =
      ReadOnlyRoots(i_isolate()->heap()).undefined_value();
  main_worklists.SwitchToContext(context1);
  main_worklists.Push(pushed_object);
- main_worklists.ShareWorkIfGlobalPoolIsEmpty();
+ main_worklists.ShareWork();
  worker_worklists.SwitchToContext(context2);
  HeapObject popped_object;
  EXPECT_TRUE(worker_worklists.Pop(&popped_object));
...
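Taken together, the unit-test hunks show the shape of the renamed API: the global container is now MarkingWorklists, a MarkingWorklists::Local is constructed from a pointer to it with no task id, FlushToGlobal() becomes Publish(), and ShareWorkIfGlobalPoolIsEmpty() becomes ShareWork(). Below is a condensed sketch of the producer/consumer handoff these tests exercise; the identifiers are taken from the hunks above, while the surrounding gtest fixture and isolate setup are assumed.

    // Two thread-local views over one global MarkingWorklists instance.
    MarkingWorklists holder;
    MarkingWorklists::Local worker_worklists(&holder);
    MarkingWorklists::Local main_worklists(&holder);

    HeapObject object = ReadOnlyRoots(i_isolate()->heap()).undefined_value();

    // Producer: push into the worker's private on-hold segment, then publish it.
    worker_worklists.PushOnHold(object);
    worker_worklists.Publish();

    // Consumer: merge the published on-hold work into the main-thread worklist
    // and drain it through the regular Pop() path.
    main_worklists.MergeOnHold();
    HeapObject popped;
    EXPECT_TRUE(main_worklists.Pop(&popped));
    EXPECT_EQ(popped, object);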