Commit 7da3767f authored by Omer Katz, committed by V8 LUCI CQ

[heap] Update GC scopes and metrics for MinorMC

Bug: v8:12612
Change-Id: Iae827f12611d5028e25d72a9270bcf86240b4f20
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3904413
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83360}
parent e4144427
@@ -817,45 +817,68 @@ void GCTracer::PrintNVP() const {
"minor_mc=%.2f "
"time_to_safepoint=%.2f "
"mark=%.2f "
"mark.incremental_roots=%.2f "
"mark.finish_incremental=%.2f "
"mark.seed=%.2f "
"mark.roots=%.2f "
"mark.weak=%.2f "
"mark.closure_parallel=%.2f "
"mark.closure=%.2f "
"mark.global_handles=%.2f "
"clear=%.2f "
"clear.string_table=%.2f "
"clear.weak_lists=%.2f "
"complete.sweep_array_buffers=%.2f "
"evacuate=%.2f "
"evacuate.clean_up=%.2f "
"evacuate.copy=%.2f "
"evacuate.prologue=%.2f "
"evacuate.epilogue=%.2f "
"evacuate.rebalance=%.2f "
"evacuate.update_pointers=%.2f "
"evacuate.update_pointers.slots=%.2f "
"evacuate.update_pointers.weak=%.2f "
"sweep=%.2f "
"sweep.new=%.2f "
"sweep.finish_new_space=%.2f "
"finish=%.2f "
"finish.sweep_array_buffers=%.2f "
"background.mark=%.2f "
"background.sweep=%.2f "
"background.evacuate.copy=%.2f "
"background.evacuate.update_pointers=%.2f "
"background.unmapper=%.2f "
"unmapper=%.2f "
"update_marking_deque=%.2f "
"reset_liveness=%.2f\n",
duration, spent_in_mutator, "mmc", current_.reduce_memory,
current_scope(Scope::MINOR_MC),
current_scope(Scope::TIME_TO_SAFEPOINT),
current_scope(Scope::MINOR_MC_MARK),
current_scope(Scope::MINOR_MC_MARK_SEED),
current_scope(Scope::MINOR_MC_MARK_ROOTS),
current_scope(Scope::MINOR_MC_MARK_WEAK),
current_scope(Scope::MINOR_MC_MARK_FINISH_INCREMENTAL),
current_scope(Scope::MINOR_MC_MARK_SEED),
current_scope(Scope::MINOR_MC_MARK_CLOSURE_PARALLEL),
current_scope(Scope::MINOR_MC_MARK_CLOSURE),
current_scope(Scope::MINOR_MC_MARK_GLOBAL_HANDLES),
current_scope(Scope::MINOR_MC_CLEAR),
current_scope(Scope::MINOR_MC_CLEAR_STRING_TABLE),
current_scope(Scope::MINOR_MC_CLEAR_WEAK_LISTS),
current_scope(Scope::MINOR_MC_COMPLETE_SWEEP_ARRAY_BUFFERS),
current_scope(Scope::MINOR_MC_EVACUATE),
current_scope(Scope::MINOR_MC_EVACUATE_CLEAN_UP),
current_scope(Scope::MINOR_MC_EVACUATE_COPY),
current_scope(Scope::MINOR_MC_EVACUATE_PROLOGUE),
current_scope(Scope::MINOR_MC_EVACUATE_EPILOGUE),
current_scope(Scope::MINOR_MC_EVACUATE_REBALANCE),
current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS),
current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS),
current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK),
current_scope(Scope::MC_SWEEP), current_scope(Scope::MC_SWEEP_NEW),
current_scope(Scope::MINOR_MC_SWEEP_FINISH_NEW),
current_scope(Scope::MINOR_MC_FINISH),
current_scope(Scope::MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS),
current_scope(Scope::MINOR_MC_BACKGROUND_MARKING),
current_scope(Scope::MINOR_MC_BACKGROUND_SWEEPING),
current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY),
current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS),
current_scope(Scope::BACKGROUND_UNMAPPER),
current_scope(Scope::UNMAPPER),
current_scope(Scope::MINOR_MC_MARKING_DEQUE),
current_scope(Scope::MINOR_MC_RESET_LIVENESS));
break;
case Event::MARK_COMPACTOR:
@@ -912,7 +935,9 @@ void GCTracer::PrintNVP() const {
"sweep=%.1f "
"sweep.code=%.1f "
"sweep.map=%.1f "
"sweep.new=%.1f "
"sweep.old=%.1f "
"sweep.finish_new_space=%.2f "
"incremental=%.1f "
"incremental.finalize=%.1f "
"incremental.finalize.external.prologue=%.1f "
@@ -996,7 +1021,9 @@ void GCTracer::PrintNVP() const {
current_scope(Scope::MC_PROLOGUE), current_scope(Scope::MC_SWEEP),
current_scope(Scope::MC_SWEEP_CODE),
current_scope(Scope::MC_SWEEP_MAP),
current_scope(Scope::MC_SWEEP_NEW),
current_scope(Scope::MC_SWEEP_OLD),
current_scope(Scope::MC_SWEEP_FINISH_NEW),
current_scope(Scope::MC_INCREMENTAL),
current_scope(Scope::MC_INCREMENTAL_FINALIZE),
current_scope(Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE),
......
@@ -160,15 +160,22 @@ void IncrementalMarking::Start(GarbageCollector garbage_collector,
Counters* counters = heap_->isolate()->counters();
counters->incremental_marking_reason()->AddSample(
static_cast<int>(gc_reason));
const bool is_major = garbage_collector == GarbageCollector::MARK_COMPACTOR;
if (is_major) {
// Reasons are only reported for major GCs
counters->incremental_marking_reason()->AddSample(
static_cast<int>(gc_reason));
}
NestedTimedHistogramScope incremental_marking_scope(
counters->gc_incremental_marking_start());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_START));
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
is_major ? counters->gc_incremental_marking_start()
: counters->gc_minor_incremental_marking_start());
const auto scope_id = is_major ? GCTracer::Scope::MC_INCREMENTAL_START
: GCTracer::Scope::MINOR_MC_INCREMENTAL_START;
TRACE_EVENT1("v8",
is_major ? "V8.GCIncrementalMarkingStart"
: "V8.GCMinorIncrementalMarkingStart",
"epoch", heap_->tracer()->CurrentEpoch(scope_id));
TRACE_GC_EPOCH(heap()->tracer(), scope_id, ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart();
start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
@@ -181,7 +188,7 @@ void IncrementalMarking::Start(GarbageCollector garbage_collector,
schedule_update_time_ms_ = start_time_ms_;
bytes_marked_concurrently_ = 0;
if (garbage_collector == GarbageCollector::MARK_COMPACTOR) {
if (is_major) {
current_collector_ = CurrentCollector::kMajorMC;
StartMarkingMajor();
heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_,
......
@@ -1105,6 +1105,25 @@ void ShrinkPagesToObjectSizes(Heap* heap, OldLargeObjectSpace* space) {
} // namespace
void MarkCompactCollector::Finish() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
if (heap()->new_lo_space()) {
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_SWEEP_FINISH_NEW_LO,
ThreadKind::kMain);
SweepLargeSpace(heap()->new_lo_space());
}
if (v8_flags.minor_mc && heap()->new_space()) {
// Keep new space sweeping atomic.
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_SWEEP_FINISH_NEW,
ThreadKind::kMain);
sweeper()->ParallelSweepSpace(NEW_SPACE,
Sweeper::SweepingMode::kEagerDuringGC, 0);
heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper());
}
}
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
heap()->isolate()->global_handles()->ClearListOfYoungNodes();
@@ -1126,23 +1145,6 @@ void MarkCompactCollector::Finish() {
local_weak_objects_.reset();
weak_objects_.next_ephemerons.Clear();
if (heap()->new_lo_space()) {
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_FINISH_SWEEP_NEW_LO,
ThreadKind::kMain);
SweepLargeSpace(heap()->new_lo_space());
}
if (v8_flags.minor_mc && heap()->new_space()) {
// Keep new space sweeping atomic.
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_FINISH_SWEEP_NEW,
ThreadKind::kMain);
sweeper()->ParallelSweepSpace(NEW_SPACE,
Sweeper::SweepingMode::kEagerDuringGC, 0);
heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper());
}
sweeper()->StartSweeperTasks();
// Ensure unmapper tasks are stopped such that queued pages aren't freed
@@ -5652,7 +5654,7 @@ void MarkCompactCollector::Sweep() {
heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW, ThreadKind::kMain);
StartSweepNewSpace();
}
sweeper()->StartSweeping();
sweeper()->StartSweeping(garbage_collector_);
}
}
@@ -5960,25 +5962,28 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
std::vector<std::unique_ptr<UpdatingItem>> updating_items;
// Create batches of global handles.
CollectRememberedSetUpdatingItems(this, &updating_items, heap()->old_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
CollectRememberedSetUpdatingItems(this, &updating_items, heap()->code_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
if (heap()->map_space()) {
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->map_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
}
CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
CollectRememberedSetUpdatingItems(this, &updating_items,
heap()->code_lo_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
{
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS);
// Create batches of global handles.
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->old_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->code_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
if (heap()->map_space()) {
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->map_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
}
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->lo_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->code_lo_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
V8::GetCurrentPlatform()
->CreateJob(
v8::TaskPriority::kUserBlocking,
@@ -6048,17 +6053,18 @@ void MinorMarkCompactCollector::StartMarking() {
}
void MinorMarkCompactCollector::Finish() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH);
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEP);
// Keep new space sweeping atomic.
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_FINISH_SWEEP_NEW,
GCTracer::Scope::MINOR_MC_SWEEP_FINISH_NEW,
ThreadKind::kMain);
sweeper_->EnsureCompleted(Sweeper::SweepingMode::kEagerDuringGC);
heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper());
}
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH);
local_marking_worklists_.reset();
main_marking_visitor_.reset();
}
@@ -6091,11 +6097,6 @@ void MinorMarkCompactCollector::CollectGarbage() {
}
#endif // VERIFY_HEAP
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARKING_DEQUE);
heap()->incremental_marking()->UpdateMarkingWorklistAfterYoungGenGC();
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_RESET_LIVENESS);
// Since we promote all surviving large objects immediately, all remaining
@@ -6448,7 +6449,8 @@ void MinorMarkCompactCollector::MarkRootSetInParallel(
// 0. Flush to ensure these items are visible globally and picked up
// by the job.
local_marking_worklists_->Publish();
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL);
V8::GetCurrentPlatform()
->CreateJob(
v8::TaskPriority::kUserBlocking,
@@ -6472,7 +6474,8 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
bool was_marked_incrementally = false;
{
// TODO(v8:13012): TRACE_GC with MINOR_MC_MARK_FINISH_INCREMENTAL.
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_FINISH_INCREMENTAL);
if (heap_->incremental_marking()->Stop()) {
MarkingBarrier::PublishAll(heap());
// TODO(v8:13012): TRACE_GC with MINOR_MC_MARK_FULL_CLOSURE_PARALLEL_JOIN.
@@ -6489,7 +6492,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
// Mark rest on the main thread.
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE);
DrainMarkingWorklist();
}
@@ -6756,7 +6759,7 @@ void MinorMarkCompactCollector::Sweep() {
ThreadKind::kMain);
StartSweepNewSpace();
}
sweeper_->StartSweeping();
sweeper_->StartSweeping(garbage_collector_);
}
} // namespace internal
......
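The Finish() hunks above move new-space and new-LO sweeping under the MC_SWEEP top-level scope, with TRACE_GC opening the enclosing scope and explicit GCTracer::Scope objects opening the nested MC_SWEEP_FINISH_NEW* sub-scopes. As a minimal, self-contained sketch of the RAII scope-timing pattern such constructs rely on (the Tracer and ScopedTrace types below are illustrative stand-ins, not V8's implementation):

```cpp
#include <chrono>
#include <cstdio>

// Hypothetical stand-in for a tracer that records per-scope durations.
struct Tracer {
  void AddScopeTime(const char* scope, double ms) {
    std::printf("%s=%.2fms\n", scope, ms);
  }
};

// RAII helper: reports the elapsed time for a named scope on destruction,
// mirroring how a TRACE_GC-style macro instantiates a scope object.
class ScopedTrace {
 public:
  ScopedTrace(Tracer* tracer, const char* scope)
      : tracer_(tracer),
        scope_(scope),
        start_(std::chrono::steady_clock::now()) {}
  ~ScopedTrace() {
    const double ms = std::chrono::duration<double, std::milli>(
                          std::chrono::steady_clock::now() - start_)
                          .count();
    tracer_->AddScopeTime(scope_, ms);
  }

 private:
  Tracer* tracer_;
  const char* scope_;
  std::chrono::steady_clock::time_point start_;
};

int main() {
  Tracer tracer;
  {
    ScopedTrace sweep(&tracer, "mc.sweep");
    {
      ScopedTrace finish_new(&tracer, "mc.sweep.finish_new_space");
      // ... sweep new space here ...
    }
    // ... remaining work attributed to the enclosing sweep scope ...
  }
  return 0;
}
```

The inner scope reports when it goes out of scope, so nested phases get their own timing entries while still running inside the enclosing phase.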
@@ -69,12 +69,20 @@ class Sweeper::SweeperJob final : public JobTask {
void Run(JobDelegate* delegate) final {
RwxMemoryWriteScope::SetDefaultPermissionsForNewThread();
DCHECK(sweeper_->current_collector_.has_value());
if (delegate->IsJoiningThread()) {
TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP);
TRACE_GC(tracer_, sweeper_->current_collector_ ==
GarbageCollector::MINOR_MARK_COMPACTOR
? GCTracer::Scope::MINOR_MC_SWEEP
: GCTracer::Scope::MC_SWEEP);
RunImpl(delegate);
} else {
TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
TRACE_GC_EPOCH(
tracer_,
sweeper_->current_collector_ == GarbageCollector::MINOR_MARK_COMPACTOR
? GCTracer::Scope::MINOR_MC_BACKGROUND_SWEEPING
: GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
RunImpl(delegate);
}
}
@@ -160,9 +168,10 @@ void Sweeper::TearDown() {
if (job_handle_ && job_handle_->IsValid()) job_handle_->Cancel();
}
void Sweeper::StartSweeping() {
void Sweeper::StartSweeping(GarbageCollector collector) {
DCHECK(local_pretenuring_feedback_.empty());
sweeping_in_progress_ = true;
current_collector_ = collector;
should_reduce_memory_ = heap_->ShouldReduceMemory();
ForAllSweepingSpaces([this](AllocationSpace space) {
// Sorting is done in order to make compaction more efficient: by sweeping
@@ -238,6 +247,7 @@ void Sweeper::EnsureCompleted(SweepingMode sweeping_mode) {
local_pretenuring_feedback_.clear();
concurrent_sweepers_.clear();
current_collector_.reset();
sweeping_in_progress_ = false;
}
@@ -246,14 +256,6 @@ void Sweeper::DrainSweepingWorklistForSpace(AllocationSpace space) {
ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0);
}
void Sweeper::SupportConcurrentSweeping() {
ForAllSweepingSpaces([this](AllocationSpace space) {
const int kMaxPagesToSweepPerSpace = 1;
ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0,
kMaxPagesToSweepPerSpace);
});
}
bool Sweeper::AreSweeperTasksRunning() {
return job_handle_ && job_handle_->IsValid() && job_handle_->IsActive();
}
......
@@ -8,6 +8,7 @@
#include <map>
#include <vector>
#include "src/base/optional.h"
#include "src/base/platform/condition-variable.h"
#include "src/base/platform/semaphore.h"
#include "src/common/globals.h"
@@ -101,16 +102,13 @@ class Sweeper {
// After calling this function sweeping is considered to be in progress
// and the main thread can sweep lazily, but the background sweeper tasks
// are not running yet.
void StartSweeping();
void StartSweeping(GarbageCollector collector);
V8_EXPORT_PRIVATE void StartSweeperTasks();
void EnsureCompleted(
SweepingMode sweeping_mode = SweepingMode::kLazyOrConcurrent);
void DrainSweepingWorklistForSpace(AllocationSpace space);
bool AreSweeperTasksRunning();
// Support concurrent sweepers from main thread
void SupportConcurrentSweeping();
Page* GetSweptPageSafe(PagedSpaceBase* space);
NonAtomicMarkingState* marking_state() const { return marking_state_; }
@@ -199,6 +197,7 @@
std::atomic<bool> sweeping_in_progress_;
bool should_reduce_memory_;
Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
base::Optional<GarbageCollector> current_collector_;
};
} // namespace internal
......
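With StartSweeping() now taking the collector and stashing it in a base::Optional member, the sweeper job can pick minor-MC versus major-MC trace scopes for as long as sweeping is in progress, and the DCHECK in SweeperJob::Run() documents that tasks only run while a collector is set. A stripped-down sketch of that pattern using std::optional (names are illustrative, not V8's API):

```cpp
#include <cassert>
#include <optional>
#include <string>

enum class GarbageCollector { MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

class Sweeper {
 public:
  void StartSweeping(GarbageCollector collector) {
    current_collector_ = collector;  // remembered for the sweeping tasks
    sweeping_in_progress_ = true;
  }

  // Scope/histogram selection depends on which collector started sweeping.
  std::string BackgroundScopeName() const {
    assert(current_collector_.has_value());
    return *current_collector_ == GarbageCollector::MINOR_MARK_COMPACTOR
               ? "minor_mc.background.sweep"
               : "mc.background.sweep";
  }

  void EnsureCompleted() {
    current_collector_.reset();  // no "current collector" once sweeping ends
    sweeping_in_progress_ = false;
  }

 private:
  std::optional<GarbageCollector> current_collector_;
  bool sweeping_in_progress_ = false;
};
```

Resetting the optional in EnsureCompleted() mirrors the diff: the collector choice is only meaningful while sweeping is in progress.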
@@ -528,6 +528,8 @@
F(MC_INCREMENTAL_START) \
F(MC_INCREMENTAL_SWEEPING)
#define MINOR_INCREMENTAL_SCOPES(F) F(MINOR_MC_INCREMENTAL_START)
#define TOP_MC_SCOPES(F) \
F(MC_CLEAR) \
F(MC_EPILOGUE) \
@@ -537,8 +539,16 @@
F(MC_PROLOGUE) \
F(MC_SWEEP)
#define TOP_MINOR_MC_SCOPES(F) \
F(MINOR_MC_CLEAR) \
F(MINOR_MC_EVACUATE) \
F(MINOR_MC_FINISH) \
F(MINOR_MC_MARK) \
F(MINOR_MC_SWEEP)
#define TRACER_SCOPES(F) \
INCREMENTAL_SCOPES(F) \
MINOR_INCREMENTAL_SCOPES(F) \
F(HEAP_EMBEDDER_TRACING_EPILOGUE) \
F(HEAP_EPILOGUE) \
F(HEAP_EPILOGUE_REDUCE_NEW_SPACE) \
@@ -580,8 +590,6 @@
F(MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN) \
F(MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS) \
F(MC_EVACUATE_UPDATE_POINTERS_WEAK) \
F(MC_FINISH_SWEEP_NEW_LO) \
F(MC_FINISH_SWEEP_NEW) \
F(MC_FINISH_SWEEP_ARRAY_BUFFERS) \
F(MC_MARK_CLIENT_HEAPS) \
F(MC_MARK_EMBEDDER_PROLOGUE) \
@@ -600,13 +608,13 @@
F(MC_SWEEP_MAP) \
F(MC_SWEEP_NEW) \
F(MC_SWEEP_OLD) \
F(MC_SWEEP_FINISH_NEW_LO) \
F(MC_SWEEP_FINISH_NEW) \
F(MINOR_MARK_COMPACTOR) \
F(MINOR_MC) \
F(MINOR_MC_CLEAR) \
TOP_MINOR_MC_SCOPES(F) \
F(MINOR_MC_CLEAR_STRING_TABLE) \
F(MINOR_MC_CLEAR_WEAK_LISTS) \
F(MINOR_MC_COMPLETE_SWEEP_ARRAY_BUFFERS) \
F(MINOR_MC_EVACUATE) \
F(MINOR_MC_EVACUATE_CLEAN_UP) \
F(MINOR_MC_EVACUATE_COPY) \
F(MINOR_MC_EVACUATE_COPY_PARALLEL) \
@@ -617,19 +625,17 @@
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_PARALLEL) \
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS) \
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK) \
F(MINOR_MC_FINISH) \
F(MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS) \
F(MINOR_MC_FINISH_SWEEP_NEW) \
F(MINOR_MC_MARK) \
F(MINOR_MC_MARK_GLOBAL_HANDLES) \
F(MINOR_MC_MARK_FINISH_INCREMENTAL) \
F(MINOR_MC_MARK_PARALLEL) \
F(MINOR_MC_MARK_SEED) \
F(MINOR_MC_MARK_ROOTS) \
F(MINOR_MC_MARK_WEAK) \
F(MINOR_MC_MARKING_DEQUE) \
F(MINOR_MC_MARK_CLOSURE_PARALLEL) \
F(MINOR_MC_MARK_CLOSURE) \
F(MINOR_MC_RESET_LIVENESS) \
F(MINOR_MC_SWEEP) \
F(MINOR_MC_SWEEP_NEW) \
F(MINOR_MC_SWEEP_FINISH_NEW) \
F(SAFEPOINT) \
F(SCAVENGER) \
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \
@@ -664,6 +670,7 @@
F(MINOR_MC_BACKGROUND_EVACUATE_COPY) \
F(MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS) \
F(MINOR_MC_BACKGROUND_MARKING) \
F(MINOR_MC_BACKGROUND_SWEEPING) \
F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
#define TRACER_YOUNG_EPOCH_SCOPES(F) \
......@@ -673,6 +680,7 @@
F(MINOR_MC_BACKGROUND_EVACUATE_COPY) \
F(MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS) \
F(MINOR_MC_BACKGROUND_MARKING) \
F(MINOR_MC_BACKGROUND_SWEEPING) \
F(SCAVENGER) \
F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) \
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS)
......
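The F(...) lists in the gc-tracer header are X-macro style: each entry is expanded by a helper macro into a GCTracer::Scope enumerator and, typically, also into a human-readable name, which is why introducing MINOR_MC_SWEEP, MINOR_MC_SWEEP_FINISH_NEW, or MINOR_MC_BACKGROUND_SWEEPING is a one-entry change per list. A simplified, self-contained sketch of that expansion pattern (not the actual V8 macros):

```cpp
#include <cstdio>

// Simplified scope list in the same F(...) style as the tracer header.
#define DEMO_SCOPES(F)   \
  F(MINOR_MC_MARK)       \
  F(MINOR_MC_SWEEP)      \
  F(MINOR_MC_BACKGROUND_SWEEPING)

// Expand once into enumerators...
enum class ScopeId {
#define DEFINE_SCOPE(name) name,
  DEMO_SCOPES(DEFINE_SCOPE)
#undef DEFINE_SCOPE
      NUMBER_OF_SCOPES
};

// ...and once into the strings a reporter like PrintNVP() could use.
static const char* const kScopeNames[] = {
#define SCOPE_NAME(name) #name,
    DEMO_SCOPES(SCOPE_NAME)
#undef SCOPE_NAME
};

int main() {
  for (int i = 0; i < static_cast<int>(ScopeId::NUMBER_OF_SCOPES); ++i) {
    std::printf("%d: %s\n", i, kScopeNames[i]);
  }
  return 0;
}
```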
@@ -131,6 +131,8 @@ namespace internal {
HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \
MILLISECOND) \
HT(gc_minor_incremental_marking_start, V8.GCMinorIncrementalMarkingStart, \
10000, MILLISECOND) \
HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
MILLISECOND) \
HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
......