Commit be52501d authored by Dominik Inführ, committed by Commit Bot

[heap] Add epoch to GC tracing events

This CL adds the TRACE_GC_EPOCH macro, which adds the epoch as an attribute
to the trace event. Use TRACE_GC_EPOCH for top-level events; nested
events can get the information from their parent.

V8's GC needs an epoch for young and full collections, since scavenges
also occur during incremental marking. The epoch is also process-wide,
so different isolates do not reuse the same id.

Change-Id: I8889bccce51e008374b4796445a50062bd87a45d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2565247
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71521}
parent 5fe48817
...@@ -154,7 +154,7 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) { ...@@ -154,7 +154,7 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
scope == SweepingScope::kYoung scope == SweepingScope::kYoung
? GCTracer::Scope::BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP ? GCTracer::Scope::BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP
: GCTracer::Scope::BACKGROUND_FULL_ARRAY_BUFFER_SWEEP; : GCTracer::Scope::BACKGROUND_FULL_ARRAY_BUFFER_SWEEP;
TRACE_GC1(heap_->tracer(), scope_id, ThreadKind::kBackground); TRACE_GC_EPOCH(heap_->tracer(), scope_id, ThreadKind::kBackground);
base::MutexGuard guard(&sweeping_mutex_); base::MutexGuard guard(&sweeping_mutex_);
job_->Sweep(); job_->Sweep();
job_finished_.NotifyAll(); job_finished_.NotifyAll();
......
...@@ -398,8 +398,8 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap, ...@@ -398,8 +398,8 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap,
void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch, void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch,
bool is_forced_gc) { bool is_forced_gc) {
TRACE_GC1(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING, TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
ThreadKind::kBackground); ThreadKind::kBackground);
size_t kBytesUntilInterruptCheck = 64 * KB; size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000; int kObjectsUntilInterrupCheck = 1000;
uint8_t task_id = delegate->GetTaskId() + 1; uint8_t task_id = delegate->GetTaskId() + 1;
......
...@@ -48,6 +48,14 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() { ...@@ -48,6 +48,14 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() {
} }
} }
// Returns the epoch that applies to |scope_id|: the young-generation epoch
// for young-GC scopes, the full-GC epoch for everything else (see
// Scope::NeedsYoungEpoch for the classification).
CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
  return Scope::NeedsYoungEpoch(scope_id) ? heap_->epoch_young()
                                          : heap_->epoch_full();
}
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind) GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
: tracer_(tracer), scope_(scope), thread_kind_(thread_kind) { : tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
start_time_ = tracer_->MonotonicallyIncreasingTimeInMs(); start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
...@@ -95,6 +103,19 @@ const char* GCTracer::Scope::Name(ScopeId id) { ...@@ -95,6 +103,19 @@ const char* GCTracer::Scope::Name(ScopeId id) {
return nullptr; return nullptr;
} }
// Classifies |id|: true for scopes belonging to a young-generation
// collection (listed in TRACER_YOUNG_EPOCH_SCOPES), false otherwise.
bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
  switch (id) {
#define YOUNG_EPOCH_CASE(scope) case Scope::scope:
    TRACER_YOUNG_EPOCH_SCOPES(YOUNG_EPOCH_CASE)
#undef YOUNG_EPOCH_CASE
    // All young-epoch scopes fall through to a single return.
    return true;
    default:
      return false;
  }
  UNREACHABLE();
}
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason, GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
const char* collector_reason) const char* collector_reason)
: type(type), : type(type),
......
...@@ -41,6 +41,13 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects }; ...@@ -41,6 +41,13 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind); \ GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind); \
TRACE_EVENT0(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id)) TRACE_EVENT0(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id))
// Like TRACE_GC1, but additionally attaches the current GC epoch (young or
// full, chosen by the scope id) as an "epoch" attribute on the trace event.
// |scope_id| is captured once into a local so the macro argument is not
// evaluated twice, and the epoch local carries the gc_tracer_ prefix to
// avoid shadowing/colliding with identifiers at the expansion site.
#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind)                         \
  GCTracer::Scope::ScopeId gc_tracer_scope_id(scope_id);                      \
  GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind);   \
  CollectionEpoch gc_tracer_epoch = (tracer)->CurrentEpoch(gc_tracer_scope_id); \
  TRACE_EVENT1(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id), \
               "epoch", gc_tracer_epoch)
// GCTracer collects and prints ONE line after each garbage collector // GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used. // invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer { class V8_EXPORT_PRIVATE GCTracer {
...@@ -94,6 +101,7 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -94,6 +101,7 @@ class V8_EXPORT_PRIVATE GCTracer {
Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind); Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind);
~Scope(); ~Scope();
static const char* Name(ScopeId id); static const char* Name(ScopeId id);
static bool NeedsYoungEpoch(ScopeId id);
private: private:
GCTracer* tracer_; GCTracer* tracer_;
...@@ -334,6 +342,8 @@ class V8_EXPORT_PRIVATE GCTracer { ...@@ -334,6 +342,8 @@ class V8_EXPORT_PRIVATE GCTracer {
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats(); WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
CollectionEpoch CurrentEpoch(Scope::ScopeId id);
private: private:
FRIEND_TEST(GCTracer, AverageSpeed); FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput); FRIEND_TEST(GCTracerTest, AllocationThroughput);
......
...@@ -104,6 +104,14 @@ ...@@ -104,6 +104,14 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
namespace {

// Process-wide, monotonically increasing GC epoch counter. It is shared by
// all isolates so that two isolates never reuse the same epoch id.
std::atomic<CollectionEpoch> global_epoch{0};

// Hands out the next epoch (first call yields 1). Relaxed ordering suffices:
// the value is only used as a unique identifier, not for synchronization.
CollectionEpoch next_epoch() {
  const CollectionEpoch previous =
      global_epoch.fetch_add(1, std::memory_order_relaxed);
  return previous + 1;
}

}  // namespace
#ifdef V8_ENABLE_THIRD_PARTY_HEAP #ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) { Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>( return reinterpret_cast<Isolate*>(
...@@ -1732,6 +1740,8 @@ int Heap::NotifyContextDisposed(bool dependant_context) { ...@@ -1732,6 +1740,8 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
void Heap::StartIncrementalMarking(int gc_flags, void Heap::StartIncrementalMarking(int gc_flags,
GarbageCollectionReason gc_reason, GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags) { GCCallbackFlags gc_callback_flags) {
epoch_full_ = next_epoch();
DCHECK(incremental_marking()->IsStopped()); DCHECK(incremental_marking()->IsStopped());
SafepointScope safepoint(this); SafepointScope safepoint(this);
set_current_gc_flags(gc_flags); set_current_gc_flags(gc_flags);
...@@ -1946,23 +1956,43 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) { ...@@ -1946,23 +1956,43 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
tracer()->AddSurvivalRatio(survival_rate); tracer()->AddSurvivalRatio(survival_rate);
} }
namespace {

// Maps a GarbageCollector kind to the top-level tracer scope that represents
// an entire collection cycle of that kind.
GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
  switch (collector) {
    case SCAVENGER:
      return GCTracer::Scope::ScopeId::SCAVENGER;
    case MINOR_MARK_COMPACTOR:
      return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR;
    case MARK_COMPACTOR:
      return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
  }
  UNREACHABLE();
}

}  // namespace
size_t Heap::PerformGarbageCollection( size_t Heap::PerformGarbageCollection(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) { GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
DisallowJavascriptExecution no_js(isolate()); DisallowJavascriptExecution no_js(isolate());
base::Optional<SafepointScope> optional_safepoint_scope; base::Optional<SafepointScope> optional_safepoint_scope;
UpdateCurrentEpoch(collector);
// Stop time-to-collection timer before safepoint - we do not want to measure // Stop time-to-collection timer before safepoint - we do not want to measure
// time for safepointing. // time for safepointing.
collection_barrier_->StopTimeToCollectionTimer(); collection_barrier_->StopTimeToCollectionTimer();
TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);
if (FLAG_local_heaps) { if (FLAG_local_heaps) {
optional_safepoint_scope.emplace(this); optional_safepoint_scope.emplace(this);
} }
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
if (FLAG_verify_heap) { if (FLAG_verify_heap) {
Verify(); Verify();
} }
#endif #endif
tracer()->StartInSafepoint(); tracer()->StartInSafepoint();
GarbageCollectionPrologueInSafepoint(); GarbageCollectionPrologueInSafepoint();
...@@ -2042,6 +2072,14 @@ size_t Heap::PerformGarbageCollection( ...@@ -2042,6 +2072,14 @@ size_t Heap::PerformGarbageCollection(
return freed_global_handles; return freed_global_handles;
} }
void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
  // Every young-generation collection starts a fresh young epoch.
  if (IsYoungGenerationCollector(collector)) {
    epoch_young_ = next_epoch();
    return;
  }
  // A full collection only takes a new epoch when incremental marking is not
  // running; otherwise the epoch was already assigned when marking started
  // (see StartIncrementalMarking).
  if (incremental_marking()->IsStopped()) {
    epoch_full_ = next_epoch();
  }
}
void Heap::RecomputeLimits(GarbageCollector collector) { void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == MARK_COMPACTOR) || if (!((collector == MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() && (HasLowYoungGenerationAllocationRate() &&
...@@ -3416,8 +3454,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally( ...@@ -3416,8 +3454,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
HistogramTimerScope incremental_marking_scope( HistogramTimerScope incremental_marking_scope(
isolate()->counters()->gc_incremental_marking_finalize()); isolate()->counters()->gc_incremental_marking_finalize());
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize"); TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE); TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
ThreadKind::kMain);
SafepointScope safepoint(this); SafepointScope safepoint(this);
InvokeIncrementalMarkingPrologueCallbacks(); InvokeIncrementalMarkingPrologueCallbacks();
......
...@@ -249,6 +249,8 @@ using EphemeronRememberedSet = ...@@ -249,6 +249,8 @@ using EphemeronRememberedSet =
std::unordered_map<EphemeronHashTable, std::unordered_set<int>, std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
Object::Hasher>; Object::Hasher>;
using CollectionEpoch = uint32_t;
class Heap { class Heap {
public: public:
// Stores ephemeron entries where the EphemeronHashTable is in old-space, // Stores ephemeron entries where the EphemeronHashTable is in old-space,
...@@ -511,6 +513,8 @@ class Heap { ...@@ -511,6 +513,8 @@ class Heap {
void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk); void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
void UpdateCurrentEpoch(GarbageCollector collector);
inline Address* NewSpaceAllocationTopAddress(); inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress(); inline Address* NewSpaceAllocationLimitAddress();
inline Address* OldSpaceAllocationTopAddress(); inline Address* OldSpaceAllocationTopAddress();
...@@ -1558,6 +1562,9 @@ class Heap { ...@@ -1558,6 +1562,9 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object); static Isolate* GetIsolateFromWritableObject(HeapObject object);
CollectionEpoch epoch_young() { return epoch_young_; }
CollectionEpoch epoch_full() { return epoch_full_; }
private: private:
using ExternalStringTableUpdaterCallback = String (*)(Heap* heap, using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
FullObjectSlot pointer); FullObjectSlot pointer);
...@@ -2331,6 +2338,11 @@ class Heap { ...@@ -2331,6 +2338,11 @@ class Heap {
std::unique_ptr<third_party_heap::Heap> tp_heap_; std::unique_ptr<third_party_heap::Heap> tp_heap_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Classes in "heap" can be friends. // Classes in "heap" can be friends.
friend class AlwaysAllocateScope; friend class AlwaysAllocateScope;
friend class ArrayBufferCollector; friend class ArrayBufferCollector;
......
...@@ -178,8 +178,10 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) { ...@@ -178,8 +178,10 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
static_cast<int>(gc_reason)); static_cast<int>(gc_reason));
HistogramTimerScope incremental_marking_scope( HistogramTimerScope incremental_marking_scope(
counters->gc_incremental_marking_start()); counters->gc_incremental_marking_start());
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart"); TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START); heap_->epoch_full());
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart(); heap_->tracer()->NotifyIncrementalMarkingStart();
start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs(); start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
...@@ -782,8 +784,9 @@ StepResult IncrementalMarking::AdvanceWithDeadline( ...@@ -782,8 +784,9 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
StepOrigin step_origin) { StepOrigin step_origin) {
HistogramTimerScope incremental_marking_scope( HistogramTimerScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking()); heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT0("v8", "V8.GCIncrementalMarking"); TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL); TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
DCHECK(!IsStopped()); DCHECK(!IsStopped());
ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs()); ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs());
......
...@@ -3119,8 +3119,8 @@ class PageEvacuationJob : public v8::JobTask { ...@@ -3119,8 +3119,8 @@ class PageEvacuationJob : public v8::JobTask {
TRACE_GC(tracer_, evacuator->GetTracingScope()); TRACE_GC(tracer_, evacuator->GetTracingScope());
ProcessItems(delegate, evacuator); ProcessItems(delegate, evacuator);
} else { } else {
TRACE_GC1(tracer_, evacuator->GetBackgroundTracingScope(), TRACE_GC_EPOCH(tracer_, evacuator->GetBackgroundTracingScope(),
ThreadKind::kBackground); ThreadKind::kBackground);
ProcessItems(delegate, evacuator); ProcessItems(delegate, evacuator);
} }
} }
...@@ -3493,7 +3493,7 @@ class PointersUpdatingJob : public v8::JobTask { ...@@ -3493,7 +3493,7 @@ class PointersUpdatingJob : public v8::JobTask {
TRACE_GC(tracer_, scope_); TRACE_GC(tracer_, scope_);
UpdatePointers(delegate); UpdatePointers(delegate);
} else { } else {
TRACE_GC1(tracer_, background_scope_, ThreadKind::kBackground); TRACE_GC_EPOCH(tracer_, background_scope_, ThreadKind::kBackground);
UpdatePointers(delegate); UpdatePointers(delegate);
} }
} }
...@@ -4866,9 +4866,9 @@ class YoungGenerationMarkingJob : public v8::JobTask { ...@@ -4866,9 +4866,9 @@ class YoungGenerationMarkingJob : public v8::JobTask {
GCTracer::Scope::MINOR_MC_MARK_PARALLEL); GCTracer::Scope::MINOR_MC_MARK_PARALLEL);
ProcessItems(delegate); ProcessItems(delegate);
} else { } else {
TRACE_GC1(collector_->heap()->tracer(), TRACE_GC_EPOCH(collector_->heap()->tracer(),
GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING, GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
ThreadKind::kBackground); ThreadKind::kBackground);
ProcessItems(delegate); ProcessItems(delegate);
} }
} }
......
...@@ -160,8 +160,8 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask { ...@@ -160,8 +160,8 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
: unmapper_(unmapper), tracer_(isolate->heap()->tracer()) {} : unmapper_(unmapper), tracer_(isolate->heap()->tracer()) {}
void Run(JobDelegate* delegate) override { void Run(JobDelegate* delegate) override {
TRACE_GC1(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER, TRACE_GC_EPOCH(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
ThreadKind::kBackground); ThreadKind::kBackground);
unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>( unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>(
delegate); delegate);
if (FLAG_trace_unmapper) { if (FLAG_trace_unmapper) {
......
...@@ -182,9 +182,9 @@ void ScavengerCollector::JobTask::Run(JobDelegate* delegate) { ...@@ -182,9 +182,9 @@ void ScavengerCollector::JobTask::Run(JobDelegate* delegate) {
GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL); GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
ProcessItems(delegate, scavenger); ProcessItems(delegate, scavenger);
} else { } else {
TRACE_GC1(outer_->heap_->tracer(), TRACE_GC_EPOCH(outer_->heap_->tracer(),
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
ThreadKind::kBackground); ThreadKind::kBackground);
ProcessItems(delegate, scavenger); ProcessItems(delegate, scavenger);
} }
} }
......
...@@ -86,8 +86,8 @@ class Sweeper::SweeperJob final : public JobTask { ...@@ -86,8 +86,8 @@ class Sweeper::SweeperJob final : public JobTask {
TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP); TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP);
RunImpl(delegate); RunImpl(delegate);
} else { } else {
TRACE_GC1(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING, TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground); ThreadKind::kBackground);
RunImpl(delegate); RunImpl(delegate);
} }
} }
...@@ -596,8 +596,8 @@ class Sweeper::IterabilityTask final : public CancelableTask { ...@@ -596,8 +596,8 @@ class Sweeper::IterabilityTask final : public CancelableTask {
private: private:
void RunInternal() final { void RunInternal() final {
TRACE_GC1(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING, TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground); ThreadKind::kBackground);
for (Page* page : sweeper_->iterability_list_) { for (Page* page : sweeper_->iterability_list_) {
sweeper_->MakeIterable(page); sweeper_->MakeIterable(page);
} }
......
...@@ -425,6 +425,7 @@ ...@@ -425,6 +425,7 @@
F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) \ F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) \
F(HEAP_PROLOGUE) \ F(HEAP_PROLOGUE) \
F(HEAP_PROLOGUE_SAFEPOINT) \ F(HEAP_PROLOGUE_SAFEPOINT) \
F(MARK_COMPACTOR) \
TOP_MC_SCOPES(F) \ TOP_MC_SCOPES(F) \
F(MC_CLEAR_DEPENDENT_CODE) \ F(MC_CLEAR_DEPENDENT_CODE) \
F(MC_CLEAR_FLUSHABLE_BYTECODE) \ F(MC_CLEAR_FLUSHABLE_BYTECODE) \
...@@ -466,6 +467,7 @@ ...@@ -466,6 +467,7 @@
F(MC_SWEEP_CODE) \ F(MC_SWEEP_CODE) \
F(MC_SWEEP_MAP) \ F(MC_SWEEP_MAP) \
F(MC_SWEEP_OLD) \ F(MC_SWEEP_OLD) \
F(MINOR_MARK_COMPACTOR) \
F(MINOR_MC) \ F(MINOR_MC) \
F(MINOR_MC_CLEAR) \ F(MINOR_MC_CLEAR) \
F(MINOR_MC_CLEAR_STRING_TABLE) \ F(MINOR_MC_CLEAR_STRING_TABLE) \
...@@ -491,6 +493,7 @@ ...@@ -491,6 +493,7 @@
F(MINOR_MC_MARKING_DEQUE) \ F(MINOR_MC_MARKING_DEQUE) \
F(MINOR_MC_RESET_LIVENESS) \ F(MINOR_MC_RESET_LIVENESS) \
F(MINOR_MC_SWEEPING) \ F(MINOR_MC_SWEEPING) \
F(SCAVENGER) \
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \ F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \
F(SCAVENGER_FAST_PROMOTE) \ F(SCAVENGER_FAST_PROMOTE) \
F(SCAVENGER_FREE_REMEMBERED_SET) \ F(SCAVENGER_FREE_REMEMBERED_SET) \
...@@ -520,4 +523,10 @@ ...@@ -520,4 +523,10 @@
F(MINOR_MC_BACKGROUND_MARKING) \ F(MINOR_MC_BACKGROUND_MARKING) \
F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
// Tracer scopes that belong to a young-generation collection and therefore
// report the young GC epoch; all other scopes report the full GC epoch
// (consumed by GCTracer::Scope::NeedsYoungEpoch).
#define TRACER_YOUNG_EPOCH_SCOPES(F) \
  F(BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP) \
  F(MINOR_MARK_COMPACTOR) \
  F(SCAVENGER) \
  F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
#endif // V8_INIT_HEAP_SYMBOLS_H_ #endif // V8_INIT_HEAP_SYMBOLS_H_
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment