Commit be52501d authored by Dominik Inführ, committed by Commit Bot

[heap] Add epoch to GC tracing events

This CL adds the TRACE_GC_EPOCH macro, which adds the epoch as an attribute
to the trace event. Use TRACE_GC_EPOCH for top-level events; nested
events can get the information from their parent.

V8's GC needs an epoch for young and full collections, since scavenges
also occur during incremental marking. The epoch is also process-wide,
so different isolates do not reuse the same id.

Change-Id: I8889bccce51e008374b4796445a50062bd87a45d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2565247
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71521}
parent 5fe48817
......@@ -154,7 +154,7 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
scope == SweepingScope::kYoung
? GCTracer::Scope::BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP
: GCTracer::Scope::BACKGROUND_FULL_ARRAY_BUFFER_SWEEP;
TRACE_GC1(heap_->tracer(), scope_id, ThreadKind::kBackground);
TRACE_GC_EPOCH(heap_->tracer(), scope_id, ThreadKind::kBackground);
base::MutexGuard guard(&sweeping_mutex_);
job_->Sweep();
job_finished_.NotifyAll();
......
......@@ -398,8 +398,8 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap,
void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch,
bool is_forced_gc) {
TRACE_GC1(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
ThreadKind::kBackground);
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
ThreadKind::kBackground);
size_t kBytesUntilInterruptCheck = 64 * KB;
int kObjectsUntilInterrupCheck = 1000;
uint8_t task_id = delegate->GetTaskId() + 1;
......
......@@ -48,6 +48,14 @@ double GCTracer::MonotonicallyIncreasingTimeInMs() {
}
}
// Returns the epoch to attach to a trace event for |scope_id|: scopes that
// belong to a young-generation collection report the young epoch, every other
// scope reports the full epoch. Two separate epochs are needed because
// scavenges can occur while incremental (full) marking is in progress.
CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId scope_id) {
  return Scope::NeedsYoungEpoch(scope_id) ? heap_->epoch_young()
                                          : heap_->epoch_full();
}
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
: tracer_(tracer), scope_(scope), thread_kind_(thread_kind) {
start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
......@@ -95,6 +103,19 @@ const char* GCTracer::Scope::Name(ScopeId id) {
return nullptr;
}
// Returns true when |id| is one of the scopes enumerated by
// TRACER_YOUNG_EPOCH_SCOPES, i.e. a scope whose trace event should carry the
// young-collection epoch instead of the full-collection epoch (see
// GCTracer::CurrentEpoch).
bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
// Expands each listed scope into a `case` that returns true.
#define CASE(scope) \
case Scope::scope: \
return true;
switch (id) {
TRACER_YOUNG_EPOCH_SCOPES(CASE)
default:
return false;
}
#undef CASE
// Not reachable: every path above returns; kept as a defensive marker.
UNREACHABLE();
}
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
const char* collector_reason)
: type(type),
......
......@@ -41,6 +41,13 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind); \
TRACE_EVENT0(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id))
// Opens a GCTracer::Scope for |scope_id| and emits the matching trace event
// with the current collection epoch attached as an "epoch" attribute.
// Intended for top-level GC events; nested events inherit the epoch from
// their parent event and can use the plain TRACE_GC* macros.
#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind) \
GCTracer::Scope::ScopeId gc_tracer_scope_id(scope_id); \
GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id, thread_kind); \
CollectionEpoch epoch = tracer->CurrentEpoch(scope_id); \
TRACE_EVENT1(TRACE_GC_CATEGORIES, GCTracer::Scope::Name(gc_tracer_scope_id), \
"epoch", epoch)
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
......@@ -94,6 +101,7 @@ class V8_EXPORT_PRIVATE GCTracer {
Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind);
~Scope();
static const char* Name(ScopeId id);
static bool NeedsYoungEpoch(ScopeId id);
private:
GCTracer* tracer_;
......@@ -334,6 +342,8 @@ class V8_EXPORT_PRIVATE GCTracer {
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
CollectionEpoch CurrentEpoch(Scope::ScopeId id);
private:
FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput);
......
......@@ -104,6 +104,14 @@
namespace v8 {
namespace internal {
namespace {

// Process-wide epoch counter: per the intent stated in the commit message,
// the epoch is shared across isolates so two isolates never reuse an id.
std::atomic<CollectionEpoch> global_epoch{0};

// Hands out the next epoch id. Relaxed ordering suffices — callers only need
// a unique, monotonically increasing value, not synchronization.
CollectionEpoch next_epoch() {
  const CollectionEpoch previous =
      global_epoch.fetch_add(1, std::memory_order_relaxed);
  return previous + 1;
}

}  // namespace
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>(
......@@ -1732,6 +1740,8 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
void Heap::StartIncrementalMarking(int gc_flags,
GarbageCollectionReason gc_reason,
GCCallbackFlags gc_callback_flags) {
epoch_full_ = next_epoch();
DCHECK(incremental_marking()->IsStopped());
SafepointScope safepoint(this);
set_current_gc_flags(gc_flags);
......@@ -1946,23 +1956,43 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
tracer()->AddSurvivalRatio(survival_rate);
}
namespace {

// Maps a collector kind to the top-level tracer scope used for its main
// trace event in Heap::PerformGarbageCollection.
GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
  switch (collector) {
    case SCAVENGER:
      return GCTracer::Scope::ScopeId::SCAVENGER;
    case MINOR_MARK_COMPACTOR:
      return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR;
    case MARK_COMPACTOR:
      return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
  }
  UNREACHABLE();
}

}  // namespace
size_t Heap::PerformGarbageCollection(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
DisallowJavascriptExecution no_js(isolate());
base::Optional<SafepointScope> optional_safepoint_scope;
UpdateCurrentEpoch(collector);
// Stop time-to-collection timer before safepoint - we do not want to measure
// time for safepointing.
collection_barrier_->StopTimeToCollectionTimer();
TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);
if (FLAG_local_heaps) {
optional_safepoint_scope.emplace(this);
}
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
}
#endif
tracer()->StartInSafepoint();
GarbageCollectionPrologueInSafepoint();
......@@ -2042,6 +2072,14 @@ size_t Heap::PerformGarbageCollection(
return freed_global_handles;
}
// Advances the epoch for the collection that is about to run. Young and full
// collections keep separate epochs because scavenges can interleave with
// incremental marking. When incremental marking is already in progress, the
// full epoch was assigned at StartIncrementalMarking, so it is left untouched.
void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
  if (IsYoungGenerationCollector(collector)) {
    epoch_young_ = next_epoch();
    return;
  }
  if (incremental_marking()->IsStopped()) {
    epoch_full_ = next_epoch();
  }
}
void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() &&
......@@ -3416,8 +3454,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
HistogramTimerScope incremental_marking_scope(
isolate()->counters()->gc_incremental_marking_finalize());
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize");
TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE);
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
ThreadKind::kMain);
SafepointScope safepoint(this);
InvokeIncrementalMarkingPrologueCallbacks();
......
......@@ -249,6 +249,8 @@ using EphemeronRememberedSet =
std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
Object::Hasher>;
using CollectionEpoch = uint32_t;
class Heap {
public:
// Stores ephemeron entries where the EphemeronHashTable is in old-space,
......@@ -511,6 +513,8 @@ class Heap {
void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
void UpdateCurrentEpoch(GarbageCollector collector);
inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress();
inline Address* OldSpaceAllocationTopAddress();
......@@ -1558,6 +1562,9 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object);
CollectionEpoch epoch_young() { return epoch_young_; }
CollectionEpoch epoch_full() { return epoch_full_; }
private:
using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
FullObjectSlot pointer);
......@@ -2331,6 +2338,11 @@ class Heap {
std::unique_ptr<third_party_heap::Heap> tp_heap_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class ArrayBufferCollector;
......
......@@ -178,8 +178,10 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
static_cast<int>(gc_reason));
HistogramTimerScope incremental_marking_scope(
counters->gc_incremental_marking_start());
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START);
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->epoch_full());
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart();
start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
......@@ -782,8 +784,9 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
StepOrigin step_origin) {
HistogramTimerScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
DCHECK(!IsStopped());
ScheduleBytesToMarkBasedOnTime(heap()->MonotonicallyIncreasingTimeInMs());
......
......@@ -3119,8 +3119,8 @@ class PageEvacuationJob : public v8::JobTask {
TRACE_GC(tracer_, evacuator->GetTracingScope());
ProcessItems(delegate, evacuator);
} else {
TRACE_GC1(tracer_, evacuator->GetBackgroundTracingScope(),
ThreadKind::kBackground);
TRACE_GC_EPOCH(tracer_, evacuator->GetBackgroundTracingScope(),
ThreadKind::kBackground);
ProcessItems(delegate, evacuator);
}
}
......@@ -3493,7 +3493,7 @@ class PointersUpdatingJob : public v8::JobTask {
TRACE_GC(tracer_, scope_);
UpdatePointers(delegate);
} else {
TRACE_GC1(tracer_, background_scope_, ThreadKind::kBackground);
TRACE_GC_EPOCH(tracer_, background_scope_, ThreadKind::kBackground);
UpdatePointers(delegate);
}
}
......@@ -4866,9 +4866,9 @@ class YoungGenerationMarkingJob : public v8::JobTask {
GCTracer::Scope::MINOR_MC_MARK_PARALLEL);
ProcessItems(delegate);
} else {
TRACE_GC1(collector_->heap()->tracer(),
GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
ThreadKind::kBackground);
TRACE_GC_EPOCH(collector_->heap()->tracer(),
GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
ThreadKind::kBackground);
ProcessItems(delegate);
}
}
......
......@@ -160,8 +160,8 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
: unmapper_(unmapper), tracer_(isolate->heap()->tracer()) {}
void Run(JobDelegate* delegate) override {
TRACE_GC1(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
ThreadKind::kBackground);
TRACE_GC_EPOCH(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
ThreadKind::kBackground);
unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>(
delegate);
if (FLAG_trace_unmapper) {
......
......@@ -182,9 +182,9 @@ void ScavengerCollector::JobTask::Run(JobDelegate* delegate) {
GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL);
ProcessItems(delegate, scavenger);
} else {
TRACE_GC1(outer_->heap_->tracer(),
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
ThreadKind::kBackground);
TRACE_GC_EPOCH(outer_->heap_->tracer(),
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
ThreadKind::kBackground);
ProcessItems(delegate, scavenger);
}
}
......
......@@ -86,8 +86,8 @@ class Sweeper::SweeperJob final : public JobTask {
TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP);
RunImpl(delegate);
} else {
TRACE_GC1(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
RunImpl(delegate);
}
}
......@@ -596,8 +596,8 @@ class Sweeper::IterabilityTask final : public CancelableTask {
private:
void RunInternal() final {
TRACE_GC1(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING,
ThreadKind::kBackground);
for (Page* page : sweeper_->iterability_list_) {
sweeper_->MakeIterable(page);
}
......
......@@ -425,6 +425,7 @@
F(HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) \
F(HEAP_PROLOGUE) \
F(HEAP_PROLOGUE_SAFEPOINT) \
F(MARK_COMPACTOR) \
TOP_MC_SCOPES(F) \
F(MC_CLEAR_DEPENDENT_CODE) \
F(MC_CLEAR_FLUSHABLE_BYTECODE) \
......@@ -466,6 +467,7 @@
F(MC_SWEEP_CODE) \
F(MC_SWEEP_MAP) \
F(MC_SWEEP_OLD) \
F(MINOR_MARK_COMPACTOR) \
F(MINOR_MC) \
F(MINOR_MC_CLEAR) \
F(MINOR_MC_CLEAR_STRING_TABLE) \
......@@ -491,6 +493,7 @@
F(MINOR_MC_MARKING_DEQUE) \
F(MINOR_MC_RESET_LIVENESS) \
F(MINOR_MC_SWEEPING) \
F(SCAVENGER) \
F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \
F(SCAVENGER_FAST_PROMOTE) \
F(SCAVENGER_FREE_REMEMBERED_SET) \
......@@ -520,4 +523,10 @@
F(MINOR_MC_BACKGROUND_MARKING) \
F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
// Scopes belonging to young-generation collections. Consumed by
// GCTracer::Scope::NeedsYoungEpoch to decide whether a trace event is tagged
// with the young epoch or the full epoch.
#define TRACER_YOUNG_EPOCH_SCOPES(F) \
F(BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP) \
F(MINOR_MARK_COMPACTOR) \
F(SCAVENGER) \
F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL)
#endif // V8_INIT_HEAP_SYMBOLS_H_
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment