Commit 73a1c635 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

heap: Fix the tracing of GC cycles

Conceptually, a full GC cycle completes when the sweeping phase is
finished. As sweeping is performed concurrently, this happens after
Heap::CollectGarbage has returned and, at the latest, before the next
full GC cycle begins. However, an arbitrary number of young GC cycles
may happen in the meantime. Tracing information for the sweeping phase
must be added to the corresponding full GC cycle event. Until now, this
was not done correctly: this information was added to the GCTracer's
current event and could thus be attributed to a subsequent young or full
GC cycle.

This CL introduces methods GCTracer::(Start|Stop)Cycle to delimit a
cycle (still allowing for full GC cycles to be interrupted by young GC
cycles). These methods are different from (Start|Stop)ObservablePause,
which delimit the observable pause of each GC. The events of "pending"
full GC cycles are kept until they are properly amended and reported,
when the sweeping phase is finished.

This is a reland of 4ad20bff
which was reviewed here: https://crrev.com/3404733

Bug: v8:12503
Bug: chromium:1154636
Change-Id: Icc315b53cff1f3b19b8efe49db34340a5608bcd2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3432211
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78911}
parent e2bdbffb
This diff is collapsed.
......@@ -51,6 +51,8 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
GCTracer::Scope::Name(GCTracer::Scope::ScopeId(scope_id)), \
"epoch", tracer->CurrentEpoch(scope_id))
using CollectionEpoch = uint32_t;
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
......@@ -137,6 +139,14 @@ class V8_EXPORT_PRIVATE GCTracer {
START = 4
};
#ifdef DEBUG
// Returns true if the event corresponds to a young generation GC.
// Only compiled in DEBUG builds; intended for consistency checks
// (DCHECKs), not for production control flow.
static constexpr bool IsYoungGenerationEvent(Type type) {
// START is a sentinel, not a real GC cycle type, so it must never be
// queried here.
DCHECK_NE(START, type);
return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
}
#endif
Event(Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);
......@@ -211,13 +221,25 @@ class V8_EXPORT_PRIVATE GCTracer {
explicit GCTracer(Heap* heap);
// Start collecting data.
void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason);
void StartInSafepoint();
// Returns the epoch of the ongoing GC cycle that the given scope belongs
// to: the young epoch for scopes of young-generation GCs, otherwise the
// full epoch. Two counters are needed because scavenges can occur while
// incremental (full) marking is in progress.
CollectionEpoch CurrentEpoch(Scope::ScopeId id) const {
return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
}
// Start and stop a cycle's observable (atomic) pause.
void StartObservablePause(GarbageCollector collector,
GarbageCollectionReason gc_reason,
const char* collector_reason);
void StopObservablePause(GarbageCollector collector);
enum class MarkingType { kAtomic, kIncremental };
// Start and stop a GC cycle (collecting data and reporting results).
void StartCycle(GarbageCollector collector, GarbageCollectionReason gc_reason,
MarkingType marking);
void StopCycle(GarbageCollector collector);
void StopCycleIfPending();
// Stop collecting data and print results.
void Stop(GarbageCollector collector);
void StartInSafepoint();
void StopInSafepoint();
void NotifySweepingCompleted();
......@@ -227,6 +249,19 @@ class V8_EXPORT_PRIVATE GCTracer {
void NotifyYoungGenerationHandling(
YoungGenerationHandling young_generation_handling);
#ifdef DEBUG
// Checks if the current event is consistent with a collector.
// A MARK_COMPACTOR collector is consistent with either an atomic or an
// incremental full GC event; the young collectors each match exactly one
// event type. DEBUG-only, used for DCHECKs around cycle bookkeeping.
bool IsConsistentWithCollector(GarbageCollector collector) const {
return (collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
}
#endif
// Sample and accumulate bytes allocated since the last GC.
void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes,
......@@ -353,8 +388,6 @@ class V8_EXPORT_PRIVATE GCTracer {
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
#endif // defined(V8_RUNTIME_CALL_STATS)
CollectionEpoch CurrentEpoch(Scope::ScopeId id);
private:
FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput);
......@@ -428,6 +461,9 @@ class V8_EXPORT_PRIVATE GCTracer {
void ReportIncrementalMarkingStepToRecorder();
void ReportYoungCycleToRecorder();
void NewCurrentEvent(Event::Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);
// Pointer to the heap that owns this tracer.
Heap* heap_;
......@@ -438,6 +474,11 @@ class V8_EXPORT_PRIVATE GCTracer {
// Previous tracer event.
Event previous_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Size of incremental marking steps (in bytes) accumulated since the end of
// the last mark compact GC.
size_t incremental_marking_bytes_;
......@@ -495,6 +536,15 @@ class V8_EXPORT_PRIVATE GCTracer {
bool metrics_report_pending_ = false;
// An ongoing GC cycle is considered pending if it has been started with
// |StartCycle()| but has not yet been finished with |StopCycle()|.
bool current_pending_ = false;
// When a full GC cycle is interrupted by a young generation GC cycle, the
// |previous_| event is used as temporary storage for the |current_| event
// that corresponded to the full GC cycle, and this field is set to true.
bool young_gc_while_full_gc_ = false;
v8::metrics::GarbageCollectionFullMainThreadBatchedIncrementalMark
incremental_mark_batched_events_;
......
......@@ -115,14 +115,6 @@
namespace v8 {
namespace internal {
namespace {

// Process-wide monotonic counter used to number GC cycles (epochs).
std::atomic<CollectionEpoch> global_epoch{0};

// Hands out a fresh, never-zero epoch value. Relaxed ordering is
// sufficient here: the counter is only used to produce unique ids, not
// to synchronize other memory accesses.
CollectionEpoch next_epoch() {
  return 1 + global_epoch.fetch_add(1, std::memory_order_relaxed);
}

}  // namespace
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>(
......@@ -1787,7 +1779,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
{
tracer()->Start(collector, gc_reason, collector_reason);
tracer()->StartObservablePause(collector, gc_reason, collector_reason);
DCHECK(AllowGarbageCollection::IsAllowed());
DisallowGarbageCollection no_gc_during_gc;
GarbageCollectionPrologue();
......@@ -1812,8 +1804,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
tp_heap_->CollectGarbage();
} else {
freed_global_handles +=
PerformGarbageCollection(collector, gc_callback_flags);
freed_global_handles += PerformGarbageCollection(
collector, gc_reason, collector_reason, gc_callback_flags);
}
// Clear flags describing the current GC now that the current GC is
// complete. Do this before GarbageCollectionEpilogue() since that could
......@@ -1859,7 +1851,10 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
}
tracer()->Stop(collector);
tracer()->StopObservablePause(collector);
if (IsYoungGenerationCollector(collector)) {
tracer()->StopCycle(collector);
}
}
// Part 3: Invoke all callbacks which should happen after the actual garbage
......@@ -1955,9 +1950,9 @@ void Heap::StartIncrementalMarking(int gc_flags,
VerifyCountersAfterSweeping();
#endif
// Now that sweeping is completed, we can update the current epoch for the new
// full collection.
UpdateEpochFull();
// Now that sweeping is completed, we can start the next full GC cycle.
tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR, gc_reason,
GCTracer::MarkingType::kIncremental);
set_current_gc_flags(gc_flags);
current_gc_callback_flags_ = gc_callback_flags;
......@@ -1971,6 +1966,7 @@ void Heap::CompleteSweepingFull() {
if (cpp_heap()) {
CppHeap::From(cpp_heap())->FinishSweepingIfRunning();
}
tracer()->StopCycleIfPending();
}
void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
......@@ -2166,20 +2162,25 @@ GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
} // namespace
size_t Heap::PerformGarbageCollection(
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason, const v8::GCCallbackFlags gc_callback_flags) {
DisallowJavascriptExecution no_js(isolate());
if (IsYoungGenerationCollector(collector)) {
CompleteSweepingYoung(collector);
tracer()->StartCycle(collector, gc_reason, GCTracer::MarkingType::kAtomic);
} else {
DCHECK_EQ(GarbageCollector::MARK_COMPACTOR, collector);
CompleteSweepingFull();
// If incremental marking has been activated, the full GC cycle has already
// started, so don't start a new one.
if (!incremental_marking_->WasActivated()) {
tracer()->StartCycle(collector, gc_reason,
GCTracer::MarkingType::kAtomic);
}
}
// The last GC cycle is done after completing sweeping. Start the next GC
// cycle.
UpdateCurrentEpoch(collector);
DCHECK(tracer()->IsConsistentWithCollector(collector));
TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);
base::Optional<SafepointScope> safepoint_scope;
......@@ -2303,10 +2304,8 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
v8::Locker locker(reinterpret_cast<v8::Isolate*>(isolate()));
v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));
const char* collector_reason = nullptr;
GarbageCollector collector = GarbageCollector::MARK_COMPACTOR;
tracer()->Start(collector, gc_reason, collector_reason);
tracer()->StartObservablePause(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);
DCHECK_NOT_NULL(isolate()->global_safepoint());
......@@ -2318,9 +2317,10 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
client->heap()->MakeHeapIterable();
});
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR);
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);
tracer()->Stop(collector);
tracer()->StopObservablePause(GarbageCollector::MARK_COMPACTOR);
}
void Heap::CompleteSweepingYoung(GarbageCollector collector) {
......@@ -2357,16 +2357,6 @@ void Heap::EnsureSweepingCompleted(HeapObject object) {
mark_compact_collector()->EnsurePageIsSwept(page);
}
// Advances the GC epoch counter(s) for the cycle driven by |collector|.
// Young-generation collectors always begin a new young epoch. For full
// GCs the full epoch is advanced only when incremental marking is
// stopped — presumably because an active incremental marking cycle
// already advanced it via UpdateEpochFull() (TODO confirm against
// callers of UpdateEpochFull).
void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
if (IsYoungGenerationCollector(collector)) {
epoch_young_ = next_epoch();
} else if (incremental_marking()->IsStopped()) {
epoch_full_ = next_epoch();
}
}
// Starts a new full GC epoch unconditionally.
void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }
void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == GarbageCollector::MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() &&
......@@ -3808,7 +3798,9 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
NestedTimedHistogramScope incremental_marking_scope(
isolate()->counters()->gc_incremental_marking_finalize());
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingFinalize", "epoch",
tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_FINALIZE));
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
ThreadKind::kMain);
......
......@@ -270,8 +270,6 @@ using EphemeronRememberedSet =
std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
Object::Hasher>;
using CollectionEpoch = uint32_t;
class Heap {
public:
// Stores ephemeron entries where the EphemeronHashTable is in old-space,
......@@ -550,8 +548,6 @@ class Heap {
void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
void UpdateCurrentEpoch(GarbageCollector collector);
inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress();
inline Address* OldSpaceAllocationTopAddress();
......@@ -1677,11 +1673,6 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object);
CollectionEpoch epoch_young() { return epoch_young_; }
CollectionEpoch epoch_full() { return epoch_full_; }
void UpdateEpochFull();
// Ensure that we have swept all spaces in such a way that we can iterate
// over all objects.
void MakeHeapIterable();
......@@ -1821,7 +1812,8 @@ class Heap {
// Performs garbage collection in a safepoint.
// Returns the number of freed global handles.
size_t PerformGarbageCollection(
GarbageCollector collector,
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Performs garbage collection in the shared heap.
......@@ -2521,11 +2513,6 @@ class Heap {
std::unique_ptr<third_party_heap::Heap> tp_heap_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class ArrayBufferCollector;
......
......@@ -191,8 +191,9 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
static_cast<int>(gc_reason));
NestedTimedHistogramScope incremental_marking_scope(
counters->gc_incremental_marking_start());
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->epoch_full());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_START));
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart();
......@@ -791,7 +792,8 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
StepOrigin step_origin) {
NestedTimedHistogramScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL));
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
DCHECK(!IsStopped());
......
......@@ -7004,6 +7004,9 @@ TEST(Regress978156) {
i::IncrementalMarking* marking = heap->incremental_marking();
if (marking->IsStopped()) {
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
......
......@@ -16,11 +16,11 @@
#include <utility>
#include "src/init/v8.h"
#include "src/handles/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/init/v8.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
......@@ -129,6 +129,9 @@ UNINITIALIZED_TEST(IncrementalMarkingUsingTasks) {
marking->Stop();
{
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
CHECK(platform.PendingTask());
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment