Commit 10e811c4 authored by Leszek Swirski's avatar Leszek Swirski Committed by V8 LUCI CQ

Revert "heap: Fix the tracing of GC cycles"

This reverts commit 4ad20bff.

Reason for revert: New test seems to be failing on TSAN/incremental marking stress (https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Linux64%20TSAN%20-%20stress-incremental-marking/6346/overview)

Original change's description:
> heap: Fix the tracing of GC cycles
>
> Conceptually, a full GC cycle completes when the sweeping phase is
> finished. As sweeping is performed concurrently, this happens after
> Heap::CollectGarbage has returned and, at the latest, before the next
> full GC cycle begins. However, an arbitrary number of young GC cycles
> may happen in the meantime. Tracing information for the sweeping phase
> must be added to the corresponding full GC cycle event. Until now, this
> was not done correctly: this information was added to the GCTracer's
> current event and could thus be attributed to a subsequent young or full
> GC cycle.
>
> This CL introduces methods GCTracer::(Start|Stop)Cycle to delimit a
> cycle (still allowing for full GC cycles to be interrupted by young GC
> cycles). These methods are different from (Start|Stop)ObservablePause,
> which delimit the observable pause of each GC. The events of "pending"
> full GC cycles are kept until they are properly amended and reported,
> when the sweeping phase is finished.
>
> Bug: chromium:1154636
> Change-Id: I2fbc65d4807c78656d4abc8c451043f6f86211b1
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3404733
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Omer Katz <omerkatz@chromium.org>
> Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#78905}

Bug: chromium:1154636
Change-Id: Id6688cfe982f9d8159c66d715b7079782a371bed
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3431489
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Owners-Override: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78908}
parent 7f47473a
This diff is collapsed.
......@@ -51,8 +51,6 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
GCTracer::Scope::Name(GCTracer::Scope::ScopeId(scope_id)), \
"epoch", tracer->CurrentEpoch(scope_id))
using CollectionEpoch = uint32_t;
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
......@@ -139,14 +137,6 @@ class V8_EXPORT_PRIVATE GCTracer {
START = 4
};
#ifdef DEBUG
// Returns true if the event corresponds to a young generation GC.
static constexpr bool IsYoungGenerationEvent(Type type) {
DCHECK_NE(START, type);
return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
}
#endif
Event(Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);
......@@ -221,25 +211,13 @@ class V8_EXPORT_PRIVATE GCTracer {
explicit GCTracer(Heap* heap);
CollectionEpoch CurrentEpoch(Scope::ScopeId id) const {
return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
}
// Start and stop a cycle's observable (atomic) pause.
void StartObservablePause(GarbageCollector collector,
GarbageCollectionReason gc_reason,
const char* collector_reason);
void StopObservablePause(GarbageCollector collector);
enum class MarkingType { kAtomic, kIncremental };
// Start and stop a GC cycle (collecting data and reporting results).
void StartCycle(GarbageCollector collector, GarbageCollectionReason gc_reason,
MarkingType marking);
void StopCycle(GarbageCollector collector);
void StopCycleIfPending();
// Start collecting data.
void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason);
void StartInSafepoint();
// Stop collecting data and print results.
void Stop(GarbageCollector collector);
void StopInSafepoint();
void NotifySweepingCompleted();
......@@ -249,19 +227,6 @@ class V8_EXPORT_PRIVATE GCTracer {
void NotifyYoungGenerationHandling(
YoungGenerationHandling young_generation_handling);
#ifdef DEBUG
// Checks if the current event is consistent with a collector.
bool IsConsistentWithCollector(GarbageCollector collector) const {
return (collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
}
#endif
// Sample and accumulate bytes allocated since the last GC.
void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
size_t old_generation_counter_bytes,
......@@ -388,6 +353,8 @@ class V8_EXPORT_PRIVATE GCTracer {
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
#endif // defined(V8_RUNTIME_CALL_STATS)
CollectionEpoch CurrentEpoch(Scope::ScopeId id);
private:
FRIEND_TEST(GCTracer, AverageSpeed);
FRIEND_TEST(GCTracerTest, AllocationThroughput);
......@@ -461,9 +428,6 @@ class V8_EXPORT_PRIVATE GCTracer {
void ReportIncrementalMarkingStepToRecorder();
void ReportYoungCycleToRecorder();
void NewCurrentEvent(Event::Type type, GarbageCollectionReason gc_reason,
const char* collector_reason);
// Pointer to the heap that owns this tracer.
Heap* heap_;
......@@ -474,11 +438,6 @@ class V8_EXPORT_PRIVATE GCTracer {
// Previous tracer event.
Event previous_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Size of incremental marking steps (in bytes) accumulated since the end of
// the last mark compact GC.
size_t incremental_marking_bytes_;
......@@ -536,15 +495,6 @@ class V8_EXPORT_PRIVATE GCTracer {
bool metrics_report_pending_ = false;
// An ongoing GC cycle is considered pending if it has been started with
// |StartCycle()| but has not yet been finished with |StopCycle()|.
bool current_pending_ = false;
// When a full GC cycle is interrupted by a young generation GC cycle, the
// |previous_| event is used as temporary storage for the |current_| event
// that corresponded to the full GC cycle, and this field is set to true.
bool young_gc_while_full_gc_ = false;
v8::metrics::GarbageCollectionFullMainThreadBatchedIncrementalMark
incremental_mark_batched_events_;
......
......@@ -115,6 +115,14 @@
namespace v8 {
namespace internal {
namespace {
std::atomic<CollectionEpoch> global_epoch{0};
CollectionEpoch next_epoch() {
return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
}
} // namespace
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
Isolate* Heap::GetIsolateFromWritableObject(HeapObject object) {
return reinterpret_cast<Isolate*>(
......@@ -1779,7 +1787,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
{
tracer()->StartObservablePause(collector, gc_reason, collector_reason);
tracer()->Start(collector, gc_reason, collector_reason);
DCHECK(AllowGarbageCollection::IsAllowed());
DisallowGarbageCollection no_gc_during_gc;
GarbageCollectionPrologue();
......@@ -1804,8 +1812,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
tp_heap_->CollectGarbage();
} else {
freed_global_handles += PerformGarbageCollection(
collector, gc_reason, collector_reason, gc_callback_flags);
freed_global_handles +=
PerformGarbageCollection(collector, gc_callback_flags);
}
// Clear flags describing the current GC now that the current GC is
// complete. Do this before GarbageCollectionEpilogue() since that could
......@@ -1851,10 +1859,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
}
}
tracer()->StopObservablePause(collector);
if (IsYoungGenerationCollector(collector)) {
tracer()->StopCycle(collector);
}
tracer()->Stop(collector);
}
// Part 3: Invoke all callbacks which should happen after the actual garbage
......@@ -1950,9 +1955,9 @@ void Heap::StartIncrementalMarking(int gc_flags,
VerifyCountersAfterSweeping();
#endif
// Now that sweeping is completed, we can start the next full GC cycle.
tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR, gc_reason,
GCTracer::MarkingType::kIncremental);
// Now that sweeping is completed, we can update the current epoch for the new
// full collection.
UpdateEpochFull();
set_current_gc_flags(gc_flags);
current_gc_callback_flags_ = gc_callback_flags;
......@@ -1966,7 +1971,6 @@ void Heap::CompleteSweepingFull() {
if (cpp_heap()) {
CppHeap::From(cpp_heap())->FinishSweepingIfRunning();
}
tracer()->StopCycleIfPending();
}
void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
......@@ -2162,25 +2166,20 @@ GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
} // namespace
size_t Heap::PerformGarbageCollection(
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason, const v8::GCCallbackFlags gc_callback_flags) {
GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
DisallowJavascriptExecution no_js(isolate());
if (IsYoungGenerationCollector(collector)) {
CompleteSweepingYoung(collector);
tracer()->StartCycle(collector, gc_reason, GCTracer::MarkingType::kAtomic);
} else {
DCHECK_EQ(GarbageCollector::MARK_COMPACTOR, collector);
CompleteSweepingFull();
// If incremental marking has been activated, the full GC cycle has already
// started, so don't start a new one.
if (!incremental_marking_->WasActivated()) {
tracer()->StartCycle(collector, gc_reason,
GCTracer::MarkingType::kAtomic);
}
}
DCHECK(tracer()->IsConsistentWithCollector(collector));
// The last GC cycle is done after completing sweeping. Start the next GC
// cycle.
UpdateCurrentEpoch(collector);
TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain);
base::Optional<SafepointScope> safepoint_scope;
......@@ -2304,8 +2303,10 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
v8::Locker locker(reinterpret_cast<v8::Isolate*>(isolate()));
v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));
tracer()->StartObservablePause(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);
const char* collector_reason = nullptr;
GarbageCollector collector = GarbageCollector::MARK_COMPACTOR;
tracer()->Start(collector, gc_reason, collector_reason);
DCHECK_NOT_NULL(isolate()->global_safepoint());
......@@ -2317,10 +2318,9 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
client->heap()->MakeHeapIterable();
});
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR, gc_reason,
nullptr);
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR);
tracer()->StopObservablePause(GarbageCollector::MARK_COMPACTOR);
tracer()->Stop(collector);
}
void Heap::CompleteSweepingYoung(GarbageCollector collector) {
......@@ -2357,6 +2357,16 @@ void Heap::EnsureSweepingCompleted(HeapObject object) {
mark_compact_collector()->EnsurePageIsSwept(page);
}
void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
if (IsYoungGenerationCollector(collector)) {
epoch_young_ = next_epoch();
} else if (incremental_marking()->IsStopped()) {
epoch_full_ = next_epoch();
}
}
void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }
void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == GarbageCollector::MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() &&
......@@ -3798,9 +3808,7 @@ void Heap::FinalizeIncrementalMarkingIncrementally(
NestedTimedHistogramScope incremental_marking_scope(
isolate()->counters()->gc_incremental_marking_finalize());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingFinalize", "epoch",
tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_FINALIZE));
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingFinalize", "epoch", epoch_full());
TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE,
ThreadKind::kMain);
......
......@@ -270,6 +270,8 @@ using EphemeronRememberedSet =
std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
Object::Hasher>;
using CollectionEpoch = uint32_t;
class Heap {
public:
// Stores ephemeron entries where the EphemeronHashTable is in old-space,
......@@ -548,6 +550,8 @@ class Heap {
void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
void UpdateCurrentEpoch(GarbageCollector collector);
inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress();
inline Address* OldSpaceAllocationTopAddress();
......@@ -1673,6 +1677,11 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object);
CollectionEpoch epoch_young() { return epoch_young_; }
CollectionEpoch epoch_full() { return epoch_full_; }
void UpdateEpochFull();
// Ensure that we have swept all spaces in such a way that we can iterate
// over all objects.
void MakeHeapIterable();
......@@ -1812,8 +1821,7 @@ class Heap {
// Performs garbage collection in a safepoint.
// Returns the number of freed global handles.
size_t PerformGarbageCollection(
GarbageCollector collector, GarbageCollectionReason gc_reason,
const char* collector_reason,
GarbageCollector collector,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Performs garbage collection in the shared heap.
......@@ -2513,6 +2521,11 @@ class Heap {
std::unique_ptr<third_party_heap::Heap> tp_heap_;
// We need two epochs, since there can be scavenges during incremental
// marking.
CollectionEpoch epoch_young_ = 0;
CollectionEpoch epoch_full_ = 0;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class ArrayBufferCollector;
......
......@@ -191,9 +191,8 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
static_cast<int>(gc_reason));
NestedTimedHistogramScope incremental_marking_scope(
counters->gc_incremental_marking_start());
TRACE_EVENT1(
"v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_START));
TRACE_EVENT1("v8", "V8.GCIncrementalMarkingStart", "epoch",
heap_->epoch_full());
TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START,
ThreadKind::kMain);
heap_->tracer()->NotifyIncrementalMarkingStart();
......@@ -792,8 +791,7 @@ StepResult IncrementalMarking::AdvanceWithDeadline(
StepOrigin step_origin) {
NestedTimedHistogramScope incremental_marking_scope(
heap_->isolate()->counters()->gc_incremental_marking());
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch",
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL));
TRACE_EVENT1("v8", "V8.GCIncrementalMarking", "epoch", heap_->epoch_full());
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
DCHECK(!IsStopped());
......
......@@ -7004,9 +7004,6 @@ TEST(Regress978156) {
i::IncrementalMarking* marking = heap->incremental_marking();
if (marking->IsStopped()) {
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
IncrementalMarking::MarkingState* marking_state = marking->marking_state();
......
......@@ -16,11 +16,11 @@
#include <utility>
#include "src/init/v8.h"
#include "src/handles/global-handles.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/init/v8.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
......@@ -129,9 +129,6 @@ UNINITIALIZED_TEST(IncrementalMarkingUsingTasks) {
marking->Stop();
{
SafepointScope scope(heap);
heap->tracer()->StartCycle(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting,
GCTracer::MarkingType::kIncremental);
marking->Start(i::GarbageCollectionReason::kTesting);
}
CHECK(platform.PendingTask());
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment