Commit 9899864a authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Make GarbageCollector an enum class

Scoping the enumerators avoids name clashes: GCTracer::Scope and
GCTracer::Event otherwise shadow GarbageCollector's MARK_COMPACTOR, SCAVENGER, etc.

Bug: v8:12244, v8:12245
Change-Id: Ibe60fb03ba35c9a9e057cadc7b8f557d9db9437f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3182226
Auto-Submit: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77076}
parent bf17ee0a
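
For context, the sketch below is hypothetical code, not taken from the V8 sources. It illustrates the clash this change removes: an unscoped enum injects its enumerators into the enclosing scope, so nested enums that reuse the names shadow them, while an enum class keeps the names behind GarbageCollector:: and requires the qualified spelling seen throughout the diff.

// Hypothetical illustration of the shadowing problem; the types are
// simplified stand-ins, not the real GCTracer or Heap declarations.

// Unscoped enum: SCAVENGER, MARK_COMPACTOR, ... leak into the enclosing scope.
enum GarbageCollectorUnscoped { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

struct TracerLike {
  // Reusing the names inside a class shadows the enumerators above within
  // TracerLike, which is the kind of clash the commit message describes.
  enum EventType { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
};

// Scoped enum (what this commit switches to): enumerators are only reachable
// as GarbageCollector::SCAVENGER etc., so nested names cannot collide.
enum class GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

const char* CollectorName(GarbageCollector collector) {
  switch (collector) {
    case GarbageCollector::SCAVENGER:
      return "Scavenger";
    case GarbageCollector::MINOR_MARK_COMPACTOR:
      return "Minor Mark-Compact";
    case GarbageCollector::MARK_COMPACTOR:
      return "Mark-Compact";
  }
  return "Unknown collector";
}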
@@ -877,7 +877,7 @@ enum MinimumCapacity {
   USE_CUSTOM_MINIMUM_CAPACITY
 };
 
-enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
+enum class GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
 
 enum class CompactionSpaceKind {
   kNone,
@@ -257,14 +257,14 @@ void GCTracer::Start(GarbageCollector collector,
   previous_ = current_;
 
   switch (collector) {
-    case SCAVENGER:
+    case GarbageCollector::SCAVENGER:
       current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
       break;
-    case MINOR_MARK_COMPACTOR:
+    case GarbageCollector::MINOR_MARK_COMPACTOR:
       current_ =
           Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
       break;
-    case MARK_COMPACTOR:
+    case GarbageCollector::MARK_COMPACTOR:
       if (heap_->incremental_marking()->WasActivated()) {
         current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                          collector_reason);
@@ -344,10 +344,11 @@ void GCTracer::Stop(GarbageCollector collector) {
   }
 
   DCHECK_LE(0, start_counter_);
 
-  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
-         (collector == MINOR_MARK_COMPACTOR &&
+  DCHECK((collector == GarbageCollector::SCAVENGER &&
+          current_.type == Event::SCAVENGER) ||
+         (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
           current_.type == Event::MINOR_MARK_COMPACTOR) ||
-         (collector == MARK_COMPACTOR &&
+         (collector == GarbageCollector::MARK_COMPACTOR &&
           (current_.type == Event::MARK_COMPACTOR ||
            current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));
@@ -461,18 +461,18 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   if (space != NEW_SPACE && space != NEW_LO_SPACE) {
     isolate_->counters()->gc_compactor_caused_by_request()->Increment();
     *reason = "GC in old space requested";
-    return MARK_COMPACTOR;
+    return GarbageCollector::MARK_COMPACTOR;
   }
 
   if (FLAG_gc_global || ShouldStressCompaction() || !new_space()) {
     *reason = "GC in old space forced by flags";
-    return MARK_COMPACTOR;
+    return GarbageCollector::MARK_COMPACTOR;
   }
 
   if (incremental_marking()->NeedsFinalization() &&
       AllocationLimitOvershotByLargeMargin()) {
     *reason = "Incremental marking needs finalization";
-    return MARK_COMPACTOR;
+    return GarbageCollector::MARK_COMPACTOR;
   }
 
   if (!CanPromoteYoungAndExpandOldGeneration(0)) {
@@ -480,7 +480,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
         ->gc_compactor_caused_by_oldspace_exhaustion()
         ->Increment();
     *reason = "scavenge might not succeed";
-    return MARK_COMPACTOR;
+    return GarbageCollector::MARK_COMPACTOR;
   }
 
   // Default
@@ -1252,7 +1252,7 @@ void Heap::DeoptMarkedAllocationSites() {
 }
 
 void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
-  if (collector == MARK_COMPACTOR) {
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
     memory_pressure_level_.store(MemoryPressureLevel::kNone,
                                  std::memory_order_relaxed);
   }
@@ -1726,7 +1726,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
   size_t committed_memory_before = 0;
 
-  if (collector == MARK_COMPACTOR) {
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
     committed_memory_before = CommittedOldGenerationMemory();
     if (cpp_heap()) {
       // CppHeap needs a stack marker at the top of all entry points to allow
@@ -1763,8 +1763,9 @@ bool Heap::CollectGarbage(AllocationSpace space,
       PROFILE(isolate_, CodeMovingGCEvent());
     }
 
-    GCType gc_type = collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact
-                                                 : kGCTypeScavenge;
+    GCType gc_type = collector == GarbageCollector::MARK_COMPACTOR
+                         ? kGCTypeMarkSweepCompact
+                         : kGCTypeScavenge;
     {
       GCCallbacksScope scope(this);
       // Temporary override any embedder stack state as callbacks may create
@@ -1817,17 +1818,19 @@ bool Heap::CollectGarbage(AllocationSpace space,
         CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
       }
     }
-    if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
+    if (collector == GarbageCollector::MARK_COMPACTOR ||
+        collector == GarbageCollector::SCAVENGER) {
       tracer()->RecordGCPhasesHistograms(gc_type_timer);
     }
   }
 
   GarbageCollectionEpilogue();
-  if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
+  if (collector == GarbageCollector::MARK_COMPACTOR &&
+      FLAG_track_detached_contexts) {
     isolate()->CheckDetachedContextsAfterGC();
   }
 
-  if (collector == MARK_COMPACTOR) {
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
     // Calculate used memory first, then committed memory. Following code
     // assumes that committed >= used, which might not hold when this is
     // calculated in the wrong order and background threads allocate
@@ -1856,7 +1859,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
     tracer()->Stop(collector);
   }
 
-  if (collector == MARK_COMPACTOR &&
+  if (collector == GarbageCollector::MARK_COMPACTOR &&
       (gc_callback_flags & (kGCCallbackFlagForced |
                             kGCCallbackFlagCollectAllAvailableGarbage)) != 0) {
     isolate()->CountUsage(v8::Isolate::kForcedGC);
@@ -2148,11 +2151,11 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
 namespace {
 GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
   switch (collector) {
-    case MARK_COMPACTOR:
+    case GarbageCollector::MARK_COMPACTOR:
       return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
-    case MINOR_MARK_COMPACTOR:
+    case GarbageCollector::MINOR_MARK_COMPACTOR:
       return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR;
-    case SCAVENGER:
+    case GarbageCollector::SCAVENGER:
       return GCTracer::Scope::ScopeId::SCAVENGER;
   }
   UNREACHABLE();
@@ -2199,13 +2202,13 @@ size_t Heap::PerformGarbageCollection(
       NewSpaceSize() + (new_lo_space() ? new_lo_space()->SizeOfObjects() : 0);
 
   switch (collector) {
-    case MARK_COMPACTOR:
+    case GarbageCollector::MARK_COMPACTOR:
       MarkCompact();
       break;
-    case MINOR_MARK_COMPACTOR:
+    case GarbageCollector::MINOR_MARK_COMPACTOR:
       MinorMarkCompact();
       break;
-    case SCAVENGER:
+    case GarbageCollector::SCAVENGER:
       Scavenge();
       break;
   }
@@ -2215,14 +2218,14 @@ size_t Heap::PerformGarbageCollection(
   UpdateSurvivalStatistics(static_cast<int>(start_young_generation_size));
   ConfigureInitialOldGenerationSize();
 
-  if (collector != MARK_COMPACTOR) {
+  if (collector != GarbageCollector::MARK_COMPACTOR) {
     // Objects that died in the new space might have been accounted
     // as bytes marked ahead of schedule by the incremental marker.
     incremental_marking()->UpdateMarkedBytesAfterScavenge(
         start_young_generation_size - SurvivedYoungObjectSize());
   }
 
-  if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
+  if (!fast_promotion_mode_ || collector == GarbageCollector::MARK_COMPACTOR) {
     ComputeFastPromotionMode();
   }
@@ -2243,7 +2246,7 @@ size_t Heap::PerformGarbageCollection(
     isolate_->global_handles()->InvokeFirstPassWeakCallbacks();
   }
 
-  if (collector == MARK_COMPACTOR) {
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EMBEDDER_TRACING_EPILOGUE);
     // TraceEpilogue may trigger operations that invalidate global handles. It
     // has to be called *after* all other operations that potentially touch and
@@ -2282,7 +2285,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   base::MutexGuard guard(isolate()->client_isolate_mutex());
 
   const char* collector_reason = nullptr;
-  GarbageCollector collector = MARK_COMPACTOR;
+  GarbageCollector collector = GarbageCollector::MARK_COMPACTOR;
 
   tracer()->Start(collector, gc_reason, collector_reason);
@@ -2300,7 +2303,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
     client_heap->shared_map_allocator_->FreeLinearAllocationArea();
   });
 
-  PerformGarbageCollection(MARK_COMPACTOR);
+  PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR);
 
   isolate()->IterateClientIsolates([initiator](Isolate* client) {
     GlobalSafepoint::StopMainThread stop_main_thread =
@@ -2357,7 +2360,7 @@ void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
 void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }
 
 void Heap::RecomputeLimits(GarbageCollector collector) {
-  if (!((collector == MARK_COMPACTOR) ||
+  if (!((collector == GarbageCollector::MARK_COMPACTOR) ||
         (HasLowYoungGenerationAllocationRate() &&
          old_generation_size_configured_))) {
     return;
@@ -2389,7 +2392,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
   size_t new_space_capacity = NewSpaceCapacity();
   HeapGrowingMode mode = CurrentHeapGrowingMode();
 
-  if (collector == MARK_COMPACTOR) {
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
     external_memory_.ResetAfterGC();
     set_old_generation_allocation_limit(
@@ -474,24 +474,26 @@ class Heap {
   }
 
   static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
-    return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
+    return collector == GarbageCollector::SCAVENGER ||
+           collector == GarbageCollector::MINOR_MARK_COMPACTOR;
   }
 
   static inline GarbageCollector YoungGenerationCollector() {
 #if ENABLE_MINOR_MC
-    return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
+    return (FLAG_minor_mc) ? GarbageCollector::MINOR_MARK_COMPACTOR
+                           : GarbageCollector::SCAVENGER;
 #else
-    return SCAVENGER;
+    return GarbageCollector::SCAVENGER;
 #endif  // ENABLE_MINOR_MC
   }
 
   static inline const char* CollectorName(GarbageCollector collector) {
     switch (collector) {
-      case SCAVENGER:
+      case GarbageCollector::SCAVENGER:
         return "Scavenger";
-      case MARK_COMPACTOR:
+      case GarbageCollector::MARK_COMPACTOR:
         return "Mark-Compact";
-      case MINOR_MARK_COMPACTOR:
+      case GarbageCollector::MINOR_MARK_COMPACTOR:
         return "Minor Mark-Compact";
     }
     return "Unknown collector";
@@ -3899,7 +3899,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
   void UpdateUntypedPointers() {
     if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
       DCHECK_IMPLIES(
-          collector == MARK_COMPACTOR,
+          collector == GarbageCollector::MARK_COMPACTOR,
           chunk_->SweepingDone() &&
               chunk_->sweeping_slot_set<AccessMode::NON_ATOMIC>() == nullptr);
@@ -3912,9 +3912,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
           },
           SlotSet::FREE_EMPTY_BUCKETS);
-      DCHECK_IMPLIES(
-          collector == MARK_COMPACTOR && FLAG_always_promote_young_mc,
-          slots == 0);
+      DCHECK_IMPLIES(collector == GarbageCollector::MARK_COMPACTOR &&
+                         FLAG_always_promote_young_mc,
+                     slots == 0);
       if (slots == 0) {
         chunk_->ReleaseSlotSet<OLD_TO_NEW>();
@@ -3923,7 +3923,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
     if (chunk_->sweeping_slot_set<AccessMode::NON_ATOMIC>()) {
       DCHECK_IMPLIES(
-          collector == MARK_COMPACTOR,
+          collector == GarbageCollector::MARK_COMPACTOR,
           !chunk_->SweepingDone() &&
              (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>()) ==
                  nullptr);
@@ -3938,9 +3938,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
           },
           SlotSet::FREE_EMPTY_BUCKETS);
-      DCHECK_IMPLIES(
-          collector == MARK_COMPACTOR && FLAG_always_promote_young_mc,
-          slots == 0);
+      DCHECK_IMPLIES(collector == GarbageCollector::MARK_COMPACTOR &&
+                         FLAG_always_promote_young_mc,
+                     slots == 0);
       if (slots == 0) {
         chunk_->ReleaseSweepingSlotSet();
@@ -4042,8 +4042,8 @@ std::unique_ptr<UpdatingItem> MarkCompactCollector::CreateToSpaceUpdatingItem(
 std::unique_ptr<UpdatingItem>
 MarkCompactCollector::CreateRememberedSetUpdatingItem(
     MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
-  return std::make_unique<
-      RememberedSetUpdatingItem<NonAtomicMarkingState, MARK_COMPACTOR>>(
+  return std::make_unique<RememberedSetUpdatingItem<
+      NonAtomicMarkingState, GarbageCollector::MARK_COMPACTOR>>(
       heap(), non_atomic_marking_state(), chunk, updating_mode);
 }
@@ -4991,8 +4991,8 @@ MinorMarkCompactCollector::CreateToSpaceUpdatingItem(MemoryChunk* chunk,
 std::unique_ptr<UpdatingItem>
 MinorMarkCompactCollector::CreateRememberedSetUpdatingItem(
     MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
-  return std::make_unique<
-      RememberedSetUpdatingItem<NonAtomicMarkingState, MINOR_MARK_COMPACTOR>>(
+  return std::make_unique<RememberedSetUpdatingItem<
+      NonAtomicMarkingState, GarbageCollector::MINOR_MARK_COMPACTOR>>(
      heap(), non_atomic_marking_state(), chunk, updating_mode);
 }
@@ -179,10 +179,10 @@ TEST_F(GCTracerTest, RegularScope) {
   EXPECT_DOUBLE_EQ(0.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
   // Sample not added because it's not within a started tracer.
   tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(100.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
 }
@@ -194,12 +194,12 @@ TEST_F(GCTracerTest, IncrementalScope) {
       0.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
   // Sample is added because its ScopeId is listed as incremental sample.
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   // Switch to incremental MC to enable writing back incremental scopes.
   tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(
       200.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
 }
@@ -211,15 +211,15 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
   // Round 1.
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 50);
   // Scavenger has no impact on incremental marking details.
-  tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
+  tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
                 "collector unittest");
-  tracer->Stop(SCAVENGER);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Stop(GarbageCollector::SCAVENGER);
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   // Switch to incremental MC to enable writing back incremental scopes.
   tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(
       100,
       tracer->current_
@@ -239,12 +239,12 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
   // Round 2. Numbers should be reset.
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 13);
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 15);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   // Switch to incremental MC to enable writing back incremental scopes.
   tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
   tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 122);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(
       122,
       tracer->current_
@@ -276,24 +276,24 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {
   EXPECT_EQ(1000000 / 100,
             tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
   // Scavenger has no impact on incremental marking details.
-  tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
+  tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
                 "collector unittest");
-  tracer->Stop(SCAVENGER);
+  tracer->Stop(GarbageCollector::SCAVENGER);
   // 1000000 bytes in 100ms.
   tracer->AddIncrementalMarkingStep(100, 1000000);
   EXPECT_EQ(300, tracer->incremental_marking_duration_);
   EXPECT_EQ(3000000u, tracer->incremental_marking_bytes_);
   EXPECT_EQ(1000000 / 100,
             tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   // Switch to incremental MC.
   tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
   // 1000000 bytes in 100ms.
   tracer->AddIncrementalMarkingStep(100, 1000000);
   EXPECT_EQ(400, tracer->incremental_marking_duration_);
   EXPECT_EQ(4000000u, tracer->incremental_marking_bytes_);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_EQ(400, tracer->current_.incremental_marking_duration);
   EXPECT_EQ(4000000u, tracer->current_.incremental_marking_bytes);
   EXPECT_EQ(0, tracer->incremental_marking_duration_);
@@ -303,11 +303,11 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {
   // Round 2.
   tracer->AddIncrementalMarkingStep(2000, 1000);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   // Switch to incremental MC.
   tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ((4000000.0 / 400 + 1000.0 / 2000) / 2,
                    static_cast<double>(
                        tracer->IncrementalMarkingSpeedInBytesPerMillisecond()));
@@ -352,13 +352,13 @@ TEST_F(GCTracerTest, MutatorUtilization) {
 TEST_F(GCTracerTest, BackgroundScavengerScope) {
   GCTracer* tracer = i_isolate()->heap()->tracer();
   tracer->ResetForTesting();
-  tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
+  tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
                 "collector unittest");
   tracer->AddScopeSampleBackground(
       GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 10);
   tracer->AddScopeSampleBackground(
       GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 1);
-  tracer->Stop(SCAVENGER);
+  tracer->Stop(GarbageCollector::SCAVENGER);
   EXPECT_DOUBLE_EQ(
       11, tracer->current_
              .scopes[GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]);
@@ -367,8 +367,8 @@ TEST_F(GCTracerTest, BackgroundScavengerScope) {
 TEST_F(GCTracerTest, BackgroundMinorMCScope) {
   GCTracer* tracer = i_isolate()->heap()->tracer();
   tracer->ResetForTesting();
-  tracer->Start(MINOR_MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MINOR_MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
                                    10);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
@@ -381,7 +381,7 @@ TEST_F(GCTracerTest, BackgroundMinorMCScope) {
       GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 30);
   tracer->AddScopeSampleBackground(
       GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 3);
-  tracer->Stop(MINOR_MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MINOR_MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(
       11,
       tracer->current_.scopes[GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING]);
@@ -401,14 +401,14 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
                                    200);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 10);
   // Scavenger should not affect the major mark-compact scopes.
-  tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
+  tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
                 "collector unittest");
-  tracer->Stop(SCAVENGER);
+  tracer->Stop(GarbageCollector::SCAVENGER);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 20);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 1);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 2);
-  tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
-                "collector unittest");
+  tracer->Start(GarbageCollector::MARK_COMPACTOR,
+                GarbageCollectionReason::kTesting, "collector unittest");
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
                                    30);
   tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
@@ -417,7 +417,7 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
       GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 40);
   tracer->AddScopeSampleBackground(
       GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 4);
-  tracer->Stop(MARK_COMPACTOR);
+  tracer->Stop(GarbageCollector::MARK_COMPACTOR);
   EXPECT_DOUBLE_EQ(
       111, tracer->current_.scopes[GCTracer::Scope::MC_BACKGROUND_MARKING]);
   EXPECT_DOUBLE_EQ(