Commit 9899864a authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Make GarbageCollector an enum class

GCTracer::Scope and GCTracer::Event shadow GarbageCollector's
MARK_COMPACTOR, etc.

Bug: v8:12244, v8:12245
Change-Id: Ibe60fb03ba35c9a9e057cadc7b8f557d9db9437f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3182226
Auto-Submit: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77076}
parent bf17ee0a
......@@ -877,7 +877,7 @@ enum MinimumCapacity {
USE_CUSTOM_MINIMUM_CAPACITY
};
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
enum class GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
enum class CompactionSpaceKind {
kNone,
......
......@@ -257,14 +257,14 @@ void GCTracer::Start(GarbageCollector collector,
previous_ = current_;
switch (collector) {
case SCAVENGER:
case GarbageCollector::SCAVENGER:
current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
break;
case MINOR_MARK_COMPACTOR:
case GarbageCollector::MINOR_MARK_COMPACTOR:
current_ =
Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
break;
case MARK_COMPACTOR:
case GarbageCollector::MARK_COMPACTOR:
if (heap_->incremental_marking()->WasActivated()) {
current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
collector_reason);
......@@ -344,10 +344,11 @@ void GCTracer::Stop(GarbageCollector collector) {
}
DCHECK_LE(0, start_counter_);
DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
(collector == MINOR_MARK_COMPACTOR &&
DCHECK((collector == GarbageCollector::SCAVENGER &&
current_.type == Event::SCAVENGER) ||
(collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
current_.type == Event::MINOR_MARK_COMPACTOR) ||
(collector == MARK_COMPACTOR &&
(collector == GarbageCollector::MARK_COMPACTOR &&
(current_.type == Event::MARK_COMPACTOR ||
current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));
......
......@@ -461,18 +461,18 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
if (space != NEW_SPACE && space != NEW_LO_SPACE) {
isolate_->counters()->gc_compactor_caused_by_request()->Increment();
*reason = "GC in old space requested";
return MARK_COMPACTOR;
return GarbageCollector::MARK_COMPACTOR;
}
if (FLAG_gc_global || ShouldStressCompaction() || !new_space()) {
*reason = "GC in old space forced by flags";
return MARK_COMPACTOR;
return GarbageCollector::MARK_COMPACTOR;
}
if (incremental_marking()->NeedsFinalization() &&
AllocationLimitOvershotByLargeMargin()) {
*reason = "Incremental marking needs finalization";
return MARK_COMPACTOR;
return GarbageCollector::MARK_COMPACTOR;
}
if (!CanPromoteYoungAndExpandOldGeneration(0)) {
......@@ -480,7 +480,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
->gc_compactor_caused_by_oldspace_exhaustion()
->Increment();
*reason = "scavenge might not succeed";
return MARK_COMPACTOR;
return GarbageCollector::MARK_COMPACTOR;
}
// Default
......@@ -1252,7 +1252,7 @@ void Heap::DeoptMarkedAllocationSites() {
}
void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
if (collector == MARK_COMPACTOR) {
if (collector == GarbageCollector::MARK_COMPACTOR) {
memory_pressure_level_.store(MemoryPressureLevel::kNone,
std::memory_order_relaxed);
}
......@@ -1726,7 +1726,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
size_t committed_memory_before = 0;
if (collector == MARK_COMPACTOR) {
if (collector == GarbageCollector::MARK_COMPACTOR) {
committed_memory_before = CommittedOldGenerationMemory();
if (cpp_heap()) {
// CppHeap needs a stack marker at the top of all entry points to allow
......@@ -1763,8 +1763,9 @@ bool Heap::CollectGarbage(AllocationSpace space,
PROFILE(isolate_, CodeMovingGCEvent());
}
GCType gc_type = collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact
: kGCTypeScavenge;
GCType gc_type = collector == GarbageCollector::MARK_COMPACTOR
? kGCTypeMarkSweepCompact
: kGCTypeScavenge;
{
GCCallbacksScope scope(this);
// Temporary override any embedder stack state as callbacks may create
......@@ -1817,17 +1818,19 @@ bool Heap::CollectGarbage(AllocationSpace space,
CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
}
}
if (collector == MARK_COMPACTOR || collector == SCAVENGER) {
if (collector == GarbageCollector::MARK_COMPACTOR ||
collector == GarbageCollector::SCAVENGER) {
tracer()->RecordGCPhasesHistograms(gc_type_timer);
}
}
GarbageCollectionEpilogue();
if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
if (collector == GarbageCollector::MARK_COMPACTOR &&
FLAG_track_detached_contexts) {
isolate()->CheckDetachedContextsAfterGC();
}
if (collector == MARK_COMPACTOR) {
if (collector == GarbageCollector::MARK_COMPACTOR) {
// Calculate used memory first, then committed memory. Following code
// assumes that committed >= used, which might not hold when this is
// calculated in the wrong order and background threads allocate
......@@ -1856,7 +1859,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
tracer()->Stop(collector);
}
if (collector == MARK_COMPACTOR &&
if (collector == GarbageCollector::MARK_COMPACTOR &&
(gc_callback_flags & (kGCCallbackFlagForced |
kGCCallbackFlagCollectAllAvailableGarbage)) != 0) {
isolate()->CountUsage(v8::Isolate::kForcedGC);
......@@ -2148,11 +2151,11 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
namespace {
GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector) {
switch (collector) {
case MARK_COMPACTOR:
case GarbageCollector::MARK_COMPACTOR:
return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
case MINOR_MARK_COMPACTOR:
case GarbageCollector::MINOR_MARK_COMPACTOR:
return GCTracer::Scope::ScopeId::MINOR_MARK_COMPACTOR;
case SCAVENGER:
case GarbageCollector::SCAVENGER:
return GCTracer::Scope::ScopeId::SCAVENGER;
}
UNREACHABLE();
......@@ -2199,13 +2202,13 @@ size_t Heap::PerformGarbageCollection(
NewSpaceSize() + (new_lo_space() ? new_lo_space()->SizeOfObjects() : 0);
switch (collector) {
case MARK_COMPACTOR:
case GarbageCollector::MARK_COMPACTOR:
MarkCompact();
break;
case MINOR_MARK_COMPACTOR:
case GarbageCollector::MINOR_MARK_COMPACTOR:
MinorMarkCompact();
break;
case SCAVENGER:
case GarbageCollector::SCAVENGER:
Scavenge();
break;
}
......@@ -2215,14 +2218,14 @@ size_t Heap::PerformGarbageCollection(
UpdateSurvivalStatistics(static_cast<int>(start_young_generation_size));
ConfigureInitialOldGenerationSize();
if (collector != MARK_COMPACTOR) {
if (collector != GarbageCollector::MARK_COMPACTOR) {
// Objects that died in the new space might have been accounted
// as bytes marked ahead of schedule by the incremental marker.
incremental_marking()->UpdateMarkedBytesAfterScavenge(
start_young_generation_size - SurvivedYoungObjectSize());
}
if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
if (!fast_promotion_mode_ || collector == GarbageCollector::MARK_COMPACTOR) {
ComputeFastPromotionMode();
}
......@@ -2243,7 +2246,7 @@ size_t Heap::PerformGarbageCollection(
isolate_->global_handles()->InvokeFirstPassWeakCallbacks();
}
if (collector == MARK_COMPACTOR) {
if (collector == GarbageCollector::MARK_COMPACTOR) {
TRACE_GC(tracer(), GCTracer::Scope::HEAP_EMBEDDER_TRACING_EPILOGUE);
// TraceEpilogue may trigger operations that invalidate global handles. It
// has to be called *after* all other operations that potentially touch and
......@@ -2282,7 +2285,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
base::MutexGuard guard(isolate()->client_isolate_mutex());
const char* collector_reason = nullptr;
GarbageCollector collector = MARK_COMPACTOR;
GarbageCollector collector = GarbageCollector::MARK_COMPACTOR;
tracer()->Start(collector, gc_reason, collector_reason);
......@@ -2300,7 +2303,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
client_heap->shared_map_allocator_->FreeLinearAllocationArea();
});
PerformGarbageCollection(MARK_COMPACTOR);
PerformGarbageCollection(GarbageCollector::MARK_COMPACTOR);
isolate()->IterateClientIsolates([initiator](Isolate* client) {
GlobalSafepoint::StopMainThread stop_main_thread =
......@@ -2357,7 +2360,7 @@ void Heap::UpdateCurrentEpoch(GarbageCollector collector) {
void Heap::UpdateEpochFull() { epoch_full_ = next_epoch(); }
void Heap::RecomputeLimits(GarbageCollector collector) {
if (!((collector == MARK_COMPACTOR) ||
if (!((collector == GarbageCollector::MARK_COMPACTOR) ||
(HasLowYoungGenerationAllocationRate() &&
old_generation_size_configured_))) {
return;
......@@ -2389,7 +2392,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
size_t new_space_capacity = NewSpaceCapacity();
HeapGrowingMode mode = CurrentHeapGrowingMode();
if (collector == MARK_COMPACTOR) {
if (collector == GarbageCollector::MARK_COMPACTOR) {
external_memory_.ResetAfterGC();
set_old_generation_allocation_limit(
......
......@@ -474,24 +474,26 @@ class Heap {
}
static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
return collector == GarbageCollector::SCAVENGER ||
collector == GarbageCollector::MINOR_MARK_COMPACTOR;
}
static inline GarbageCollector YoungGenerationCollector() {
#if ENABLE_MINOR_MC
return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
return (FLAG_minor_mc) ? GarbageCollector::MINOR_MARK_COMPACTOR
: GarbageCollector::SCAVENGER;
#else
return SCAVENGER;
return GarbageCollector::SCAVENGER;
#endif // ENABLE_MINOR_MC
}
static inline const char* CollectorName(GarbageCollector collector) {
switch (collector) {
case SCAVENGER:
case GarbageCollector::SCAVENGER:
return "Scavenger";
case MARK_COMPACTOR:
case GarbageCollector::MARK_COMPACTOR:
return "Mark-Compact";
case MINOR_MARK_COMPACTOR:
case GarbageCollector::MINOR_MARK_COMPACTOR:
return "Minor Mark-Compact";
}
return "Unknown collector";
......
......@@ -3899,7 +3899,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
void UpdateUntypedPointers() {
if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) {
DCHECK_IMPLIES(
collector == MARK_COMPACTOR,
collector == GarbageCollector::MARK_COMPACTOR,
chunk_->SweepingDone() &&
chunk_->sweeping_slot_set<AccessMode::NON_ATOMIC>() == nullptr);
......@@ -3912,9 +3912,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
},
SlotSet::FREE_EMPTY_BUCKETS);
DCHECK_IMPLIES(
collector == MARK_COMPACTOR && FLAG_always_promote_young_mc,
slots == 0);
DCHECK_IMPLIES(collector == GarbageCollector::MARK_COMPACTOR &&
FLAG_always_promote_young_mc,
slots == 0);
if (slots == 0) {
chunk_->ReleaseSlotSet<OLD_TO_NEW>();
......@@ -3923,7 +3923,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if (chunk_->sweeping_slot_set<AccessMode::NON_ATOMIC>()) {
DCHECK_IMPLIES(
collector == MARK_COMPACTOR,
collector == GarbageCollector::MARK_COMPACTOR,
!chunk_->SweepingDone() &&
(chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>()) ==
nullptr);
......@@ -3938,9 +3938,9 @@ class RememberedSetUpdatingItem : public UpdatingItem {
},
SlotSet::FREE_EMPTY_BUCKETS);
DCHECK_IMPLIES(
collector == MARK_COMPACTOR && FLAG_always_promote_young_mc,
slots == 0);
DCHECK_IMPLIES(collector == GarbageCollector::MARK_COMPACTOR &&
FLAG_always_promote_young_mc,
slots == 0);
if (slots == 0) {
chunk_->ReleaseSweepingSlotSet();
......@@ -4042,8 +4042,8 @@ std::unique_ptr<UpdatingItem> MarkCompactCollector::CreateToSpaceUpdatingItem(
std::unique_ptr<UpdatingItem>
MarkCompactCollector::CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
return std::make_unique<
RememberedSetUpdatingItem<NonAtomicMarkingState, MARK_COMPACTOR>>(
return std::make_unique<RememberedSetUpdatingItem<
NonAtomicMarkingState, GarbageCollector::MARK_COMPACTOR>>(
heap(), non_atomic_marking_state(), chunk, updating_mode);
}
......@@ -4991,8 +4991,8 @@ MinorMarkCompactCollector::CreateToSpaceUpdatingItem(MemoryChunk* chunk,
std::unique_ptr<UpdatingItem>
MinorMarkCompactCollector::CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) {
return std::make_unique<
RememberedSetUpdatingItem<NonAtomicMarkingState, MINOR_MARK_COMPACTOR>>(
return std::make_unique<RememberedSetUpdatingItem<
NonAtomicMarkingState, GarbageCollector::MINOR_MARK_COMPACTOR>>(
heap(), non_atomic_marking_state(), chunk, updating_mode);
}
......
......@@ -179,10 +179,10 @@ TEST_F(GCTracerTest, RegularScope) {
EXPECT_DOUBLE_EQ(0.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
// Sample not added because it's not within a started tracer.
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(100.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
}
......@@ -194,12 +194,12 @@ TEST_F(GCTracerTest, IncrementalScope) {
0.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
// Sample is added because its ScopeId is listed as incremental sample.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
200.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
}
......@@ -211,15 +211,15 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
// Round 1.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 50);
// Scavenger has no impact on incremental marking details.
tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(SCAVENGER);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(GarbageCollector::SCAVENGER);
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
100,
tracer->current_
......@@ -239,12 +239,12 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
// Round 2. Numbers should be reset.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 13);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 15);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 122);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
122,
tracer->current_
......@@ -276,24 +276,24 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {
EXPECT_EQ(1000000 / 100,
tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
// Scavenger has no impact on incremental marking details.
tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(SCAVENGER);
tracer->Stop(GarbageCollector::SCAVENGER);
// 1000000 bytes in 100ms.
tracer->AddIncrementalMarkingStep(100, 1000000);
EXPECT_EQ(300, tracer->incremental_marking_duration_);
EXPECT_EQ(3000000u, tracer->incremental_marking_bytes_);
EXPECT_EQ(1000000 / 100,
tracer->IncrementalMarkingSpeedInBytesPerMillisecond());
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
// 1000000 bytes in 100ms.
tracer->AddIncrementalMarkingStep(100, 1000000);
EXPECT_EQ(400, tracer->incremental_marking_duration_);
EXPECT_EQ(4000000u, tracer->incremental_marking_bytes_);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_EQ(400, tracer->current_.incremental_marking_duration);
EXPECT_EQ(4000000u, tracer->current_.incremental_marking_bytes);
EXPECT_EQ(0, tracer->incremental_marking_duration_);
......@@ -303,11 +303,11 @@ TEST_F(GCTracerTest, IncrementalMarkingSpeed) {
// Round 2.
tracer->AddIncrementalMarkingStep(2000, 1000);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
// Switch to incremental MC.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ((4000000.0 / 400 + 1000.0 / 2000) / 2,
static_cast<double>(
tracer->IncrementalMarkingSpeedInBytesPerMillisecond()));
......@@ -352,13 +352,13 @@ TEST_F(GCTracerTest, MutatorUtilization) {
TEST_F(GCTracerTest, BackgroundScavengerScope) {
GCTracer* tracer = i_isolate()->heap()->tracer();
tracer->ResetForTesting();
tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->AddScopeSampleBackground(
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 10);
tracer->AddScopeSampleBackground(
GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL, 1);
tracer->Stop(SCAVENGER);
tracer->Stop(GarbageCollector::SCAVENGER);
EXPECT_DOUBLE_EQ(
11, tracer->current_
.scopes[GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]);
......@@ -367,8 +367,8 @@ TEST_F(GCTracerTest, BackgroundScavengerScope) {
TEST_F(GCTracerTest, BackgroundMinorMCScope) {
GCTracer* tracer = i_isolate()->heap()->tracer();
tracer->ResetForTesting();
tracer->Start(MINOR_MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MINOR_MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
10);
tracer->AddScopeSampleBackground(GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING,
......@@ -381,7 +381,7 @@ TEST_F(GCTracerTest, BackgroundMinorMCScope) {
GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 30);
tracer->AddScopeSampleBackground(
GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 3);
tracer->Stop(MINOR_MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MINOR_MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
11,
tracer->current_.scopes[GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING]);
......@@ -401,14 +401,14 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
200);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 10);
// Scavenger should not affect the major mark-compact scopes.
tracer->Start(SCAVENGER, GarbageCollectionReason::kTesting,
tracer->Start(GarbageCollector::SCAVENGER, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Stop(SCAVENGER);
tracer->Stop(GarbageCollector::SCAVENGER);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 20);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_MARKING, 1);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_SWEEPING, 2);
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->Start(GarbageCollector::MARK_COMPACTOR,
GarbageCollectionReason::kTesting, "collector unittest");
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
30);
tracer->AddScopeSampleBackground(GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY,
......@@ -417,7 +417,7 @@ TEST_F(GCTracerTest, BackgroundMajorMCScope) {
GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 40);
tracer->AddScopeSampleBackground(
GCTracer::Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS, 4);
tracer->Stop(MARK_COMPACTOR);
tracer->Stop(GarbageCollector::MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(
111, tracer->current_.scopes[GCTracer::Scope::MC_BACKGROUND_MARKING]);
EXPECT_DOUBLE_EQ(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment