Commit 655866de authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Replace IncrementalMarking::IsRunning() with IsMarking()

IsRunning() and IsMarking() are now equivalent, so IsRunning() can be
removed in favor of IsMarking().

IsComplete() is also renamed to IsMarkingComplete().

Bug: v8:12775
Change-Id: Ife88be4d674af055590ba5178ec1e410f8fa89d9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3849833
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82665}
parent d650d085
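For reference, here is a minimal sketch (not the author's code) of the IncrementalMarking query interface as it looks after this change, condensed from the header hunk further down. ShouldFinalize() is stubbed out so the snippet stands alone; in the real class it delegates to the mark-compact collector.

```cpp
// Sketch of the predicate surface after the rename: IsRunning() is removed,
// callers query IsMarking() directly, and IsComplete() becomes
// IsMarkingComplete(). Everything except these three predicates is elided.
class IncrementalMarking final {
 public:
  bool IsStopped() const { return !IsMarking(); }
  bool IsMarking() const { return is_marking_; }
  bool IsMarkingComplete() const { return IsMarking() && ShouldFinalize(); }

 private:
  // Placeholder: the real ShouldFinalize() asks the mark-compact collector
  // whether the remaining marking work allows finalization.
  bool ShouldFinalize() const { return false; }

  bool is_marking_ = false;  // set while incremental marking is active
};
```

Call sites update mechanically, e.g. Heap::SelectGarbageCollector now checks incremental_marking()->IsMarkingComplete() and Heap::PerformGarbageCollection checks incremental_marking_->IsMarking(), as in the hunks below.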
@@ -457,7 +457,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
return GarbageCollector::MARK_COMPACTOR;
}
- if (incremental_marking()->IsComplete() &&
+ if (incremental_marking()->IsMarkingComplete() &&
AllocationLimitOvershotByLargeMargin()) {
*reason = "Incremental marking needs finalization";
return GarbageCollector::MARK_COMPACTOR;
@@ -2042,7 +2042,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
}
void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
- if (incremental_marking()->IsRunning() ||
+ if (incremental_marking()->IsMarking() ||
!incremental_marking()->CanBeStarted()) {
return;
}
@@ -2232,7 +2232,7 @@ size_t Heap::PerformGarbageCollection(
// If incremental marking has been activated, the full GC cycle has already
// started, so don't start a new one.
- if (!incremental_marking_->IsRunning()) {
+ if (!incremental_marking_->IsMarking()) {
tracer()->StartCycle(collector, gc_reason, collector_reason,
GCTracer::MarkingType::kAtomic);
}
@@ -2240,7 +2240,7 @@ size_t Heap::PerformGarbageCollection(
tracer()->StartAtomicPause();
if (!Heap::IsYoungGenerationCollector(collector) &&
- incremental_marking_->IsRunning()) {
+ incremental_marking_->IsMarking()) {
tracer()->UpdateCurrentEvent(gc_reason, collector_reason);
}
@@ -3766,7 +3766,7 @@ size_t Heap::NewSpaceCapacity() {
void Heap::FinalizeIncrementalMarkingIfComplete(
GarbageCollectionReason gc_reason) {
- if (incremental_marking()->IsComplete()) {
+ if (incremental_marking()->IsMarkingComplete()) {
CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_);
}
}
@@ -5450,7 +5450,7 @@ bool Heap::IsMainThreadParked(LocalHeap* local_heap) {
bool Heap::IsMarkingComplete(LocalHeap* local_heap) {
if (!local_heap || !local_heap->is_main_thread()) return false;
- return incremental_marking()->IsComplete();
+ return incremental_marking()->IsMarkingComplete();
}
Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() {
......
@@ -95,14 +95,14 @@ void IncrementalMarkingJob::Task::RunInternal() {
job_->is_task_pending_ = false;
}
- if (incremental_marking->IsRunning()) {
+ if (incremental_marking->IsMarking()) {
// All objects are initialized at that point.
heap->new_space()->MarkLabStartInitialized();
heap->new_lo_space()->ResetPendingObject();
heap->incremental_marking()->AdvanceAndFinalizeIfComplete();
- if (incremental_marking->IsRunning()) {
+ if (incremental_marking->IsMarking()) {
// TODO(v8:12775): It is quite suprising that we schedule the task
// immediately here. This was introduced since delayed task were
// unreliable at some point. Investigate whether this is still the case
......
@@ -675,7 +675,7 @@ void IncrementalMarking::AdvanceForTesting(double max_step_size_in_ms) {
void IncrementalMarking::AdvanceOnAllocation() {
DCHECK_EQ(heap_->gc_state(), Heap::NOT_IN_GC);
DCHECK(FLAG_incremental_marking);
- DCHECK(IsRunning());
+ DCHECK(IsMarking());
// Code using an AlwaysAllocateScope assumes that the GC state does not
// change; that implies that no marking steps must be performed.
@@ -686,10 +686,7 @@ void IncrementalMarking::AdvanceOnAllocation() {
ScheduleBytesToMarkBasedOnAllocation();
Step(kMaxStepSizeInMs, StepOrigin::kV8);
- if (IsComplete()) {
-   // TODO(v8:12775): Try to remove.
-   FastForwardSchedule();
+ if (IsMarkingComplete()) {
// Marking cannot be finalized here. Schedule a completion task instead.
if (!ShouldWaitForTask()) {
// When task isn't run soon enough, fall back to stack guard to force
@@ -701,7 +698,7 @@ void IncrementalMarking::AdvanceOnAllocation() {
}
bool IncrementalMarking::ShouldFinalize() const {
- DCHECK(IsRunning());
+ DCHECK(IsMarking());
return heap()
->mark_compact_collector()
@@ -806,7 +803,7 @@ void IncrementalMarking::Step(double max_step_size_in_ms,
heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL));
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL,
ThreadKind::kMain);
- DCHECK(IsRunning());
+ DCHECK(IsMarking());
double start = heap_->MonotonicallyIncreasingTimeInMs();
size_t bytes_to_process = 0;
......
@@ -91,10 +91,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
void NotifyLeftTrimming(HeapObject from, HeapObject to);
- bool IsStopped() const { return !IsRunning(); }
- bool IsRunning() const { return is_marking_; }
- bool IsMarking() const { return IsRunning(); }
- bool IsComplete() const { return IsMarking() && ShouldFinalize(); }
+ bool IsStopped() const { return !IsMarking(); }
+ bool IsMarking() const { return is_marking_; }
+ bool IsMarkingComplete() const { return IsMarking() && ShouldFinalize(); }
bool CollectionRequested() const {
return collection_requested_via_stack_guard_;
......
@@ -172,28 +172,29 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
CHECK(FLAG_incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
- CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
if (marking->IsStopped()) {
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting);
}
- CHECK(marking->IsMarking() || marking->IsComplete());
+ CHECK(marking->IsMarking());
if (!force_completion) return;
SafepointScope scope(heap);
MarkingBarrier::PublishAll(heap);
marking->MarkRootsForTesting();
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kStepSizeInMs);
}
- CHECK(marking->IsComplete());
+ CHECK(marking->IsMarkingComplete());
}
void SimulateFullSpace(v8::internal::PagedSpace* space) {
......
@@ -2484,7 +2484,7 @@ TEST(InstanceOfStubWriteBarrier) {
while (!marking_state->IsBlack(f->code())) {
// Discard any pending GC requests otherwise we will get GC when we enter
// code below.
- CHECK(!marking->IsComplete());
+ CHECK(!marking->IsMarkingComplete());
marking->AdvanceForTesting(kStepSizeInMs);
}
@@ -2578,7 +2578,7 @@ TEST(IdleNotificationFinishMarking) {
CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
const double kStepSizeInMs = 100;
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kStepSizeInMs);
}
@@ -3972,7 +3972,7 @@ TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
}
heap::SimulateIncrementalMarking(CcTest::heap());
- CHECK(marking->IsComplete());
+ CHECK(marking->IsMarkingComplete());
}
@@ -5773,7 +5773,7 @@ TEST(Regress598319) {
// Now we search for a state where we are in incremental marking and have
// only partially marked the large object.
const double kSmallStepSizeInMs = 0.1;
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kSmallStepSizeInMs);
ProgressBar& progress_bar = page->ProgressBar();
if (progress_bar.IsEnabled() && progress_bar.Value() > 0) {
@@ -5795,10 +5795,10 @@ TEST(Regress598319) {
// Finish marking with bigger steps to speed up test.
const double kLargeStepSizeInMs = 1000;
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kLargeStepSizeInMs);
}
- CHECK(marking->IsComplete());
+ CHECK(marking->IsMarkingComplete());
// All objects need to be black after marking. If a white object crossed the
// progress bar, we would fail here.
@@ -5883,10 +5883,10 @@ TEST(Regress615489) {
isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
}
const double kStepSizeInMs = 100;
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kStepSizeInMs);
}
- CHECK(marking->IsComplete());
+ CHECK(marking->IsMarkingComplete());
intptr_t size_before = heap->SizeOfObjects();
CcTest::CollectAllGarbage();
intptr_t size_after = heap->SizeOfObjects();
@@ -5941,7 +5941,7 @@ TEST(Regress631969) {
// Finish incremental marking.
const double kStepSizeInMs = 100;
IncrementalMarking* marking = heap->incremental_marking();
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kStepSizeInMs);
}
......
@@ -17,23 +17,24 @@ void HeapInternalsBase::SimulateIncrementalMarking(Heap* heap,
CHECK(FLAG_incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
SafepointScope scope(heap);
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
- CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
if (marking->IsStopped()) {
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting);
}
- CHECK(marking->IsMarking() || marking->IsComplete());
+ CHECK(marking->IsMarking());
if (!force_completion) return;
- while (!marking->IsComplete()) {
+ while (!marking->IsMarkingComplete()) {
marking->AdvanceForTesting(kStepSizeInMs);
}
- CHECK(marking->IsComplete());
+ CHECK(marking->IsMarkingComplete());
}
void HeapInternalsBase::SimulateFullSpace(
......