Commit 89f8435e authored by Michael Lippautz, committed by V8 LUCI CQ

[heap] Simplify finalization of incremental marking

IM::Finalize() merely finished marking through the incremental-marking
path in the atomic pause. Avoid the Hurry() call, since the marking
worklists would be drained anyway by parallel marking.

Bug: v8:12775
Change-Id: Ice72a8bb5f900368eadec7f62bf18e03d568454b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3574547
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79831}
parent e36e6a88
......@@ -574,31 +574,9 @@ StepResult IncrementalMarking::EmbedderStep(double expected_duration_ms,
: StepResult::kMoreWorkRemaining;
}
// Eagerly drains the local marking worklist on the main thread so that
// incremental marking can be declared COMPLETE. (This function is removed
// by this commit: the atomic pause drains the worklists with parallel
// marking anyway, making the main-thread hurry redundant.)
void IncrementalMarking::Hurry() {
// No-op if there is nothing left to mark.
if (!local_marking_worklists()->IsEmpty()) {
double start = 0.0;
if (FLAG_trace_incremental_marking) {
// Timestamp only needed when tracing is enabled.
start = heap_->MonotonicallyIncreasingTimeInMs();
// NOTE(review): the nested re-check of the same flag is redundant —
// likely an artifact of how this diff was rendered; verify against the
// original source.
if (FLAG_trace_incremental_marking) {
heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
}
}
// Process the entire worklist (0 = no deadline / unbounded work).
collector_->ProcessMarkingWorklist(0);
// Worklist is now empty, so marking is complete.
SetState(COMPLETE);
if (FLAG_trace_incremental_marking) {
double end = heap_->MonotonicallyIncreasingTimeInMs();
double delta = end - start;
// NOTE(review): redundant nested flag check, as above.
if (FLAG_trace_incremental_marking) {
heap()->isolate()->PrintWithTimestamp(
"[IncrementalMarking] Complete (hurry), spent %d ms.\n",
static_cast<int>(delta));
}
}
}
}
bool IncrementalMarking::Stop() {
if (IsStopped()) return false;
void IncrementalMarking::Stop() {
if (IsStopped()) return;
if (FLAG_trace_incremental_marking) {
int old_generation_size_mb =
static_cast<int>(heap()->OldGenerationSizeOfObjects() / MB);
......@@ -626,21 +604,16 @@ void IncrementalMarking::Stop() {
FinishBlackAllocation();
// Merge live bytes counters of background threads
for (auto pair : background_live_bytes_) {
for (const auto& pair : background_live_bytes_) {
MemoryChunk* memory_chunk = pair.first;
intptr_t live_bytes = pair.second;
if (live_bytes) {
marking_state()->IncrementLiveBytes(memory_chunk, live_bytes);
}
}
background_live_bytes_.clear();
}
void IncrementalMarking::Finalize() {
Hurry();
Stop();
return true;
}
void IncrementalMarking::FinalizeMarking(CompletionAction action) {
......
......@@ -135,18 +135,14 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
bool WasActivated();
void Start(GarbageCollectionReason gc_reason);
// Returns true if incremental marking was running and false otherwise.
bool Stop();
void FinalizeIncrementally();
void UpdateMarkingWorklistAfterYoungGenGC();
void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);
void Hurry();
void Finalize();
void Stop();
void FinalizeMarking(CompletionAction action);
void MarkingComplete(CompletionAction action);
......
......@@ -966,8 +966,6 @@ void MarkCompactCollector::AbortCompaction() {
}
void MarkCompactCollector::Prepare() {
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
#ifdef DEBUG
DCHECK(state_ == IDLE);
state_ = PREPARE_GC;
......@@ -975,7 +973,7 @@ void MarkCompactCollector::Prepare() {
DCHECK(!sweeping_in_progress());
if (!was_marked_incrementally_) {
if (!heap()->incremental_marking()->IsMarking()) {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
auto embedder_flags = heap_->flags_for_embedder_tracer();
......@@ -2358,14 +2356,12 @@ void MarkCompactCollector::MarkLiveObjects() {
// with the C stack limit check.
PostponeInterruptsScope postpone(isolate());
bool was_marked_incrementally = false;
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FINISH_INCREMENTAL);
IncrementalMarking* incremental_marking = heap_->incremental_marking();
if (was_marked_incrementally_) {
incremental_marking->Finalize();
if (heap_->incremental_marking()->Stop()) {
MarkingBarrier::PublishAll(heap());
} else {
CHECK(incremental_marking->IsStopped());
was_marked_incrementally = true;
}
}
......@@ -2475,7 +2471,11 @@ void MarkCompactCollector::MarkLiveObjects() {
&IsUnmarkedHeapObject);
}
}
if (was_marked_incrementally_) {
if (was_marked_incrementally) {
// Disable the marking barrier after concurrent/parallel marking has
// finished as it will reset page flags that share the same bitmap as
// the evacuation candidate bit.
MarkingBarrier::DeactivateAll(heap());
GlobalHandles::DisableMarkingBarrier(heap()->isolate());
}
......
......@@ -785,7 +785,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
const bool is_shared_heap_;
bool was_marked_incrementally_ = false;
bool evacuation_ = false;
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment