Commit fd7c100a authored by Michael Lippautz, committed by V8 LUCI CQ

[heap] Avoid re-scanning roots during incremental finalization step

Incremental finalization previously used to rescan roots to avoid any
new work showing up in the atomic pause.

With concurrent marking, finalization should be fast enough that we can
skip this extra work. In particular, if we finalize within the same JS
execution, rescanning would double the work, since the atomic pause
needs to rescan all roots anyway.

Bug: v8:12775
Change-Id: I58a5a931da72c8d5c8aee4cd5dad4512954668b4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3570427
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79784}
parent d73ed7bb
......@@ -2437,6 +2437,7 @@ class Heap {
friend class ScavengeTaskObserver;
friend class IgnoreLocalGCRequests;
friend class IncrementalMarking;
friend class IncrementalMarkingRootMarkingVisitor;
friend class IncrementalMarkingJob;
friend class LargeObjectSpace;
friend class LocalHeap;
......
......@@ -41,14 +41,6 @@ bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
return false;
}
// Marks |obj|, discovered through root |root|, for incremental marking:
// transitions it white -> grey and pushes it onto the marking worklist
// (via WhiteToGreyAndPush). No-op if the object was already grey/black.
void IncrementalMarking::MarkRootObject(Root root, HeapObject obj) {
if (heap_->incremental_marking()->WhiteToGreyAndPush(obj)) {
// Only record the retaining root when retaining-path tracking is on;
// V8_UNLIKELY keeps the common fast path free of the bookkeeping.
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(root, obj);
}
}
}
void IncrementalMarking::RestartIfNotMarking() {
if (state_ == COMPLETE) {
state_ = MARKING;
......
......@@ -103,41 +103,6 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
DCHECK(marking_state()->IsBlack(to));
}
// Root visitor used by incremental marking to scan the root set.
// For every root slot holding a HeapObject (shared-heap objects excluded),
// the object is marked grey and pushed onto the marking worklist through
// IncrementalMarking::MarkRootObject().
class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
public:
explicit IncrementalMarkingRootMarkingVisitor(
IncrementalMarking* incremental_marking)
: heap_(incremental_marking->heap()) {}
// Visits a single full (uncompressed) root slot.
void VisitRootPointer(Root root, const char* description,
FullObjectSlot p) override {
// Root slots must never contain packed (forwarding) map words.
DCHECK(!MapWord::IsPacked((*p).ptr()));
MarkObjectByPointer(root, p);
}
// Visits a contiguous range of full root slots [start, end).
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
DCHECK(!MapWord::IsPacked((*p).ptr()));
MarkObjectByPointer(root, p);
}
}
private:
// Marks the object referenced by slot |p|, if any. Smis and objects on
// shared-heap pages are skipped; shared objects are presumably handled by
// the shared-heap GC rather than this isolate's marker — see page check.
void MarkObjectByPointer(Root root, FullObjectSlot p) {
Object object = *p;
if (!object.IsHeapObject()) return;
DCHECK(!MapWord::IsPacked(object.ptr()));
HeapObject heap_object = HeapObject::cast(object);
// Membership in the shared heap is determined via the object's page.
BasicMemoryChunk* target_page =
BasicMemoryChunk::FromHeapObject(heap_object);
if (target_page->InSharedHeap()) return;
heap_->incremental_marking()->MarkRootObject(root, heap_object);
}
Heap* heap_;
};
bool IncrementalMarking::WasActivated() { return was_activated_; }
bool IncrementalMarking::CanBeActivated() {
......@@ -214,6 +179,58 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
incremental_marking_job()->Start(heap_);
}
// Root visitor used by incremental marking to scan the root set.
// For every root slot holding a HeapObject (shared-heap objects excluded),
// the object is marked grey and pushed onto the marking worklist, and — if
// retaining-path tracking is enabled — the retaining root is recorded.
class IncrementalMarkingRootMarkingVisitor final : public RootVisitor {
public:
// Caches the IncrementalMarking pointer up front so MarkObjectByPointer
// does not re-fetch it per slot.
explicit IncrementalMarkingRootMarkingVisitor(Heap* heap)
: heap_(heap), incremental_marking_(heap->incremental_marking()) {}
// Visits a single full (uncompressed) root slot.
void VisitRootPointer(Root root, const char* description,
FullObjectSlot p) override {
// Root slots must never contain packed (forwarding) map words.
DCHECK(!MapWord::IsPacked((*p).ptr()));
MarkObjectByPointer(root, p);
}
// Visits a contiguous range of full root slots [start, end).
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
DCHECK(!MapWord::IsPacked((*p).ptr()));
MarkObjectByPointer(root, p);
}
}
private:
// Marks the object referenced by slot |p|, if any. Smis and shared-heap
// objects are skipped; shared objects are presumably handled by the
// shared-heap GC rather than this isolate's marker — see InSharedHeap().
void MarkObjectByPointer(Root root, FullObjectSlot p) {
Object object = *p;
if (!object.IsHeapObject()) return;
DCHECK(!MapWord::IsPacked(object.ptr()));
HeapObject heap_object = HeapObject::cast(object);
if (heap_object.InSharedHeap()) return;
if (incremental_marking_->WhiteToGreyAndPush(heap_object)) {
// V8_UNLIKELY keeps the retaining-path bookkeeping off the fast path.
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(root, heap_object);
}
}
}
Heap* const heap_;
IncrementalMarking* const incremental_marking_;
};
namespace {
// Scans the strong root set and greys every discovered object.
// The stack, main-thread handles, and weak roots are skipped here;
// per the change description they are covered later (the atomic pause
// rescans all roots anyway) — TODO confirm against IterateRoots callers.
void MarkRoots(Heap* heap) {
IncrementalMarkingRootMarkingVisitor visitor(heap);
heap->IterateRoots(
&visitor,
base::EnumSet<SkipRoot>{SkipRoot::kStack, SkipRoot::kMainThreadHandles,
SkipRoot::kWeak});
}
} // namespace
// Test-only entry point: re-runs root marking on demand so tests (e.g.
// SimulateIncrementalMarking) can mark roots before finalization.
void IncrementalMarking::MarkRootsForTesting() { MarkRoots(heap_); }
void IncrementalMarking::StartMarking() {
if (heap_->isolate()->serializer_enabled()) {
......@@ -256,7 +273,7 @@ void IncrementalMarking::StartMarking() {
StartBlackAllocation();
MarkRoots();
MarkRoots(heap_);
if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
heap_->concurrent_marking()->ScheduleJob();
......@@ -332,17 +349,6 @@ void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
}
}
// Scans the strong root set and greys every discovered object, skipping
// the stack, main-thread handles, and weak roots. May only run while
// marking is active and before finalization completed (DCHECKed below).
void IncrementalMarking::MarkRoots() {
DCHECK(!finalize_marking_completed_);
DCHECK(IsMarking());
IncrementalMarkingRootMarkingVisitor visitor(this);
heap_->IterateRoots(
&visitor,
base::EnumSet<SkipRoot>{SkipRoot::kStack, SkipRoot::kMainThreadHandles,
SkipRoot::kWeak});
}
bool IncrementalMarking::ShouldRetainMap(Map map, int age) {
if (age == 0) {
// The map has aged. Do not retain this map.
......@@ -413,12 +419,6 @@ void IncrementalMarking::FinalizeIncrementally() {
double start = heap_->MonotonicallyIncreasingTimeInMs();
// After finishing incremental marking, we try to discover all unmarked
// objects to reduce the marking load in the final pause.
// 1) We scan and mark the roots again to find all changes to the root set.
// 2) Age and retain maps embedded in optimized code.
MarkRoots();
// Map retaining is needed for performance, not correctness,
// so we can do it only once at the beginning of the finalization.
RetainMaps();
......
......@@ -176,9 +176,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// from white to grey.
V8_INLINE bool WhiteToGreyAndPush(HeapObject obj);
// Marks object referenced from roots.
V8_INLINE void MarkRootObject(Root root, HeapObject obj);
// This function is used to color the object black before it undergoes an
// unsafe layout change. This is a part of synchronization protocol with
// the concurrent marker.
......@@ -221,6 +218,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
background_live_bytes_[chunk] += by;
}
void MarkRootsForTesting();
private:
class Observer : public AllocationObserver {
public:
......@@ -240,7 +239,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
void PauseBlackAllocation();
void FinishBlackAllocation();
void MarkRoots();
bool ShouldRetainMap(Map map, int age);
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
......
......@@ -189,6 +189,7 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
i::StepOrigin::kV8);
if (marking->IsReadyToOverApproximateWeakClosure()) {
SafepointScope scope(heap);
marking->MarkRootsForTesting();
marking->FinalizeIncrementally();
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment