Commit 08b3bed2 authored by Omer Katz, committed by V8 LUCI CQ

[heap] Introduce PromoteYoungGenerationGC

This CL separates logic for promoting all of new space during fast
promotion out of the heap and into a new dedicated
PromoteYoungGenerationGC class.

It currently assumes SemiSpaceNewSpace and will need to be extended with
support for PagedNewSpace.

Bug: v8:12612
Change-Id: I0e65c034b444634a31b3c00df0a4b558612f023f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3644610
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80492}
parent 8278cb50
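For orientation before the diff: the central behavioral change is in Heap::PerformGarbageCollection(), where the fast-promotion check moves out of Scavenge() and into the collector dispatch. A simplified sketch of the new dispatch, condensed from the heap.cc hunk below (tracing and surrounding code elided):

// Sketch of the reworked collector dispatch (condensed from the heap.cc hunk
// in this CL; not additional code).
if (collector == GarbageCollector::MARK_COMPACTOR) {
  MarkCompact();
} else if (fast_promotion_mode_ && CanPromoteYoungAndExpandOldGeneration(0)) {
  // Fast promotion: promote all of new space wholesale instead of copying
  // live objects with the Scavenger.
  PromoteYoungGeneration();
} else if (collector == GarbageCollector::MINOR_MARK_COMPACTOR) {
  MinorMarkCompact();
} else {
  DCHECK_EQ(GarbageCollector::SCAVENGER, collector);
  Scavenge();
}

Because the check now sits ahead of the MINOR_MARK_COMPACTOR branch, it applies to both young-generation collectors, whereas it previously lived inside Scavenge() only.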
@@ -1502,6 +1502,8 @@ filegroup(
         "src/heap/parallel-work-item.h",
         "src/heap/parked-scope.h",
         "src/heap/progress-bar.h",
+        "src/heap/promote-young-generation.cc",
+        "src/heap/promote-young-generation.h",
         "src/heap/read-only-heap-inl.h",
         "src/heap/read-only-heap.cc",
         "src/heap/read-only-heap.h",
...
@@ -3069,6 +3069,7 @@ v8_header_set("v8_internal_headers") {
     "src/heap/parallel-work-item.h",
     "src/heap/parked-scope.h",
     "src/heap/progress-bar.h",
+    "src/heap/promote-young-generation.h",
     "src/heap/read-only-heap-inl.h",
     "src/heap/read-only-heap.h",
     "src/heap/read-only-spaces.h",
@@ -4329,6 +4330,7 @@ v8_source_set("v8_base_without_compiler") {
     "src/heap/object-stats.cc",
     "src/heap/objects-visiting.cc",
     "src/heap/paged-spaces.cc",
+    "src/heap/promote-young-generation.cc",
     "src/heap/read-only-heap.cc",
     "src/heap/read-only-spaces.cc",
     "src/heap/safepoint.cc",
...
@@ -70,6 +70,7 @@
 #include "src/heap/objects-visiting.h"
 #include "src/heap/paged-spaces-inl.h"
 #include "src/heap/parked-scope.h"
+#include "src/heap/promote-young-generation.h"
 #include "src/heap/read-only-heap.h"
 #include "src/heap/remembered-set.h"
 #include "src/heap/safepoint.h"
@@ -2298,16 +2299,15 @@ size_t Heap::PerformGarbageCollection(
   size_t start_young_generation_size =
       NewSpaceSize() + (new_lo_space() ? new_lo_space()->SizeOfObjects() : 0);
-  switch (collector) {
-    case GarbageCollector::MARK_COMPACTOR:
-      MarkCompact();
-      break;
-    case GarbageCollector::MINOR_MARK_COMPACTOR:
-      MinorMarkCompact();
-      break;
-    case GarbageCollector::SCAVENGER:
-      Scavenge();
-      break;
+  if (collector == GarbageCollector::MARK_COMPACTOR) {
+    MarkCompact();
+  } else if (fast_promotion_mode_ && CanPromoteYoungAndExpandOldGeneration(0)) {
+    PromoteYoungGeneration();
+  } else if (collector == GarbageCollector::MINOR_MARK_COMPACTOR) {
+    MinorMarkCompact();
+  } else {
+    DCHECK_EQ(GarbageCollector::SCAVENGER, collector);
+    Scavenge();
   }
   ProcessPretenuringFeedback();
@@ -2683,58 +2683,10 @@ void Heap::CheckNewSpaceExpansionCriteria() {
   new_lo_space()->SetCapacity(new_space()->Capacity());
 }
-void Heap::EvacuateYoungGeneration() {
-  TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_FAST_PROMOTE);
-  base::MutexGuard guard(relocation_mutex());
-  // Young generation garbage collection is orthogonal from full GC marking. It
-  // is possible that objects that are currently being processed for marking are
-  // reclaimed in the young generation GC that interleaves concurrent marking.
-  // Pause concurrent markers to allow processing them using
-  // `UpdateMarkingWorklistAfterYoungGenGC()`.
-  ConcurrentMarking::PauseScope pause_js_marking(concurrent_marking());
-  CppHeap::PauseConcurrentMarkingScope pause_cpp_marking(
-      CppHeap::From(cpp_heap_));
-  if (!FLAG_concurrent_marking) {
-    DCHECK(fast_promotion_mode_);
-    DCHECK(CanPromoteYoungAndExpandOldGeneration(0));
-  }
-  SemiSpaceNewSpace* semi_space_new_space =
-      SemiSpaceNewSpace::From(new_space());
-  // Move pages from new->old generation.
-  PageRange range(semi_space_new_space->first_allocatable_address(),
-                  semi_space_new_space->top());
-  for (auto it = range.begin(); it != range.end();) {
-    Page* p = (*++it)->prev_page();
-    semi_space_new_space->from_space().RemovePage(p);
-    Page::ConvertNewToOld(p);
-    if (incremental_marking()->IsMarking())
-      mark_compact_collector()->RecordLiveSlotsOnPage(p);
-  }
-  // Reset new space.
-  if (!semi_space_new_space->Rebalance()) {
-    FatalProcessOutOfMemory("NewSpace::Rebalance");
-  }
-  semi_space_new_space->ResetLinearAllocationArea();
-  semi_space_new_space->set_age_mark(semi_space_new_space->top());
-  for (auto it = new_lo_space()->begin(); it != new_lo_space()->end();) {
-    LargePage* page = *it;
-    // Increment has to happen after we save the page, because it is going to
-    // be removed below.
-    it++;
-    lo_space()->PromoteNewLargeObject(page);
-  }
-  // Fix up special trackers.
-  external_string_table_.PromoteYoung();
-  // GlobalHandles are updated in PostGarbageCollectonProcessing
-  size_t promoted = new_space()->Size() + new_lo_space()->Size();
-  IncrementYoungSurvivorsCounter(promoted);
-  IncrementPromotedObjectsSize(promoted);
-  IncrementSemiSpaceCopiedObjectSize(0);
+void Heap::PromoteYoungGeneration() {
+  tracer()->NotifyYoungGenerationHandling(
+      YoungGenerationHandling::kFastPromotionDuringScavenge);
+  promote_young_generation_gc_->EvacuateYoungGeneration();
 }
 void Heap::Scavenge() {
@@ -2746,12 +2698,6 @@ void Heap::Scavenge() {
                  "[IncrementalMarking] Scavenge during marking.\n");
   }
-  if (fast_promotion_mode_ && CanPromoteYoungAndExpandOldGeneration(0)) {
-    tracer()->NotifyYoungGenerationHandling(
-        YoungGenerationHandling::kFastPromotionDuringScavenge);
-    EvacuateYoungGeneration();
-    return;
-  }
   tracer()->NotifyYoungGenerationHandling(
       YoungGenerationHandling::kRegularScavenge);
@@ -5762,6 +5708,7 @@ void Heap::SetUp(LocalHeap* main_thread_local_heap) {
   mark_compact_collector_.reset(new MarkCompactCollector(this));
   scavenger_collector_.reset(new ScavengerCollector(this));
+  promote_young_generation_gc_.reset(new PromoteYoungGenerationGC(this));
   minor_mark_compact_collector_.reset(new MinorMarkCompactCollector(this));
   incremental_marking_.reset(
@@ -6168,6 +6115,7 @@ void Heap::TearDown() {
   }
   scavenger_collector_.reset();
+  promote_young_generation_gc_.reset();
   array_buffer_sweeper_.reset();
   incremental_marking_.reset();
   concurrent_marking_.reset();
...
@@ -109,6 +109,7 @@ class ObjectIterator;
 class ObjectStats;
 class Page;
 class PagedSpace;
+class PromoteYoungGenerationGC;
 class ReadOnlyHeap;
 class RootVisitor;
 class RwxMemoryWriteScope;
@@ -1941,7 +1942,7 @@ class Heap {
   // Performs a minor collection in new generation.
   void Scavenge();
-  void EvacuateYoungGeneration();
+  void PromoteYoungGeneration();
   void UpdateYoungReferencesInExternalStringTable(
       ExternalStringTableUpdaterCallback updater_func);
@@ -2324,6 +2325,7 @@ class Heap {
   std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
   std::unique_ptr<MinorMarkCompactCollector> minor_mark_compact_collector_;
   std::unique_ptr<ScavengerCollector> scavenger_collector_;
+  std::unique_ptr<PromoteYoungGenerationGC> promote_young_generation_gc_;
   std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
   std::unique_ptr<MemoryAllocator> memory_allocator_;
@@ -2487,6 +2489,7 @@ class Heap {
   friend class ObjectStatsCollector;
   friend class Page;
   friend class PagedSpace;
+  friend class PromoteYoungGenerationGC;
   friend class ReadOnlyRoots;
   friend class Scavenger;
   friend class ScavengerCollector;
...
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/promote-young-generation.h"

#include "src/heap/concurrent-marking.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/mark-compact.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces-inl.h"

namespace v8 {
namespace internal {

void PromoteYoungGenerationGC::EvacuateYoungGeneration() {
  TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_FAST_PROMOTE);
  base::MutexGuard guard(heap_->relocation_mutex());
  // Young generation garbage collection is orthogonal from full GC marking. It
  // is possible that objects that are currently being processed for marking are
  // reclaimed in the young generation GC that interleaves concurrent marking.
  // Pause concurrent markers to allow processing them using
  // `UpdateMarkingWorklistAfterYoungGenGC()`.
  ConcurrentMarking::PauseScope pause_js_marking(heap_->concurrent_marking());
  CppHeap::PauseConcurrentMarkingScope pause_cpp_marking(
      CppHeap::From(heap_->cpp_heap()));
  if (!FLAG_concurrent_marking) {
    DCHECK(heap_->fast_promotion_mode_);
    DCHECK(heap_->CanPromoteYoungAndExpandOldGeneration(0));
  }

  SemiSpaceNewSpace* semi_space_new_space =
      SemiSpaceNewSpace::From(heap_->new_space());

  // Move pages from new->old generation.
  PageRange range(semi_space_new_space->first_allocatable_address(),
                  semi_space_new_space->top());
  for (auto it = range.begin(); it != range.end();) {
    Page* p = (*++it)->prev_page();
    semi_space_new_space->from_space().RemovePage(p);
    Page::ConvertNewToOld(p);
    if (heap_->incremental_marking()->IsMarking())
      heap_->mark_compact_collector()->RecordLiveSlotsOnPage(p);
  }

  // Reset new space.
  if (!semi_space_new_space->Rebalance()) {
    V8::FatalProcessOutOfMemory(heap_->isolate(), "NewSpace::Rebalance", true);
  }
  semi_space_new_space->ResetLinearAllocationArea();
  semi_space_new_space->set_age_mark(semi_space_new_space->top());

  for (auto it = heap_->new_lo_space()->begin();
       it != heap_->new_lo_space()->end();) {
    LargePage* page = *it;
    // Increment has to happen after we save the page, because it is going to
    // be removed below.
    it++;
    heap_->lo_space()->PromoteNewLargeObject(page);
  }

  // Fix up special trackers.
  heap_->external_string_table_.PromoteYoung();
  // GlobalHandles are updated in PostGarbageCollectonProcessing

  size_t promoted = heap_->new_space()->Size() + heap_->new_lo_space()->Size();
  heap_->IncrementYoungSurvivorsCounter(promoted);
  heap_->IncrementPromotedObjectsSize(promoted);
  heap_->IncrementSemiSpaceCopiedObjectSize(0);
}

}  // namespace internal
}  // namespace v8
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_PROMOTE_YOUNG_GENERATION_H_
#define V8_HEAP_PROMOTE_YOUNG_GENERATION_H_

namespace v8 {
namespace internal {

class Heap;

/**
 * `PromoteYoungGenerationGC` is a special GC mode used in fast promotion mode
 * to quickly promote all objects in new space to old space, thus evacuating
 * all of new space and leaving it empty.
 */
class PromoteYoungGenerationGC {
 public:
  explicit PromoteYoungGenerationGC(Heap* heap) : heap_(heap) {}

  void EvacuateYoungGeneration();

 private:
  Heap* const heap_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_PROMOTE_YOUNG_GENERATION_H_
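Taken together with the heap.cc hunks above, the lifetime and use of the new class follows this pattern (condensed from the diff, not additional code in the CL):

// Heap owns the helper and delegates fast promotion to it.
void Heap::SetUp(LocalHeap* main_thread_local_heap) {
  // ...
  promote_young_generation_gc_.reset(new PromoteYoungGenerationGC(this));
  // ...
}

void Heap::PromoteYoungGeneration() {
  tracer()->NotifyYoungGenerationHandling(
      YoungGenerationHandling::kFastPromotionDuringScavenge);
  promote_young_generation_gc_->EvacuateYoungGeneration();
}

void Heap::TearDown() {
  // ...
  promote_young_generation_gc_.reset();
  // ...
}

The friend declaration added to heap.h is what lets the helper reach Heap internals such as fast_promotion_mode_ and external_string_table_ directly.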