Commit 68b3b65e authored by Omer Katz, committed by V8 LUCI CQ

[heap] Eliminate fast promotion mode

Bug: v8:12612
Change-Id: Iaf967da524413b6701aa93fa471d79e2a82e43e2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3805064
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82147}
parent 9861ce1d
......@@ -1524,8 +1524,6 @@ filegroup(
"src/heap/parallel-work-item.h",
"src/heap/parked-scope.h",
"src/heap/progress-bar.h",
"src/heap/promote-young-generation.cc",
"src/heap/promote-young-generation.h",
"src/heap/read-only-heap-inl.h",
"src/heap/read-only-heap.cc",
"src/heap/read-only-heap.h",
......
......@@ -3159,7 +3159,6 @@ v8_header_set("v8_internal_headers") {
"src/heap/parallel-work-item.h",
"src/heap/parked-scope.h",
"src/heap/progress-bar.h",
"src/heap/promote-young-generation.h",
"src/heap/read-only-heap-inl.h",
"src/heap/read-only-heap.h",
"src/heap/read-only-spaces.h",
......@@ -4523,7 +4522,6 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/object-stats.cc",
"src/heap/objects-visiting.cc",
"src/heap/paged-spaces.cc",
"src/heap/promote-young-generation.cc",
"src/heap/read-only-heap.cc",
"src/heap/read-only-spaces.cc",
"src/heap/safepoint.cc",
......
......@@ -1464,8 +1464,6 @@ DEFINE_BOOL(randomize_all_allocations, false,
DEFINE_BOOL(manual_evacuation_candidates_selection, false,
"Test mode only flag. It allows an unit test to select evacuation "
"candidates pages (requires --stress_compaction).")
DEFINE_BOOL(fast_promotion_new_space, false,
"fast promote new space on high survival rates")
DEFINE_BOOL(clear_free_memory, false, "initialize free memory with 0")
......
......@@ -71,7 +71,6 @@
#include "src/heap/objects-visiting.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/parked-scope.h"
#include "src/heap/promote-young-generation.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set.h"
#include "src/heap/safepoint.h"
......@@ -2294,8 +2293,6 @@ size_t Heap::PerformGarbageCollection(
if (collector == GarbageCollector::MARK_COMPACTOR) {
MarkCompact();
} else if (fast_promotion_mode_ && CanPromoteYoungAndExpandOldGeneration(0)) {
PromoteYoungGeneration();
} else if (collector == GarbageCollector::MINOR_MARK_COMPACTOR) {
MinorMarkCompact();
} else {
......@@ -2315,10 +2312,6 @@ size_t Heap::PerformGarbageCollection(
start_young_generation_size - SurvivedYoungObjectSize());
}
if (!fast_promotion_mode_ || collector == GarbageCollector::MARK_COMPACTOR) {
ComputeFastPromotionMode();
}
isolate_->counters()->objs_since_last_young()->Set(0);
isolate_->eternal_handles()->PostGarbageCollectionProcessing();
......@@ -2674,12 +2667,6 @@ void Heap::CheckNewSpaceExpansionCriteria() {
new_lo_space()->SetCapacity(new_space()->Capacity());
}
void Heap::PromoteYoungGeneration() {
tracer()->NotifyYoungGenerationHandling(
YoungGenerationHandling::kFastPromotionDuringScavenge);
promote_young_generation_gc_->EvacuateYoungGeneration();
}
void Heap::Scavenge() {
DCHECK_NOT_NULL(new_space());
DCHECK_IMPLIES(FLAG_separate_gc_phases, !incremental_marking()->IsMarking());
......@@ -2734,23 +2721,6 @@ void Heap::Scavenge() {
SetGCState(NOT_IN_GC);
}
void Heap::ComputeFastPromotionMode() {
if (!new_space_) return;
const size_t survived_in_new_space =
survived_last_scavenge_ * 100 / NewSpaceCapacity();
fast_promotion_mode_ =
!FLAG_optimize_for_size && FLAG_fast_promotion_new_space &&
!ShouldReduceMemory() && new_space_->IsAtMaximumCapacity() &&
survived_in_new_space >= kMinPromotedPercentForFastPromotionMode;
if (FLAG_trace_gc_verbose && !FLAG_trace_gc_ignore_scavenger) {
PrintIsolate(isolate(), "Fast promotion mode: %s survival rate: %zu%%\n",
fast_promotion_mode_ ? "true" : "false",
survived_in_new_space);
}
}
void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk,
UnprotectMemoryOrigin origin) {
if (!write_protect_code_memory()) return;
......@@ -5716,7 +5686,6 @@ void Heap::SetUp(LocalHeap* main_thread_local_heap) {
mark_compact_collector_.reset(new MarkCompactCollector(this));
scavenger_collector_.reset(new ScavengerCollector(this));
promote_young_generation_gc_.reset(new PromoteYoungGenerationGC(this));
minor_mark_compact_collector_.reset(new MinorMarkCompactCollector(this));
incremental_marking_.reset(
......@@ -6131,7 +6100,6 @@ void Heap::TearDown() {
}
scavenger_collector_.reset();
promote_young_generation_gc_.reset();
array_buffer_sweeper_.reset();
incremental_marking_.reset();
concurrent_marking_.reset();
......
......@@ -110,7 +110,6 @@ class ObjectIterator;
class ObjectStats;
class Page;
class PagedSpace;
class PromoteYoungGenerationGC;
class ReadOnlyHeap;
class RootVisitor;
class RwxMemoryWriteScope;
......@@ -1907,8 +1906,6 @@ class Heap {
bool InvokeNearHeapLimitCallback();
void ComputeFastPromotionMode();
void InvokeIncrementalMarkingPrologueCallbacks();
void InvokeIncrementalMarkingEpilogueCallbacks();
......@@ -1947,7 +1944,6 @@ class Heap {
// Performs a minor collection in new generation.
void Scavenge();
void PromoteYoungGeneration();
void UpdateYoungReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
......@@ -2320,7 +2316,6 @@ class Heap {
std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
std::unique_ptr<MinorMarkCompactCollector> minor_mark_compact_collector_;
std::unique_ptr<ScavengerCollector> scavenger_collector_;
std::unique_ptr<PromoteYoungGenerationGC> promote_young_generation_gc_;
std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
std::unique_ptr<MemoryAllocator> memory_allocator_;
......@@ -2425,8 +2420,6 @@ class Heap {
int max_regular_code_object_size_ = 0;
bool fast_promotion_mode_ = false;
// Used for testing purposes.
bool force_oom_ = false;
bool force_gc_on_next_allocation_ = false;
......@@ -2480,7 +2473,6 @@ class Heap {
friend class ObjectStatsCollector;
friend class Page;
friend class PagedSpaceBase;
friend class PromoteYoungGenerationGC;
friend class ReadOnlyRoots;
friend class Scavenger;
friend class ScavengerCollector;
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/promote-young-generation.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/large-spaces.h"
#include "src/heap/mark-compact.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces-inl.h"
namespace v8 {
namespace internal {
// Promotes the entire young generation (new space and the new large-object
// space) to the old generation by moving pages wholesale, without copying
// individual objects. Afterwards the young generation is empty.
void PromoteYoungGenerationGC::EvacuateYoungGeneration() {
  TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_FAST_PROMOTE);
  // Serialize against other operations that relocate pages/objects.
  base::MutexGuard guard(heap_->relocation_mutex());
  // Young generation garbage collection is orthogonal from full GC marking. It
  // is possible that objects that are currently being processed for marking are
  // reclaimed in the young generation GC that interleaves concurrent marking.
  // Pause concurrent markers to allow processing them using
  // `UpdateMarkingWorklistAfterYoungGenGC()`.
  ConcurrentMarking::PauseScope pause_js_marking(heap_->concurrent_marking());
  CppHeap::PauseConcurrentMarkingScope pause_cpp_marking(
      CppHeap::From(heap_->cpp_heap()));
  // Sanity checks: without concurrent marking racing against these predicates,
  // this GC mode must only run when fast promotion mode is active and the old
  // generation can absorb all of new space.
  if (!FLAG_concurrent_marking) {
    DCHECK(heap_->fast_promotion_mode_);
    DCHECK(heap_->CanPromoteYoungAndExpandOldGeneration(0));
  }

  NewSpace* new_space = heap_->new_space();
  // Move pages from new->old generation.
  PageRange range(new_space->first_allocatable_address(), new_space->top());
  for (auto it = range.begin(); it != range.end();) {
    // Advance the iterator before promoting: promotion removes the page from
    // new space, which would otherwise invalidate `it` (same pattern as the
    // large-page loop below).
    Page* p = (*++it)->prev_page();
    new_space->PromotePageToOldSpace(p);
    // While incremental marking is on, record live slots so the main marker
    // stays consistent with the moved page.
    if (heap_->incremental_marking()->IsMarking())
      heap_->mark_compact_collector()->RecordLiveSlotsOnPage(p);
    p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION);
  }

  // Reset new space.
  new_space->EvacuatePrologue();
  if (!new_space->EnsureCurrentCapacity()) {
    V8::FatalProcessOutOfMemory(
        heap_->isolate(), "NewSpace::EnsureCurrentCapacity", V8::kHeapOOM);
  }
  new_space->EvacuateEpilogue();

  for (auto it = heap_->new_lo_space()->begin();
       it != heap_->new_lo_space()->end();) {
    LargePage* page = *it;
    // Increment has to happen after we save the page, because it is going to
    // be removed below.
    it++;
    heap_->lo_space()->PromoteNewLargeObject(page);
  }

  // Fix up special trackers.
  heap_->external_string_table_.PromoteYoung();
  // GlobalHandles are updated in PostGarbageCollectionProcessing

  // Everything survived and was promoted; nothing was copied between
  // semispaces, hence the semispace-copied size is incremented by 0.
  size_t promoted = heap_->new_space()->Size() + heap_->new_lo_space()->Size();
  heap_->IncrementYoungSurvivorsCounter(promoted);
  heap_->IncrementPromotedObjectsSize(promoted);
  heap_->IncrementSemiSpaceCopiedObjectSize(0);
}
} // namespace internal
} // namespace v8
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_PROMOTE_YOUNG_GENERATION_H_
#define V8_HEAP_PROMOTE_YOUNG_GENERATION_H_
namespace v8 {
namespace internal {
class Heap;
/**
* `PromoteYoungGenerationGC` is a special GC mode used in fast promotion mode
* to quickly promote all objects in new space to old space, thus evacuating all
* of new space and leaving it empty.
*/
class PromoteYoungGenerationGC {
 public:
  // Does not take ownership of `heap`; `heap` must outlive this object.
  explicit PromoteYoungGenerationGC(Heap* heap) : heap_(heap) {}

  // Moves all pages of the young generation (new space and new large-object
  // space) into the old generation, leaving the young generation empty.
  void EvacuateYoungGeneration();

 private:
  Heap* const heap_;  // Not owned.
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_PROMOTE_YOUNG_GENERATION_H_
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment