Commit 377df357 authored by Michael Lippautz; committed by V8 LUCI CQ

[heap] Move retaining of maps to the atomic pause

The phase is generally sub-millisecond. What's left as a follow-up is to
remove the finalization step that schedules another finalization step
(including embedder callbacks) through a stack guard.

Bug: v8:12775
Change-Id: I35f36e5ba07f9acb4e92acf2a414559ccd6ad9bd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3663081
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80716}
parent 13e5c7bd
......@@ -6330,10 +6330,8 @@ void Heap::AddRetainedMap(Handle<NativeContext> context, Handle<Map> map) {
if (array->IsFull()) {
CompactRetainedMaps(*array);
}
array =
WeakArrayList::AddToEnd(isolate(), array, MaybeObjectHandle::Weak(map));
array = WeakArrayList::AddToEnd(
isolate(), array,
isolate(), array, MaybeObjectHandle::Weak(map),
MaybeObjectHandle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()));
if (*array != context->retained_maps()) {
context->set_retained_maps(*array);
......
......@@ -363,87 +363,17 @@ void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
}
}
// Decides whether a dying map should be kept alive for another GC cycle.
// A map is retained only while it still has age budget left and its
// constructor is alive (marked), since a dead constructor means no new
// objects with this map can be created.
bool IncrementalMarking::ShouldRetainMap(Map map, int age) {
// The map has aged out; do not retain it.
if (age == 0) return false;
Object ctor = map.GetConstructor();
// Retain only when the constructor is a heap object that is not white
// (i.e. it has been marked live).
return ctor.IsHeapObject() &&
       !marking_state()->IsWhite(HeapObject::cast(ctor));
}
// Retain dying maps for FLAG_retain_maps_for_n_gc garbage collections to
// increase the chance of reusing the map transition tree in the future.
// Walks all retained-map lists, marking maps worth keeping and decrementing
// the age counter of those being let go.
void IncrementalMarking::RetainMaps() {
// Do not retain dead maps if flag disables it or there is
// - memory pressure (reduce_memory_footprint_),
// - GC is requested by tests or dev-tools (abort_incremental_marking_).
bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
FLAG_retain_maps_for_n_gc == 0;
std::vector<WeakArrayList> retained_maps_list = heap()->FindAllRetainedMaps();
for (WeakArrayList retained_maps : retained_maps_list) {
int length = retained_maps.length();
// Entries come in pairs: slot i holds a weak reference to a map,
// slot i + 1 holds its remaining age as a Smi.
for (int i = 0; i < length; i += 2) {
MaybeObject value = retained_maps.Get(i);
HeapObject map_heap_object;
// Skip entries whose weak map reference has already been cleared.
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
continue;
}
int age = retained_maps.Get(i + 1).ToSmi().value();
int new_age;
Map map = Map::cast(map_heap_object);
// Only unmarked (white) maps need retention work; marked maps are live.
if (!map_retaining_is_disabled && marking_state()->IsWhite(map)) {
if (ShouldRetainMap(map, age)) {
// Keep the map alive by pushing it onto the marking worklist.
WhiteToGreyAndPush(map);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(Root::kRetainMaps, map);
}
}
Object prototype = map.prototype();
if (age > 0 && prototype.IsHeapObject() &&
marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
}
} else {
// The map is already marked (or retaining is disabled): reset its age
// to the full retention budget.
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (new_age != age) {
retained_maps.Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
}
}
}
}
// Finalizes incremental marking. Per the commit message, map retention has
// been moved to the atomic pause (MarkCompactCollector::RetainMaps), and this
// finalization step is slated for removal (v8:12775).
void IncrementalMarking::FinalizeIncrementally() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
DCHECK(!finalize_marking_completed_);
DCHECK(IsMarking());
double start = heap_->MonotonicallyIncreasingTimeInMs();
// Map retaining is needed for performance, not correctness,
// so we can do it only once at the beginning of the finalization.
RetainMaps();
// TODO(v8:12775): Remove the finalization step.
finalize_marking_completed_ = true;
if (FLAG_trace_incremental_marking) {
double end = heap_->MonotonicallyIncreasingTimeInMs();
double delta = end - start;
heap()->isolate()->PrintWithTimestamp(
"[IncrementalMarking] Finalize incrementally spent %.1f ms.\n", delta);
// NOTE(review): the line below is diff residue — it is the replacement
// message string from the new revision merged into the old call and does
// not parse as shown; resolve against the upstream revision.
"[IncrementalMarking] Finalize incrementally.\n");
}
}
......
......@@ -235,11 +235,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
void PauseBlackAllocation();
void FinishBlackAllocation();
bool ShouldRetainMap(Map map, int age);
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
void RetainMaps();
void PublishWriteBarrierWorklists();
// Updates scheduled_bytes_to_mark_ to ensure marking progress based on
......
......@@ -2445,6 +2445,74 @@ void MarkCompactCollector::RecordObjectStats() {
heap()->dead_object_stats_->ClearObjectStats();
}
namespace {
// Decides whether a dying map should be kept alive for another GC cycle.
// Retention requires remaining age budget and a live (marked) constructor;
// a dead constructor means no new objects with this map can be created.
bool ShouldRetainMap(MarkCompactCollector::MarkingState* marking_state, Map map,
                     int age) {
// An expired map (age 0) is never retained.
if (age == 0) return false;
Object ctor = map.GetConstructor();
// Bail out if the constructor is not a heap object at all.
if (!ctor.IsHeapObject()) return false;
// Retain only while the constructor has been marked live (non-white).
return !marking_state->IsWhite(HeapObject::cast(ctor));
}
} // namespace
// Retain dying maps for FLAG_retain_maps_for_n_gc garbage collections to
// increase the chance of reusing the map transition tree in the future.
// Runs during the atomic pause: marks maps worth keeping and ages the rest.
void MarkCompactCollector::RetainMaps() {
// Retaining maps increases the chances of reusing map transitions at some
// memory cost, hence disable it when trying to reduce memory footprint more
// aggressively.
const bool should_retain_maps =
!heap()->ShouldReduceMemory() && FLAG_retain_maps_for_n_gc != 0;
for (WeakArrayList retained_maps : heap()->FindAllRetainedMaps()) {
// The list stores (weak map reference, age) pairs, hence the even length.
DCHECK_EQ(0, retained_maps.length() % 2);
for (int i = 0; i < retained_maps.length(); i += 2) {
MaybeObject value = retained_maps.Get(i);
HeapObject map_heap_object;
// Skip entries whose weak map reference has already been cleared.
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
continue;
}
int age = retained_maps.Get(i + 1).ToSmi().value();
int new_age;
Map map = Map::cast(map_heap_object);
// Only unmarked (white) maps need retention work; marked maps are live.
if (should_retain_maps && marking_state()->IsWhite(map)) {
if (ShouldRetainMap(marking_state(), map, age)) {
// Mark the map grey and queue it so the marker visits it.
if (marking_state()->WhiteToGrey(map)) {
local_marking_worklists()->Push(map);
}
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainingRoot(Root::kRetainMaps, map);
}
}
Object prototype = map.prototype();
if (age > 0 && prototype.IsHeapObject() &&
marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
}
} else {
// The map is already marked (or retaining is disabled): reset its age
// to the full retention budget.
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (new_age != age) {
retained_maps.Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
}
}
}
}
void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
// The recursive GC marker detects when it is nearing stack overflow,
......@@ -2481,6 +2549,11 @@ void MarkCompactCollector::MarkLiveObjects() {
MarkObjectsFromClientHeaps();
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_RETAIN_MAPS);
RetainMaps();
}
if (FLAG_parallel_marking) {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_FULL_CLOSURE_PARALLEL);
parallel_marking_ = true;
......
......@@ -652,6 +652,10 @@ class MarkCompactCollector final {
// heap object.
static bool IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p);
// Retain dying maps for `FLAG_retain_maps_for_n_gc` garbage collections to
// increase chances of reusing of map transition tree in future.
void RetainMaps();
// Clear non-live references in weak cells, transition and descriptor arrays,
// and deoptimize dependent code of non-live maps.
void ClearNonLiveReferences();
......
......@@ -571,6 +571,7 @@
F(MC_MARK_FINISH_INCREMENTAL) \
F(MC_MARK_FULL_CLOSURE_PARALLEL) \
F(MC_MARK_FULL_CLOSURE_PARALLEL_JOIN) \
F(MC_MARK_RETAIN_MAPS) \
F(MC_MARK_ROOTS) \
F(MC_MARK_FULL_CLOSURE) \
F(MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.