Commit 496d0596 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Detect ineffective GCs near the heap limit.

Currently V8 can enter a CPU-thrashing GC loop near the heap limit. In
such cases it is better to trigger an out-of-memory failure earlier, to
avoid wasting CPU time and becoming unresponsive.

This patch adds a mechanism for tracking consecutive ineffective GCs.
A GC is considered ineffective if the heap size after the GC is still
close to the heap limit and if the average mutator utilization dropped
below a fixed threshold.

V8 execution is aborted after four consecutive ineffective GCs.

Bug: chromium:824214
Change-Id: I647032707d49e5383e1317c5e7616dd57077ea32
Reviewed-on: https://chromium-review.googlesource.com/978178
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52244}
parent 1ef6c437
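
The detection rule described above is simple enough to illustrate outside of V8. The following standalone sketch uses the thresholds introduced by this patch (old generation at or above 80% of the limit, average mutator utilization below 0.4, four strikes in a row); the harness, names, and concrete numbers around them are illustrative only and not V8 internals.

// Standalone sketch of the detection rule (not V8 code): a mark-compact is
// "ineffective" if the old generation stays at or above 80% of its limit and
// the average mutator utilization is below 0.4; four in a row trip an OOM.
#include <cstddef>
#include <cstdio>

namespace {

constexpr double kHighHeapPercentage = 0.8;
constexpr double kLowMutatorUtilization = 0.4;
constexpr int kMaxConsecutiveIneffectiveMarkCompacts = 4;

bool IsIneffective(size_t old_gen_size, size_t old_gen_limit,
                   double mutator_utilization) {
  return old_gen_size >= kHighHeapPercentage * old_gen_limit &&
         mutator_utilization < kLowMutatorUtilization;
}

}  // namespace

int main() {
  const size_t limit = 512u * 1024 * 1024;  // e.g. a 512 MB old-gen limit.
  int strikes = 0;
  // Simulate four mark-compacts that each leave ~90% of the limit live while
  // the mutator only gets 20% of the CPU: the fourth one reaches the abort
  // threshold (V8 would first offer a near-heap-limit callback a chance).
  for (int gc = 1; gc <= 4; gc++) {
    const size_t live_after_gc = static_cast<size_t>(0.9 * limit);
    const double mutator_utilization = 0.2;
    strikes = IsIneffective(live_after_gc, limit, mutator_utilization)
                  ? strikes + 1
                  : 0;
    if (strikes == kMaxConsecutiveIneffectiveMarkCompacts) {
      std::printf("GC #%d: ineffective mark-compacts near heap limit\n", gc);
    }
  }
  return 0;
}
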
......@@ -698,6 +698,8 @@ DEFINE_BOOL(concurrent_sweeping, true, "use concurrent sweeping")
DEFINE_BOOL(parallel_compaction, true, "use parallel compaction")
DEFINE_BOOL(parallel_pointer_update, true,
            "use parallel pointer update during compaction")
DEFINE_BOOL(detect_ineffective_gcs_near_heap_limit, true,
            "trigger out-of-memory failure to avoid GC storm near heap limit")
DEFINE_BOOL(trace_incremental_marking, false,
            "trace progress of the incremental marking")
DEFINE_BOOL(trace_stress_marking, false, "trace stress marking progress")
......
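
The new flag defaults to true. An embedder that wants the previous behavior could turn it off before creating any isolate; the sketch below assumes V8's usual --no-<flag> negation for boolean flags and the v8::V8::SetFlagsFromString(const char*, int) entry point, so verify both against the V8 version in use.

// Hypothetical embedder snippet: disable the new detection before creating
// isolates. The flag name comes from the DEFINE_BOOL above; the --no-
// spelling of the negation is assumed, not taken from this patch.
#include <cstring>

#include "include/v8.h"

void DisableIneffectiveGcDetection() {
  const char kFlags[] = "--no-detect-ineffective-gcs-near-heap-limit";
  v8::V8::SetFlagsFromString(kFlags, static_cast<int>(std::strlen(kFlags)));
}
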
......@@ -181,6 +181,7 @@ Heap::Heap()
      max_marking_limit_reached_(0.0),
      ms_count_(0),
      gc_count_(0),
      consecutive_ineffective_mark_compacts_(0),
      mmap_region_base_(0),
      remembered_unmapped_pages_index_(0),
      old_generation_allocation_limit_(initial_old_generation_size_),
......@@ -1725,6 +1726,8 @@ bool Heap::PerformGarbageCollection(
    external_memory_at_last_mark_compact_ = external_memory_;
    external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit;
    SetOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed);
    CheckIneffectiveMarkCompact(
        old_gen_size, tracer()->AverageMarkCompactMutatorUtilization());
  } else if (HasLowYoungGenerationAllocationRate() &&
             old_generation_size_configured_) {
    DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed);
......@@ -4270,6 +4273,34 @@ bool Heap::HasLowAllocationRate() {
         HasLowOldGenerationAllocationRate();
}

// A mark-compact is classified as ineffective if the old generation is still
// at or above 80% of its limit afterwards and the average mutator utilization
// has dropped below 0.4.
bool Heap::IsIneffectiveMarkCompact(size_t old_generation_size,
                                    double mutator_utilization) {
  const double kHighHeapPercentage = 0.8;
  const double kLowMutatorUtilization = 0.4;
  return old_generation_size >=
             kHighHeapPercentage * max_old_generation_size_ &&
         mutator_utilization < kLowMutatorUtilization;
}
void Heap::CheckIneffectiveMarkCompact(size_t old_generation_size,
                                       double mutator_utilization) {
  const int kMaxConsecutiveIneffectiveMarkCompacts = 4;
  if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
  if (!IsIneffectiveMarkCompact(old_generation_size, mutator_utilization)) {
    consecutive_ineffective_mark_compacts_ = 0;
    return;
  }
  ++consecutive_ineffective_mark_compacts_;
  if (consecutive_ineffective_mark_compacts_ ==
      kMaxConsecutiveIneffectiveMarkCompacts) {
    if (InvokeNearHeapLimitCallback()) {
      // The callback increased the heap limit.
      consecutive_ineffective_mark_compacts_ = 0;
      return;
    }
    FatalProcessOutOfMemory("Ineffective mark-compacts near heap limit");
  }
}

bool Heap::HasHighFragmentation() {
  size_t used = PromotedSpaceSizeOfObjects();
......
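
Before aborting, CheckIneffectiveMarkCompact() gives the embedder one last chance via InvokeNearHeapLimitCallback(): if a registered near-heap-limit callback returns a larger limit, the counter is reset and execution continues. Below is a hedged embedder-side sketch, assuming the v8::Isolate::AddNearHeapLimitCallback API and its size_t(void*, size_t, size_t) callback signature; check both against the V8 headers in use.

// Hypothetical embedder-side callback: grant the heap one 2x bump when V8
// reports that it is near the heap limit. If the returned limit is larger,
// the ineffective-GC counter is reset instead of reporting OOM.
#include "include/v8.h"

size_t GrowHeapLimitOnce(void* data, size_t current_heap_limit,
                         size_t initial_heap_limit) {
  bool* already_grown = static_cast<bool*>(data);
  if (*already_grown) return current_heap_limit;  // Decline the second time.
  *already_grown = true;
  return 2 * current_heap_limit;
}

void InstallNearHeapLimitCallback(v8::Isolate* isolate, bool* grown_flag) {
  isolate->AddNearHeapLimitCallback(GrowHeapLimitOnce, grown_flag);
}
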
......@@ -2018,6 +2018,11 @@ class Heap {
  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  bool IsIneffectiveMarkCompact(size_t old_generation_size,
                                double mutator_utilization);
  void CheckIneffectiveMarkCompact(size_t old_generation_size,
                                   double mutator_utilization);

  // ===========================================================================
  // Growing strategy. =========================================================
  // ===========================================================================
......@@ -2481,6 +2486,10 @@ class Heap {
  // How many GCs happened.
  unsigned int gc_count_;

  // The number of Mark-Compact garbage collections that were considered
  // ineffective. See the IsIneffectiveMarkCompact() predicate.
  int consecutive_ineffective_mark_compacts_;

  static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;
  uintptr_t mmap_region_base_;
......
......@@ -5934,6 +5934,41 @@ UNINITIALIZED_TEST(OutOfMemory) {
}
}

UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
  if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif

  FLAG_max_old_space_size = kHeapLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  oom_isolate = i_isolate;
  isolate->SetOOMErrorHandler(OOMCallback);
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
  {
    HandleScope scope(i_isolate);
    // Fill the old generation up to ~85% of its limit with live arrays, which
    // is above the 80% heap-size threshold of the ineffective-GC detection.
    while (heap->PromotedSpaceSizeOfObjects() <
           heap->MaxOldGenerationSize() * 0.85) {
      factory->NewFixedArray(100, TENURED);
    }
    {
      int initial_ms_count = heap->ms_count();
      // Run at least 10 mark-compacts near the limit. The arrays allocated
      // here die after each iteration (inner HandleScope), so the mutator
      // keeps making progress.
      while (heap->ms_count() < initial_ms_count + 10) {
        HandleScope inner_scope(i_isolate);
        factory->NewFixedArray(100, TENURED);
      }
      // The average mutator utilization stays at or above the 0.4 threshold,
      // so these mark-compacts are not classified as ineffective.
      CHECK_GE(heap->tracer()->AverageMarkCompactMutatorUtilization(), 0.4);
    }
  }
  isolate->Dispose();
}

HEAP_TEST(Regress779503) {
  // The following regression test ensures that the Scavenger does not allocate
  // over invalid slots. More specifically, the Scavenger should not sweep a page
......