Commit cf52c101 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Optimize for memory usage near OOM.

This patch enables ShouldOptimizeForMemoryUsage if the old generation
size is within 1/8th of the max old generation size.

This patch also passes the reduce-memory flag to incremental marking
whenever ShouldOptimizeForMemoryUsage is enabled.

Bug: chromium:824214
Change-Id: I5cfc0566ca0e23dfa1b8c0439a4e67424ddc852d
Reviewed-on: https://chromium-review.googlesource.com/973524
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52333}
parent dbdede01
......@@ -1236,7 +1236,7 @@ void Heap::ReportExternalMemoryPressure() {
}
if (incremental_marking()->IsStopped()) {
if (incremental_marking()->CanBeActivated()) {
StartIncrementalMarking(i::Heap::kNoGCFlags,
StartIncrementalMarking(GCFlagsForIncrementalMarking(),
GarbageCollectionReason::kExternalMemoryPressure,
kGCCallbackFlagsForExternalMemory);
} else {
......@@ -1371,7 +1371,8 @@ bool Heap::CollectGarbage(AllocationSpace space,
if (IsYoungGenerationCollector(collector) &&
!ShouldAbortIncrementalMarking()) {
StartIncrementalMarkingIfAllocationLimitIsReached(
kNoGCFlags, kGCCallbackScheduleIdleGarbageCollection);
GCFlagsForIncrementalMarking(),
kGCCallbackScheduleIdleGarbageCollection);
}
return next_gc_likely_to_collect_more;
......@@ -4323,8 +4324,9 @@ bool Heap::HasHighFragmentation(size_t used, size_t committed) {
}
// Heuristic deciding whether the heap should trade throughput for a lower
// memory footprint. True when: the --optimize-for-size flag is set, the
// isolate is running in the background, the embedder has signalled memory
// pressure, or the old generation can no longer expand by the slack amount
// (i.e. usage is within 1/8th of max_old_generation_size_).
bool Heap::ShouldOptimizeForMemoryUsage() {
// Slack is 1/8th of the maximum old generation size.
const size_t kOldGenerationSlack = max_old_generation_size_ / 8;
// NOTE(review): the next two "HighMemoryPressure" lines are the
// before/after pair of a diff hunk; the line adding
// !CanExpandOldGeneration(kOldGenerationSlack) is the resulting code.
return FLAG_optimize_for_size || isolate()->IsIsolateInBackground() ||
HighMemoryPressure();
HighMemoryPressure() || !CanExpandOldGeneration(kOldGenerationSlack);
}
void Heap::ActivateMemoryReducerIfNeeded() {
......
......@@ -1237,6 +1237,11 @@ class Heap {
// Incremental marking API. ==================================================
// ===========================================================================
// Returns the GC flags to pass when starting incremental marking:
// kReduceMemoryFootprintMask when ShouldOptimizeForMemoryUsage() holds
// (so marking runs in reduce-memory mode), kNoGCFlags otherwise.
int GCFlagsForIncrementalMarking() {
return ShouldOptimizeForMemoryUsage() ? kReduceMemoryFootprintMask
: kNoGCFlags;
}
// Start incremental marking and ensure that idle time handler can perform
// incremental steps.
void StartIdleIncrementalMarking(
......
......@@ -49,7 +49,7 @@ void IncrementalMarkingJob::Task::RunInternal() {
if (incremental_marking->IsStopped()) {
if (heap->IncrementalMarkingLimitReached() !=
Heap::IncrementalMarkingLimit::kNoLimit) {
heap->StartIncrementalMarking(Heap::kNoGCFlags,
heap->StartIncrementalMarking(heap->GCFlagsForIncrementalMarking(),
GarbageCollectionReason::kIdleTask,
kGCCallbackScheduleIdleGarbageCollection);
}
......
......@@ -1857,7 +1857,8 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
if (!is_local()) {
heap()->StartIncrementalMarkingIfAllocationLimitIsReached(
Heap::kNoGCFlags, kGCCallbackScheduleIdleGarbageCollection);
heap()->GCFlagsForIncrementalMarking(),
kGCCallbackScheduleIdleGarbageCollection);
}
size_t new_node_size = 0;
......@@ -3279,7 +3280,8 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
}
heap()->StartIncrementalMarkingIfAllocationLimitIsReached(
Heap::kNoGCFlags, kGCCallbackScheduleIdleGarbageCollection);
heap()->GCFlagsForIncrementalMarking(),
kGCCallbackScheduleIdleGarbageCollection);
heap()->CreateFillerObjectAt(object->address(), object_size,
ClearRecordedSlots::kNo);
if (heap()->incremental_marking()->black_allocation()) {
......
......@@ -6016,6 +6016,7 @@ struct OutOfMemoryState {
bool oom_triggered;
size_t old_generation_capacity_at_oom;
size_t memory_allocator_size_at_oom;
size_t new_space_capacity_at_oom;
};
size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
......@@ -6025,6 +6026,7 @@ size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
state->oom_triggered = true;
state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
state->new_space_capacity_at_oom = heap->new_space()->Capacity();
return initial_heap_limit + 100 * MB;
}
......@@ -6061,13 +6063,13 @@ UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
}
}
CHECK_LE(state.old_generation_capacity_at_oom,
kOldGenerationLimit + heap->new_space()->Capacity());
kOldGenerationLimit + state.new_space_capacity_at_oom);
CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
heap->new_space()->Capacity());
state.new_space_capacity_at_oom);
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
2 * heap->new_space()->Capacity()));
2 * state.new_space_capacity_at_oom));
reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
......@@ -6101,7 +6103,7 @@ UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
2 * heap->new_space()->Capacity()));
2 * state.new_space_capacity_at_oom));
reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment