Commit fdc0aa0c authored by ulan's avatar ulan Committed by Commit bot

[heap] Ensure finalization of incremental marking even if all allocations come
from the runtime.

This patch fixes an issue where the heap grows to its maximum capacity when
incremental marking is finished but cannot be finalized because the GC stack
guard does not trigger.

This can happen if all allocations come from the runtime, for example from the
JSON parser or the compiler.

Now, before expanding the heap, we check whether we are above the allocation
limit and the incremental marking needs to be finalized. If so, we do not
expand the heap and instead force a GC, which finalizes the incremental
marking. The check is performed for paged spaces and the large-object space.

BUG=chromium:670675

Review-Url: https://codereview.chromium.org/2552613004
Cr-Commit-Position: refs/heads/master@{#41524}
parent ca74343a
......@@ -5300,12 +5300,16 @@ void Heap::DampenOldGenerationAllocationLimit(size_t old_gen_size,
// major GC. It happens when the old generation allocation limit is reached and
// - either we need to optimize for memory usage,
// - or the incremental marking is not in progress and we cannot start it.
bool Heap::ShouldExpandOldGenerationOnAllocationFailure() {
bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
// We reached the old generation allocation limit.
if (ShouldOptimizeForMemoryUsage()) return false;
if (incremental_marking()->NeedsFinalization()) {
return false;
}
if (incremental_marking()->IsStopped() &&
IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) {
// We cannot start incremental marking.
......
......@@ -1860,7 +1860,7 @@ class Heap {
return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
}
bool ShouldExpandOldGenerationOnAllocationFailure();
bool ShouldExpandOldGenerationOnSlowAllocation();
enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
IncrementalMarkingLimit IncrementalMarkingLimitReached();
......
......@@ -66,6 +66,11 @@ class IncrementalMarking {
return request_type_ == FINALIZATION && !finalize_marking_completed_;
}
// Returns true when incremental marking is active and has advanced far
// enough (a finalization or complete-marking request is pending) that a
// full GC is required to finish it.
inline bool NeedsFinalization() {
  if (!IsMarking()) return false;
  return request_type_ == FINALIZATION || request_type_ == COMPLETE_MARKING;
}
GCRequestType request_type() const { return request_type_; }
void reset_request_type() { request_type_ = NONE; }
......
......@@ -2839,7 +2839,7 @@ HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
}
}
if (heap()->ShouldExpandOldGenerationOnAllocationFailure() && Expand()) {
if (heap()->ShouldExpandOldGenerationOnSlowAllocation() && Expand()) {
DCHECK((CountTotalPages() > 1) ||
(static_cast<size_t>(size_in_bytes) <= free_list_.Available()));
return free_list_.Allocate(static_cast<size_t>(size_in_bytes));
......@@ -2955,7 +2955,8 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
Executability executable) {
// Check if we want to force a GC before growing the old space further.
// If so, fail the allocation.
if (!heap()->CanExpandOldGeneration(object_size)) {
if (!heap()->CanExpandOldGeneration(object_size) ||
!heap()->ShouldExpandOldGenerationOnSlowAllocation()) {
return AllocationResult::Retry(identity());
}
......
......@@ -33,6 +33,7 @@
V(Regress538257) \
V(Regress589413) \
V(Regress658718) \
V(Regress670675) \
V(WriteBarriersInCopyJSObject)
#define HEAP_TEST(Name) \
......
......@@ -7048,5 +7048,37 @@ TEST(RememberedSetRemoveRange) {
});
}
// Regression test for chromium:670675: when all allocations come from the
// runtime (no JS execution, so the GC stack guard never triggers), the heap
// must still refuse to expand and force a GC that finalizes incremental
// marking instead of growing to max capacity.
HEAP_TEST(Regress670675) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  // Start from a clean slate: full GC, then wait for concurrent sweeping so
  // the old-generation numbers below are stable.
  CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    marking->Start(i::GarbageCollectionReason::kTesting);
  }
  // Each array is a bit larger than a page so every allocation goes through
  // the slow path; n + 10 iterations are enough to exceed the old-generation
  // allocation limit.
  size_t array_length = Page::kPageSize / kPointerSize + 100;
  size_t n = heap->OldGenerationSpaceAvailable() / array_length;
  for (size_t i = 0; i < n + 10; i++) {
    {
      HandleScope inner_scope(isolate);
      isolate->factory()->NewFixedArray(static_cast<int>(array_length));
    }
    // marking stops only when a GC finalized it — that is the fix under test.
    if (marking->IsStopped()) break;
    double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
    marking->AdvanceIncrementalMarking(
        deadline, IncrementalMarking::GC_VIA_STACK_GUARD,
        IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
  }
  // Use CHECK, not DCHECK: DCHECK compiles to a no-op in release builds, so
  // the test's core assertion would be silently skipped there.
  CHECK(marking->IsStopped());
}
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment