Commit e7aa6f91 authored by ulan, committed by Commit bot

[heap] Exclude the owner of the linear allocation area from evacuation.

This ensures that an incremental marking step does not change the top and limit
pointers of the old space; keeping them stable is required for allocation folding.

For more info see:
https://bugs.chromium.org/p/chromium/issues/detail?id=659165#c13
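
For readers unfamiliar with the dependency: allocation folding lets the
optimizing compiler merge several consecutive allocations into one limit check
and one bump of top, so top and limit must stay put between the check and the
final bump. A minimal sketch of the pattern in plain C++ (an illustration,
not V8 code):

#include <cstddef>
#include <cstdint>

struct LinearAllocationArea {
  uintptr_t top = 0;
  uintptr_t limit = 0;
};

// Folded allocation: a single limit check reserves room for two objects at
// once, and both objects are then carved out of that one reservation. If a
// marking step reset top/limit between the check and the bump, the second
// object would be carved out of memory that was never reserved.
inline void* AllocateFolded(LinearAllocationArea* area, size_t size_a,
                            size_t size_b, void** object_b) {
  if (area->limit - area->top < size_a + size_b) return nullptr;  // slow path elided
  uintptr_t result = area->top;
  area->top += size_a + size_b;
  *object_b = reinterpret_cast<void*>(result + size_a);
  return reinterpret_cast<void*>(result);
}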

BUG=chromium:659165

Review-Url: https://codereview.chromium.org/2469273002
Cr-Commit-Position: refs/heads/master@{#40720}
parent 0eeee7ce
@@ -290,9 +290,6 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
       TraceFragmentation(heap()->map_space());
     }
 
-    heap()->old_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
-    heap()->code_space()->EvictEvacuationCandidatesFromLinearAllocationArea();
-
     compacting_ = evacuation_candidates_.length() > 0;
   }
@@ -641,8 +638,12 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   DCHECK(!sweeping_in_progress());
   DCHECK(!FLAG_concurrent_sweeping ||
          sweeper().IsSweepingCompleted(space->identity()));
+  Page* owner_of_linear_allocation_area =
+      space->top() == space->limit()
+          ? nullptr
+          : Page::FromAllocationAreaAddress(space->top());
   for (Page* p : *space) {
-    if (p->NeverEvacuate()) continue;
+    if (p->NeverEvacuate() || p == owner_of_linear_allocation_area) continue;
     // Invariant: Evacuation candidates are just created when marking is
     // started. This means that sweeping has finished. Furthermore, at the end
     // of a GC all evacuation candidates are cleared and their slot buffers are
...
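
Note how the owner page is computed: when top == limit the space has no
linear allocation area, hence nullptr; otherwise
Page::FromAllocationAreaAddress(space->top()) maps the top pointer back to
its owning page. A simplified model of that mapping, not V8's exact
definition, with an assumed page size for illustration:

#include <cstdint>

// Assumed power-of-two page size, for illustration only; V8 derives the
// real value from kPageSizeBits.
constexpr uintptr_t kIllustrativePageSize = uintptr_t{1} << 19;
constexpr uintptr_t kPageAlignmentMask = kIllustrativePageSize - 1;

// Step back one word before masking so that a top/limit pointer sitting
// exactly on a page boundary maps to the page that owns the allocation
// area rather than to the following page.
inline uintptr_t PageStartFromAllocationAreaAddress(uintptr_t address) {
  return (address - sizeof(void*)) & ~kPageAlignmentMask;
}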
@@ -2800,20 +2800,6 @@ void PagedSpace::RepairFreeListsAfterDeserialization() {
 }
 
-void PagedSpace::EvictEvacuationCandidatesFromLinearAllocationArea() {
-  if (allocation_info_.top() >= allocation_info_.limit()) return;
-  if (!Page::FromAllocationAreaAddress(allocation_info_.top())->CanAllocate()) {
-    // Create filler object to keep page iterable if it was iterable.
-    int remaining =
-        static_cast<int>(allocation_info_.limit() - allocation_info_.top());
-    heap()->CreateFillerObjectAt(allocation_info_.top(), remaining,
-                                 ClearRecordedSlots::kNo);
-    allocation_info_.Reset(nullptr, nullptr);
-  }
-}
-
 HeapObject* PagedSpace::SweepAndRetryAllocation(int size_in_bytes) {
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   if (collector->sweeping_in_progress()) {
...
@@ -2067,8 +2067,6 @@ class PagedSpace : public Space {
   Page* FirstPage() { return anchor_.next_page(); }
   Page* LastPage() { return anchor_.prev_page(); }
 
-  void EvictEvacuationCandidatesFromLinearAllocationArea();
-
   bool CanExpand(size_t size);
 
   // Returns the number of total pages in this space.
...
@@ -193,6 +193,21 @@ void GcAndSweep(Heap* heap, AllocationSpace space) {
   }
 }
 
+void ForceEvacuationCandidate(Page* page) {
+  CHECK(FLAG_manual_evacuation_candidates_selection);
+  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
+  Address top = space->top();
+  Address limit = space->limit();
+  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
+    // Create filler object to keep page iterable if it was iterable.
+    int remaining = static_cast<int>(limit - top);
+    space->heap()->CreateFillerObjectAt(top, remaining,
+                                        ClearRecordedSlots::kNo);
+    space->SetTopAndLimit(nullptr, nullptr);
+  }
+}
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
@@ -50,6 +50,8 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space);
 void GcAndSweep(Heap* heap, AllocationSpace space);
 
+void ForceEvacuationCandidate(Page* page);
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
...
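
The helper subsumes what each call site below previously did by hand: set the
flag on the page and, if that page owns the space's linear allocation area,
retire the area so the page can actually be evacuated. A sketch of a typical
call site, mirroring the updated tests that follow (the surrounding cctest
setup is assumed):

  FLAG_manual_evacuation_candidates_selection = true;
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(4, TENURED);
  heap::ForceEvacuationCandidate(Page::FromAddress(array->address()));
  // A full GC will now move `array` off the forced candidate page.
  CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);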
@@ -127,7 +127,7 @@ TEST(ArrayBuffer_Compaction) {
   heap::GcAndSweep(heap, NEW_SPACE);
 
   Page* page_before_gc = Page::FromAddress(buf1->address());
-  page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(page_before_gc);
   CHECK(IsTracked(*buf1));
 
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
...
@@ -791,7 +791,7 @@ TEST(BytecodeArray) {
   // Perform a full garbage collection and force the constant pool to be on an
   // evacuation candidate.
   Page* evac_page = Page::FromAddress(constant_pool->address());
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
 
   // BytecodeArray should survive.
@@ -4358,7 +4358,7 @@ TEST(Regress514122) {
   // Heap is ready, force {lit_page} to become an evacuation candidate and
   // simulate incremental marking to enqueue optimized code map.
   FLAG_manual_evacuation_candidates_selection = true;
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   heap::SimulateIncrementalMarking(heap);
 
   // No matter whether reachable or not, {boomer} is doomed.
@@ -4557,7 +4557,7 @@ TEST(LargeObjectSlotRecording) {
   heap::SimulateFullSpace(heap->old_space());
   Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
   Page* evac_page = Page::FromAddress(lit->address());
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   FixedArray* old_location = *lit;
 
   // Allocate a large object.
@@ -5563,8 +5563,7 @@ HEAP_TEST(Regress538257) {
        heap->CanExpandOldGeneration(old_space->AreaSize());
        i++) {
     objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
-    Page::FromAddress(objects[i]->address())
-        ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+    heap::ForceEvacuationCandidate(Page::FromAddress(objects[i]->address()));
   }
   heap::SimulateFullSpace(old_space);
   heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
@@ -6526,7 +6525,7 @@ HEAP_TEST(Regress589413) {
     AlwaysAllocateScope always_allocate(isolate);
     Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
     Page* ec_page = Page::FromAddress(ec_obj->address());
-    ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+    heap::ForceEvacuationCandidate(ec_page);
     // Make all arrays point to evacuation candidate so that
     // slots are recorded for them.
     for (size_t j = 0; j < arrays.size(); j++) {
@@ -6733,8 +6732,7 @@ TEST(Regress631969) {
   heap::SimulateFullSpace(heap->old_space());
   Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
   Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
-  Page::FromAddress(s1->address())
-      ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(Page::FromAddress(s1->address()));
 
   heap::SimulateIncrementalMarking(heap, false);
...
@@ -1141,7 +1141,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
   // simulate incremental marking.
   FLAG_stress_compaction = true;
   FLAG_manual_evacuation_candidates_selection = true;
-  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(ec_page);
   heap::SimulateIncrementalMarking(heap);
 
   // Disable stress compaction mode in order to let GC do scavenge.
   FLAG_stress_compaction = false;
...
@@ -1459,7 +1459,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
   // Heap is ready, force |ec_page| to become an evacuation candidate and
   // simulate incremental marking.
-  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(ec_page);
   heap::SimulateIncrementalMarking(heap);
 
   // Check that everything is ready for triggering incremental write barrier
...