Commit f5064ef9 authored by Omer Katz, committed by V8 LUCI CQ

Reland "[heap] Use PagedNewSpace when MinorMC is enabled"

This is a reland of commit 924be695

Original change's description:
> [heap] Use PagedNewSpace when MinorMC is enabled
>
> This CL also introduces/updates DCHECKs that some methods are never
> reached with MinorMC (they may still be reached by full GC when MinorMC
> is disabled).
>
> Bug: v8:12612
> Change-Id: I8afb8c964bc5c44225a92d0f8d9ac5a4c0ecef75
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3823130
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Commit-Queue: Omer Katz <omerkatz@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#82439}

Bug: v8:12612
Change-Id: I64aa83d48fb48970ee45263356aaf1541e3d6bdc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3827040
Commit-Queue: Adam Klein <adamk@chromium.org>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/heads/main@{#82448}
parent 5bc20acb
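As a rough illustration of the two patterns this CL leans on (a flag-guarded choice of new-space implementation, and DCHECKs guarding Scavenger-only paths), the sketch below is a minimal, self-contained example, not code from the CL itself: `Flags`, `NewSpaceBase`, `CreateNewSpace`, and `UpdateRetainersAfterScavenge` are simplified stand-ins for the V8 internals touched in the diff.

#include <cassert>
#include <memory>

// Stand-in for v8::internal::FLAG_minor_mc (hypothetical, for illustration).
struct Flags {
  static inline bool minor_mc = false;
};

// Stand-ins for the NewSpace hierarchy used in the diff.
struct NewSpaceBase {
  virtual ~NewSpaceBase() = default;
};
struct SemiSpaceNewSpace : NewSpaceBase {};
struct PagedNewSpace : NewSpaceBase {};

// Mirrors the shape of the Heap::SetUpSpaces() change: pick the new-space
// implementation based on whether MinorMC is enabled.
std::unique_ptr<NewSpaceBase> CreateNewSpace() {
  if (Flags::minor_mc) {
    return std::make_unique<PagedNewSpace>();
  }
  return std::make_unique<SemiSpaceNewSpace>();
}

// Mirrors the Scavenger-only helpers in the diff: assert the path is never
// reached when MinorMC is enabled.
void UpdateRetainersAfterScavenge() {
  assert(!Flags::minor_mc && "only reachable for the Scavenger");
  // ... Scavenger-specific retainer update would go here ...
}

int main() {
  auto space = CreateNewSpace();   // SemiSpaceNewSpace while minor_mc is false
  UpdateRetainersAfterScavenge();  // OK: the Scavenger-only path is allowed
  return 0;
}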
@@ -63,6 +63,7 @@ T ForwardingAddress(T heap_obj) {
   if (map_word.IsForwardingAddress()) {
     return T::cast(map_word.ToForwardingAddress());
   } else if (Heap::InFromPage(heap_obj)) {
+    DCHECK(!FLAG_minor_mc);
     return T();
   } else {
     return heap_obj;
@@ -410,8 +411,9 @@ void Heap::UpdateAllocationSite(Map map, HeapObject object,
   DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
 #ifdef DEBUG
   BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object);
-  DCHECK_IMPLIES(chunk->IsToPage(),
-                 chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION));
+  DCHECK_IMPLIES(
+      chunk->IsToPage(),
+      FLAG_minor_mc || chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION));
   DCHECK_IMPLIES(!chunk->InYoungGeneration(),
                  chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION));
 #endif
...
@@ -930,6 +930,9 @@ void Heap::PrintRetainingPath(HeapObject target, RetainingPathOption option) {
 void UpdateRetainersMapAfterScavenge(
     std::unordered_map<HeapObject, HeapObject, Object::Hasher>* map) {
+  // This is only used for Scavenger.
+  DCHECK(!FLAG_minor_mc);
+
   std::unordered_map<HeapObject, HeapObject, Object::Hasher> updated_map;
   for (auto pair : *map) {
@@ -957,7 +960,7 @@ void UpdateRetainersMapAfterScavenge(
 void Heap::UpdateRetainersAfterScavenge() {
   if (!incremental_marking()->IsMarking()) return;
-  // This isn't supported for Minor MC.
+  // This is only used for Scavenger.
   DCHECK(!FLAG_minor_mc);
   UpdateRetainersMapAfterScavenge(&retainer_);
@@ -2598,11 +2601,6 @@ void Heap::MinorMarkCompact() {
   CHECK_EQ(NOT_IN_GC, gc_state());
   DCHECK(new_space());
-  if (FLAG_trace_incremental_marking && !incremental_marking()->IsStopped()) {
-    isolate()->PrintWithTimestamp(
-        "[IncrementalMarking] MinorMarkCompact during marking.\n");
-  }
-
   PauseAllocationObserversScope pause_observers(this);
   SetGCState(MINOR_MARK_COMPACT);
@@ -2775,6 +2773,9 @@ void Heap::UpdateExternalString(String string, size_t old_payload,
 String Heap::UpdateYoungReferenceInExternalStringTableEntry(Heap* heap,
                                                             FullObjectSlot p) {
+  // This is only used for Scavenger.
+  DCHECK(!FLAG_minor_mc);
+
   PtrComprCageBase cage_base(heap->isolate());
   HeapObject obj = HeapObject::cast(*p);
   MapWord first_word = obj.map_word(cage_base, kRelaxedLoad);
@@ -4781,6 +4782,11 @@ void Heap::VerifyCommittedPhysicalMemory() {
        space = spaces.Next()) {
     space->VerifyCommittedPhysicalMemory();
   }
+  if (FLAG_minor_mc && new_space()) {
+    PagedNewSpace::From(new_space())
+        ->paged_space()
+        ->VerifyCommittedPhysicalMemory();
+  }
 }
 #endif  // DEBUG
@@ -5757,9 +5763,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   DCHECK_NOT_NULL(read_only_space_);
   const bool has_young_gen = !FLAG_single_generation && !IsShared();
   if (has_young_gen) {
-    space_[NEW_SPACE] = new_space_ =
-        new SemiSpaceNewSpace(this, initial_semispace_size_,
-                              max_semi_space_size_, new_allocation_info);
+    if (FLAG_minor_mc) {
+      space_[NEW_SPACE] = new_space_ =
+          new PagedNewSpace(this, initial_semispace_size_, max_semi_space_size_,
+                            new_allocation_info);
+    } else {
+      space_[NEW_SPACE] = new_space_ =
+          new SemiSpaceNewSpace(this, initial_semispace_size_,
+                                max_semi_space_size_, new_allocation_info);
+    }
     space_[NEW_LO_SPACE] = new_lo_space_ =
         new NewLargeObjectSpace(this, NewSpaceCapacity());
   }
@@ -5918,6 +5930,7 @@ void Heap::NotifyOldGenerationExpansion(AllocationSpace space,
                                         MemoryChunk* chunk) {
   // Pages created during bootstrapping may contain immortal immovable objects.
   if (!deserialization_complete()) {
+    DCHECK_NE(NEW_SPACE, chunk->owner()->identity());
     chunk->MarkNeverEvacuate();
   }
   if (space == CODE_SPACE || space == CODE_LO_SPACE) {
...
@@ -378,6 +378,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() {
     DCHECK(obj.IsHeapObject());
     // Only pointers to from space have to be updated.
     if (Heap::InFromPage(obj)) {
+      DCHECK(!FLAG_minor_mc);
       MapWord map_word = obj.map_word(cage_base, kRelaxedLoad);
       if (!map_word.IsForwardingAddress()) {
         // There may be objects on the marking deque that do not exist
@@ -401,6 +402,11 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() {
       // new space.
       DCHECK(Heap::IsLargeObject(obj) || Page::FromHeapObject(obj)->IsFlagSet(
                                              Page::PAGE_NEW_NEW_PROMOTION));
+      DCHECK_IMPLIES(FLAG_minor_mc, !Page::FromHeapObject(obj)->IsFlagSet(
+                                        Page::PAGE_NEW_NEW_PROMOTION));
+      DCHECK_IMPLIES(
+          FLAG_minor_mc,
+          !obj.map_word(cage_base, kRelaxedLoad).IsForwardingAddress());
       if (minor_marking_state->IsWhite(obj)) {
         return false;
       }
...
@@ -137,7 +137,7 @@ class MarkingVerifier : public ObjectVisitorWithCageBases, public RootVisitor {
   void VerifyRoots();
   void VerifyMarkingOnPage(const Page* page, Address start, Address end);
   void VerifyMarking(NewSpace* new_space);
-  void VerifyMarking(PagedSpace* paged_space);
+  void VerifyMarking(PagedSpaceBase* paged_space);
   void VerifyMarking(LargeObjectSpace* lo_space);

   Heap* heap_;
@@ -177,6 +177,10 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
 void MarkingVerifier::VerifyMarking(NewSpace* space) {
   if (!space) return;
+  if (FLAG_minor_mc) {
+    VerifyMarking(PagedNewSpace::From(space)->paged_space());
+    return;
+  }
   Address end = space->top();
   // The bottom position is at the start of its page. Allows us to use
   // page->area_start() as start of range on all pages.
@@ -192,7 +196,7 @@ void MarkingVerifier::VerifyMarking(NewSpace* space) {
   }
 }

-void MarkingVerifier::VerifyMarking(PagedSpace* space) {
+void MarkingVerifier::VerifyMarking(PagedSpaceBase* space) {
   for (Page* p : *space) {
     VerifyMarkingOnPage(p, p->area_start(), p->area_end());
   }
@@ -344,7 +348,7 @@ class EvacuationVerifier : public ObjectVisitorWithCageBases,
   void VerifyRoots();
   void VerifyEvacuationOnPage(Address start, Address end);
   void VerifyEvacuation(NewSpace* new_space);
-  void VerifyEvacuation(PagedSpace* paged_space);
+  void VerifyEvacuation(PagedSpaceBase* paged_space);

   Heap* heap_;
 };
@@ -367,6 +371,10 @@ void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
 void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
   if (!space) return;
+  if (FLAG_minor_mc) {
+    VerifyEvacuation(PagedNewSpace::From(space)->paged_space());
+    return;
+  }
   PageRange range(space->first_allocatable_address(), space->top());
   for (auto it = range.begin(); it != range.end();) {
     Page* page = *(it++);
@@ -377,7 +385,7 @@ void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
   }
 }

-void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) {
+void EvacuationVerifier::VerifyEvacuation(PagedSpaceBase* space) {
   for (Page* p : *space) {
     if (p->IsEvacuationCandidate()) continue;
     if (p->Contains(space->top())) {
@@ -645,7 +653,7 @@ void MarkCompactCollector::VerifyMarkbitsAreDirty(ReadOnlySpace* space) {
   }
 }

-void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
+void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpaceBase* space) {
   for (Page* p : *space) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(p));
@@ -654,6 +662,10 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
   if (!space) return;
+  if (FLAG_minor_mc) {
+    VerifyMarkbitsAreClean(PagedNewSpace::From(space)->paged_space());
+    return;
+  }
   for (Page* p : PageRange(space->first_allocatable_address(), space->top())) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(p));
@@ -1024,9 +1036,12 @@ void MarkCompactCollector::Prepare() {
     heap()->new_lo_space()->ResetPendingObject();
   }

-  if (heap()->new_space()) {
-    DCHECK_EQ(heap()->new_space()->top(),
-              heap()->new_space()->original_top_acquire());
+  NewSpace* new_space = heap()->new_space();
+  if (new_space) {
+    if (FLAG_minor_mc) {
+      PagedNewSpace::From(new_space)->paged_space()->PrepareForMarkCompact();
+    }
+    DCHECK_EQ(new_space->top(), new_space->original_top_acquire());
   }
 }
@@ -1058,6 +1073,10 @@ void MarkCompactCollector::VerifyMarking() {
     heap()->old_space()->VerifyLiveBytes();
     if (heap()->map_space()) heap()->map_space()->VerifyLiveBytes();
     heap()->code_space()->VerifyLiveBytes();
+    if (FLAG_minor_mc && heap()->new_space())
+      PagedNewSpace::From(heap()->new_space())
+          ->paged_space()
+          ->VerifyLiveBytes();
   }
 #endif
 }
@@ -1626,9 +1645,10 @@ class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases {
     if (value->IsStrongOrWeak()) {
       BasicMemoryChunk* p = BasicMemoryChunk::FromAddress(value.ptr());
       if (p->InYoungGeneration()) {
-        DCHECK_IMPLIES(
-            p->IsToPage(),
-            p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
+        DCHECK_IMPLIES(p->IsToPage(),
+                       FLAG_minor_mc ||
+                           p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) ||
+                           p->IsLargePage());
         MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
         DCHECK(chunk->SweepingDone());
@@ -1777,7 +1797,8 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     AllocationResult allocation;
     if (ShouldPromoteIntoSharedHeap(map)) {
       DCHECK_EQ(target_space, OLD_SPACE);
-      DCHECK(Heap::InYoungGeneration(object));
+      // TODO(v8:12612): Implement promotion from new space to shared heap.
+      DCHECK_IMPLIES(!FLAG_minor_mc, Heap::InYoungGeneration(object));
       DCHECK_NOT_NULL(shared_old_allocator_);
       allocation = shared_old_allocator_->AllocateRaw(size, alignment,
                                                       AllocationOrigin::kGC);
@@ -1880,6 +1901,9 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
     if (heap_->new_space()->ShouldBePromoted(object.address()) &&
         TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
+      // Full GCs use AlwaysPromoteYoung::kYes above and MinorMC should never
+      // move objects.
+      DCHECK(!FLAG_minor_mc);
       promoted_size_ += size;
       return true;
     }
@@ -1899,7 +1923,7 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
  private:
   inline bool TryEvacuateWithoutCopy(HeapObject object) {
-    if (is_incremental_marking_) return false;
+    DCHECK(!is_incremental_marking_);

     Map map = object.map();
@@ -1966,6 +1990,7 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
   static void Move(Page* page) {
     switch (mode) {
       case NEW_TO_NEW:
+        DCHECK(!FLAG_minor_mc);
         page->heap()->new_space()->PromotePageInNewSpace(page);
         break;
       case NEW_TO_OLD: {
@@ -3559,7 +3584,7 @@ static inline void UpdateSlot(PtrComprCageBase cage_base, TSlot slot,
                 "expected here");
   MapWord map_word = heap_obj.map_word(cage_base, kRelaxedLoad);
   if (map_word.IsForwardingAddress()) {
-    DCHECK_IMPLIES(!Heap::InFromPage(heap_obj),
+    DCHECK_IMPLIES((!FLAG_minor_mc && !Heap::InFromPage(heap_obj)),
                    MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
                        Page::FromHeapObject(heap_obj)->IsFlagSet(
                            Page::COMPACTION_WAS_ABORTED));
@@ -5551,9 +5576,10 @@ class YoungGenerationRecordMigratedSlotVisitor final
     if (value->IsStrongOrWeak()) {
       BasicMemoryChunk* p = BasicMemoryChunk::FromAddress(value.ptr());
       if (p->InYoungGeneration()) {
-        DCHECK_IMPLIES(
-            p->IsToPage(),
-            p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
+        DCHECK_IMPLIES(p->IsToPage(),
+                       FLAG_minor_mc ||
+                           p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) ||
+                           p->IsLargePage());
         MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
         DCHECK(chunk->SweepingDone());
         RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(chunk, slot);
...
@@ -504,7 +504,7 @@ class MarkCompactCollector final : public CollectorBase {
 #ifdef VERIFY_HEAP
   void VerifyMarkbitsAreClean();
   void VerifyMarkbitsAreDirty(ReadOnlySpace* space);
-  void VerifyMarkbitsAreClean(PagedSpace* space);
+  void VerifyMarkbitsAreClean(PagedSpaceBase* space);
   void VerifyMarkbitsAreClean(NewSpace* space);
   void VerifyMarkbitsAreClean(LargeObjectSpace* space);
 #endif
...
@@ -909,10 +909,9 @@ PagedSpaceForNewSpace::PagedSpaceForNewSpace(
       target_capacity_(initial_capacity_) {
   DCHECK_LE(initial_capacity_, max_capacity_);
-  // Adding entries to the free list requires having a map for free space. Not
-  // preallocating pages yet because the map may not be available yet when the
-  // space is initialized. `EnsureCurrentCapacity()` should be called after maps
-  // are allocated to preallocate pages.
+  if (!PreallocatePages()) {
+    V8::FatalProcessOutOfMemory(heap->isolate(), "New space setup");
+  }
 }
@@ -997,10 +996,9 @@ bool PagedSpaceForNewSpace::AddFreshPage() {
   return EnsureCurrentCapacity();
 }

-bool PagedSpaceForNewSpace::EnsureCurrentCapacity() {
+bool PagedSpaceForNewSpace::PreallocatePages() {
   // Verify that the free space map is already initialized. Otherwise, new free
   // list entries will be invalid.
-  DCHECK_NE(0, heap()->isolate()->root(RootIndex::kFreeSpaceMap).ptr());
   while (current_capacity_ < target_capacity_) {
     if (!TryExpandImpl()) return false;
   }
@@ -1008,6 +1006,13 @@ bool PagedSpaceForNewSpace::EnsureCurrentCapacity() {
   return true;
 }

+bool PagedSpaceForNewSpace::EnsureCurrentCapacity() {
+  // Verify that the free space map is already initialized. Otherwise, new free
+  // list entries will be invalid.
+  DCHECK_NE(0, heap()->isolate()->root(RootIndex::kFreeSpaceMap).ptr());
+  return PreallocatePages();
+}
+
 void PagedSpaceForNewSpace::FreeLinearAllocationArea() {
   size_t remaining_allocation_area_size = limit() - top();
   DCHECK_GE(allocated_linear_areas_, remaining_allocation_area_size);
...
@@ -350,6 +350,7 @@ class NewSpace : NON_EXPORTED_BASE(public SpaceWithLinearArea) {
 class V8_EXPORT_PRIVATE SemiSpaceNewSpace final : public NewSpace {
  public:
   static SemiSpaceNewSpace* From(NewSpace* space) {
+    DCHECK(!FLAG_minor_mc);
     return static_cast<SemiSpaceNewSpace*>(space);
   }
@@ -628,6 +629,8 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase {
 #endif  // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB

  private:
+  bool PreallocatePages();
+
   const size_t initial_capacity_;
   const size_t max_capacity_;
   size_t target_capacity_ = 0;
@@ -642,6 +645,7 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase {
 class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace {
  public:
   static PagedNewSpace* From(NewSpace* space) {
+    DCHECK(FLAG_minor_mc);
     return static_cast<PagedNewSpace*>(space);
   }
...
@@ -184,6 +184,7 @@ void PagedSpaceBase::MergeCompactionSpace(CompactionSpace* other) {
   base::MutexGuard guard(mutex());

   DCHECK_NE(NEW_SPACE, identity());
+  DCHECK_NE(NEW_SPACE, other->identity());
   DCHECK(identity() == other->identity());

   // Unmerged fields:
@@ -325,6 +326,9 @@ void PagedSpaceBase::RemovePage(Page* page) {
   DCHECK_IMPLIES(identity() == NEW_SPACE, page->IsFlagSet(Page::TO_PAGE));
   memory_chunk_list_.Remove(page);
   UnlinkFreeListCategories(page);
+  if (identity() == NEW_SPACE) {
+    page->ReleaseFreeListCategories();
+  }
   DecreaseAllocatedBytes(page->allocated_bytes(), page);
   DecreaseCapacity(page->area_size());
   AccountUncommitted(page->size());
@@ -359,6 +363,7 @@ void PagedSpaceBase::ResetFreeList() {
     free_list_->EvictFreeListItems(page);
   }
   DCHECK(free_list_->IsEmpty());
+  DCHECK_EQ(0, free_list_->Available());
 }

 void PagedSpaceBase::ShrinkImmortalImmovablePages() {
@@ -504,7 +509,7 @@ void PagedSpaceBase::FreeLinearAllocationArea() {
   AdvanceAllocationObservers();

-  if (current_top != current_limit &&
+  if (identity() != NEW_SPACE && current_top != current_limit &&
       heap()->incremental_marking()->black_allocation()) {
     Page::FromAddress(current_top)
         ->DestroyBlackArea(current_top, current_limit);
...
@@ -8,6 +8,7 @@
 #include "src/execution/protectors.h"
 #include "src/heap/factory.h"
 #include "src/heap/heap-inl.h"
+#include "src/heap/new-spaces.h"
 #include "src/ic/handler-configuration.h"
 #include "src/init/heap-symbols.h"
 #include "src/init/setup-isolate.h"
@@ -74,6 +75,12 @@ bool SetupIsolateDelegate::SetupHeapInternal(Heap* heap) {
 bool Heap::CreateHeapObjects() {
   // Create initial maps.
   if (!CreateInitialMaps()) return false;
+  if (FLAG_minor_mc && new_space()) {
+    PagedNewSpace::From(new_space())
+        ->paged_space()
+        ->free_list()
+        ->RepairLists(this);
+  }
   CreateApiObjects();

   // Create initial objects
...
@@ -930,6 +930,8 @@ void StringForwardingTable::Block::UpdateAfterEvacuation(Isolate* isolate) {
 void StringForwardingTable::Block::UpdateAfterEvacuation(Isolate* isolate,
                                                          int up_to_index) {
+  // This is only used for Scavenger.
+  DCHECK(!FLAG_minor_mc);
   DCHECK(FLAG_always_use_string_forwarding_table);
   for (int index = 0; index < up_to_index; ++index) {
     Object original = Get(isolate, IndexOfOriginalString(index));
...