Commit 026a1000 authored by Omer Katz, committed by V8 LUCI CQ

[heap] Update evacuation and implement sweeping in MinorMC

Bug: v8:12612
Change-Id: I28a574435646073d65e6fe1e746267ffb0eaa01d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3864083
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82932}
parent 2f41221f
@@ -982,18 +982,20 @@ enum AllocationSpace {
   OLD_SPACE,      // Old generation regular object space.
   CODE_SPACE,     // Old generation code object space, marked executable.
   MAP_SPACE,      // Old generation map object space, non-movable.
+  NEW_SPACE,      // Young generation space for regular objects collected
+                  // with Scavenger/MinorMC.
   LO_SPACE,       // Old generation large object space.
   CODE_LO_SPACE,  // Old generation large code object space.
   NEW_LO_SPACE,   // Young generation large object space.
-  NEW_SPACE,  // Young generation semispaces for regular objects collected with
-              // Scavenger.
 
   FIRST_SPACE = RO_SPACE,
-  LAST_SPACE = NEW_SPACE,
+  LAST_SPACE = NEW_LO_SPACE,
   FIRST_MUTABLE_SPACE = OLD_SPACE,
-  LAST_MUTABLE_SPACE = NEW_SPACE,
+  LAST_MUTABLE_SPACE = NEW_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
-  LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
+  LAST_GROWABLE_PAGED_SPACE = MAP_SPACE,
+  FIRST_SWEEPABLE_SPACE = OLD_SPACE,
+  LAST_SWEEPABLE_SPACE = NEW_SPACE
 };
 
 constexpr int kSpaceTagSize = 4;
 static_assert(FIRST_SPACE == 0);
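NEW_SPACE moves up next to the other regular-object spaces because the new FIRST_SWEEPABLE_SPACE/LAST_SWEEPABLE_SPACE markers require OLD_SPACE..NEW_SPACE to be one contiguous run; LAST_SPACE and LAST_MUTABLE_SPACE become NEW_LO_SPACE because NEW_SPACE is no longer the last enumerator. A minimal sketch of how such a range is consumed (illustrative only, not code from this CL; the real consumers are in sweeper.h below):

// Illustrative sketch: range constants like these are used by iterating the
// contiguous enum values, which is why NEW_SPACE had to move next to the
// other sweepable spaces.
for (int raw = FIRST_SWEEPABLE_SPACE; raw <= LAST_SWEEPABLE_SPACE; ++raw) {
  const AllocationSpace space = static_cast<AllocationSpace>(raw);
  // ... per-space sweeping setup for `space` ...
}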
@@ -827,7 +827,6 @@ void GCTracer::PrintNVP() const {
           "evacuate=%.2f "
           "evacuate.copy=%.2f "
           "evacuate.update_pointers=%.2f "
-          "evacuate.update_pointers.to_new_roots=%.2f "
           "evacuate.update_pointers.slots=%.2f "
           "background.mark=%.2f "
           "background.evacuate.copy=%.2f "
@@ -850,7 +849,6 @@ void GCTracer::PrintNVP() const {
           current_scope(Scope::MINOR_MC_EVACUATE),
           current_scope(Scope::MINOR_MC_EVACUATE_COPY),
           current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS),
-          current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS),
           current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS),
           current_scope(Scope::MINOR_MC_BACKGROUND_MARKING),
           current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY),
@@ -658,6 +658,10 @@ uintptr_t Heap::code_page_collection_memory_modification_scope_depth() {
   return local_heap->code_page_collection_memory_modification_scope_depth_;
 }
 
+PagedNewSpace* Heap::paged_new_space() const {
+  return PagedNewSpace::From(new_space());
+}
+
 CodeSpaceMemoryModificationScope::~CodeSpaceMemoryModificationScope() {
   if (heap_->write_protect_code_memory()) {
     heap_->decrement_code_space_memory_modification_scope_depth();
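The new Heap::paged_new_space() accessor wraps the PagedNewSpace::From(new_space()) downcast that several call sites below previously spelled out by hand. A usage sketch mirroring those call sites:

// Sketch mirroring call sites updated later in this CL.
PagedSpaceBase* space = heap->paged_new_space()->paged_space();
space->VerifyCommittedPhysicalMemory();  // DEBUG-only verification example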
@@ -51,9 +51,7 @@ void HeapLayoutTracer::PrintBasicMemoryChunk(std::ostream& os,
 // static
 void HeapLayoutTracer::PrintHeapLayout(std::ostream& os, Heap* heap) {
   if (FLAG_minor_mc) {
-    const PagedNewSpace* paged_new_space =
-        PagedNewSpace::From(heap->new_space());
-    for (const Page* page : *paged_new_space) {
+    for (const Page* page : *heap->paged_new_space()) {
       PrintBasicMemoryChunk(os, *page, "new_space");
     }
   } else {
@@ -4452,7 +4452,7 @@ void Heap::VerifyCountersBeforeConcurrentSweeping() {
     // We need to refine the counters on pages that are already swept and have
     // not been moved over to the actual space. Otherwise, the AccountingStats
     // are just an over approximation.
-    space->RefillFreeList();
+    space->RefillFreeList(mark_compact_collector()->sweeper());
     space->VerifyCountersBeforeConcurrentSweeping();
   }
 }
@@ -4464,9 +4464,7 @@ void Heap::VerifyCommittedPhysicalMemory() {
     space->VerifyCommittedPhysicalMemory();
   }
   if (FLAG_minor_mc && new_space()) {
-    PagedNewSpace::From(new_space())
-        ->paged_space()
-        ->VerifyCommittedPhysicalMemory();
+    paged_new_space()->paged_space()->VerifyCommittedPhysicalMemory();
   }
 }
 #endif  // DEBUG
@@ -119,6 +119,7 @@ class ObjectIterator;
 class ObjectStats;
 class Page;
 class PagedSpace;
+class PagedNewSpace;
 class ReadOnlyHeap;
 class RootVisitor;
 class RwxMemoryWriteScope;
@@ -873,6 +874,7 @@ class Heap {
   inline Address NewSpaceTop();
 
   NewSpace* new_space() const { return new_space_; }
+  inline PagedNewSpace* paged_new_space() const;
   OldSpace* old_space() const { return old_space_; }
   OldSpace* shared_old_space() const { return shared_old_space_; }
   CodeSpace* code_space() const { return code_space_; }
This diff is collapsed.
@@ -29,6 +29,7 @@ class ItemParallelJob;
 class LargeObjectSpace;
 class LargePage;
 class MigrationObserver;
+class PagedNewSpace;
 class ReadOnlySpace;
 class RecordMigratedSlotVisitor;
 class UpdatingItem;
@@ -683,6 +684,7 @@ class MarkCompactCollector final : public CollectorBase {
   // up other pages for sweeping. Does not start sweeper tasks.
   void Sweep();
   void StartSweepSpace(PagedSpace* space);
+  void StartSweepNewSpace();
   void SweepLargeSpace(LargeObjectSpace* space);
 
   void EvacuatePrologue();
@@ -811,10 +813,8 @@ class MinorMarkCompactCollector final : public CollectorBase {
   void Finish() final;
 
-  bool sweeping_in_progress() const final {
-    // TODO(v8:13012): Fix this once sweeping is implemented.
-    return false;
-  }
+  Sweeper* sweeper() { return sweeper_.get(); }
+  bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
 
   void VisitObject(HeapObject obj) final;
@@ -831,19 +831,15 @@ class MinorMarkCompactCollector final : public CollectorBase {
   void TraceFragmentation();
   void ClearNonLiveReferences();
 
+  void Sweep();
+  void StartSweepNewSpace();
+
   void EvacuatePrologue();
   void EvacuateEpilogue();
   void Evacuate();
   void EvacuatePagesInParallel();
   void UpdatePointersAfterEvacuation();
 
-  std::unique_ptr<UpdatingItem> CreateToSpaceUpdatingItem(MemoryChunk* chunk,
-                                                          Address start,
-                                                          Address end);
-  int CollectToSpaceUpdatingItems(
-      std::vector<std::unique_ptr<UpdatingItem>>* items);
-
   void SweepArrayBufferExtensions();
 
   std::unique_ptr<YoungGenerationMainMarkingVisitor> main_marking_visitor_;
@@ -853,6 +849,8 @@ class MinorMarkCompactCollector final : public CollectorBase {
   std::vector<Page*> promoted_pages_;
   std::vector<LargePage*> promoted_large_pages_;
 
+  std::unique_ptr<Sweeper> sweeper_;
+
   friend class YoungGenerationMarkingTask;
   friend class YoungGenerationMarkingJob;
   friend class YoungGenerationMainMarkingVisitor;
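MinorMarkCompactCollector now owns a Sweeper of its own, so sweeping_in_progress() reports real state instead of the stubbed false, and the to-space updating items disappear along with the semispace layout. Since RawSweep (in sweeper.cc below) asserts that NEW_SPACE pages are only swept with SweepingMode::kEagerDuringGC, minor-GC sweeping has to be drained within the pause. A hedged sketch of what Finish() plausibly does; the actual body is in the collapsed diff (presumably mark-compact.cc):

// Sketch, assumption only: drain new-space sweeping eagerly in the pause.
void MinorMarkCompactCollector::Finish() {
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH_SWEEP_NEW);
  sweeper()->EnsureCompleted(Sweeper::SweepingMode::kEagerDuringGC);
  // ... remaining epilogue ...
}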
@@ -966,8 +966,6 @@ void PagedSpaceForNewSpace::Shrink() {
   target_capacity_ = current_capacity_;
 }
 
-void PagedSpaceForNewSpace::EvacuatePrologue() { FreeLinearAllocationArea(); }
-
 void PagedSpaceForNewSpace::UpdateInlineAllocationLimit(size_t size_in_bytes) {
   PagedSpaceBase::UpdateInlineAllocationLimit(size_in_bytes);
 }
@@ -584,7 +584,7 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase {
   }
 
   // Reset the allocation pointer.
-  void EvacuatePrologue();
+  void EvacuatePrologue() {}
   void EvacuateEpilogue() { allocated_linear_areas_ = 0; }
 
   // When inline allocation stepping is active, either because of incremental
@@ -23,6 +23,7 @@
 #include "src/heap/read-only-heap.h"
 #include "src/heap/safepoint.h"
 #include "src/heap/spaces.h"
+#include "src/heap/sweeper.h"
 #include "src/logging/runtime-call-stats-scope.h"
 #include "src/objects/string.h"
 #include "src/utils/utils.h"
@@ -138,17 +139,17 @@ void PagedSpaceBase::TearDown() {
   accounting_stats_.Clear();
 }
 
-void PagedSpaceBase::RefillFreeList() {
+void PagedSpaceBase::RefillFreeList(Sweeper* sweeper) {
   // Any PagedSpace might invoke RefillFreeList. We filter all but our old
   // generation spaces out.
   DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE ||
          identity() == MAP_SPACE || identity() == NEW_SPACE);
-  MarkCompactCollector* collector = heap()->mark_compact_collector();
 
   size_t added = 0;
 
   {
     Page* p = nullptr;
-    while ((p = collector->sweeper()->GetSweptPageSafe(this)) != nullptr) {
+    while ((p = sweeper->GetSweptPageSafe(this)) != nullptr) {
       // We regularly sweep NEVER_ALLOCATE_ON_PAGE pages. We drop the freelist
       // entries here to make them unavailable for allocations.
       if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
@@ -162,6 +163,7 @@ void PagedSpaceBase::RefillFreeList() {
       // during compaction.
       if (is_compaction_space()) {
         DCHECK_NE(this, p->owner());
+        DCHECK_NE(NEW_SPACE, identity());
         PagedSpaceBase* owner = reinterpret_cast<PagedSpaceBase*>(p->owner());
         base::MutexGuard guard(owner->mutex());
         owner->RefineAllocatedBytesAfterSweeping(p);
@@ -282,7 +284,7 @@ bool PagedSpaceBase::ContainsSlow(Address addr) const {
 void PagedSpaceBase::RefineAllocatedBytesAfterSweeping(Page* page) {
   CHECK(page->SweepingDone());
   auto marking_state =
-      heap()->incremental_marking()->non_atomic_marking_state();
+      heap()->mark_compact_collector()->non_atomic_marking_state();
   // The live_byte on the page was accounted in the space allocated
   // bytes counter. After sweeping allocated_bytes() contains the
   // accurate live byte count on the page.
@@ -329,7 +331,13 @@ void PagedSpaceBase::RemovePage(Page* page) {
   if (identity() == NEW_SPACE) {
     page->ReleaseFreeListCategories();
   }
-  DecreaseAllocatedBytes(page->allocated_bytes(), page);
+  // Pages are only removed from new space when they are promoted to old space
+  // during a GC. This happens after sweeping has started and the allocation
+  // counters have been reset.
+  DCHECK_IMPLIES(identity() == NEW_SPACE, Size() == 0);
+  if (identity() != NEW_SPACE) {
+    DecreaseAllocatedBytes(page->allocated_bytes(), page);
+  }
   DecreaseCapacity(page->area_size());
   AccountUncommitted(page->size());
   for (size_t i = 0; i < ExternalBackingStoreType::kNumTypes; i++) {
@@ -662,7 +670,7 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
   if (collector->sweeping_in_progress()) {
     // First try to refill the free-list, concurrent sweeper threads
     // may have freed some objects in the meantime.
-    RefillFreeList();
+    RefillFreeList(collector->sweeper());
 
     // Retry the free list allocation.
     result = TryAllocationFromFreeListBackground(min_size_in_bytes,
@@ -677,7 +685,8 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
         identity(), Sweeper::SweepingMode::kLazyOrConcurrent,
         static_cast<int>(min_size_in_bytes), kMaxPagesToSweep);
 
-    RefillFreeList();
+    // Keep new space sweeping atomic.
+    RefillFreeList(collector->sweeper());
 
     if (static_cast<size_t>(max_freed) >= min_size_in_bytes) {
       result = TryAllocationFromFreeListBackground(min_size_in_bytes,
@@ -699,7 +708,7 @@ PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap,
     collector->DrainSweepingWorklistForSpace(identity());
   }
 
-  RefillFreeList();
+  RefillFreeList(collector->sweeper());
 
   // Last try to acquire memory from free list.
   return TryAllocationFromFreeListBackground(min_size_in_bytes,
@@ -985,7 +994,7 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes,
   if (collector->sweeping_in_progress()) {
     // First try to refill the free-list, concurrent sweeper threads
     // may have freed some objects in the meantime.
-    RefillFreeList();
+    RefillFreeList(collector->sweeper());
 
     // Retry the free list allocation.
     if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes),
@@ -1049,7 +1058,7 @@ bool PagedSpaceBase::ContributeToSweepingMain(int required_freed_bytes,
   if (collector->sweeping_in_progress()) {
     collector->sweeper()->ParallelSweepSpace(identity(), sweeping_mode,
                                              required_freed_bytes, max_pages);
-    RefillFreeList();
+    RefillFreeList(collector->sweeper());
     return TryAllocationFromFreeListMain(size_in_bytes, origin);
   }
   return false;
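Threading the Sweeper* through RefillFreeList, instead of reaching for heap()->mark_compact_collector()->sweeper() internally, is what lets the paged new space refill from whichever collector owns the sweep. A call-site sketch; pairing it with Heap::minor_mark_compact_collector() is an assumption, not a hunk shown on this page:

// Sketch (assumption): refilling new space from MinorMC's own sweeper
// rather than the full mark-compact collector's.
heap->paged_new_space()->paged_space()->RefillFreeList(
    heap->minor_mark_compact_collector()->sweeper());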
@@ -31,6 +31,7 @@ class HeapObject;
 class Isolate;
 class ObjectVisitor;
 class PagedSpaceBase;
+class Sweeper;
 
 // -----------------------------------------------------------------------------
 // Heap object iterator in paged spaces.
@@ -211,8 +212,6 @@ class V8_EXPORT_PRIVATE PagedSpaceBase
     accounting_stats_.IncreaseCapacity(bytes);
   }
 
-  void RefineAllocatedBytesAfterSweeping(Page* page);
-
   Page* InitializePage(MemoryChunk* chunk) override;
 
   virtual void ReleasePage(Page* page);
@@ -281,7 +280,7 @@ class V8_EXPORT_PRIVATE PagedSpaceBase
   // Refills the free list from the corresponding free list filled by the
   // sweeper.
-  virtual void RefillFreeList();
+  void RefillFreeList(Sweeper* sweeper);
 
   base::Mutex* mutex() { return &space_mutex_; }
@@ -343,6 +342,8 @@ class V8_EXPORT_PRIVATE PagedSpaceBase
     return !is_compaction_space();
   }
 
+  void RefineAllocatedBytesAfterSweeping(Page* page);
+
  protected:
   void UpdateInlineAllocationLimit(size_t min_size) override;
@@ -76,10 +76,7 @@ bool Heap::CreateHeapObjects() {
   // Create initial maps.
   if (!CreateInitialMaps()) return false;
   if (FLAG_minor_mc && new_space()) {
-    PagedNewSpace::From(new_space())
-        ->paged_space()
-        ->free_list()
-        ->RepairLists(this);
+    paged_new_space()->paged_space()->free_list()->RepairLists(this);
   }
   CreateApiObjects();
@@ -7,6 +7,7 @@
 #include <memory>
 #include <vector>
 
+#include "src/base/logging.h"
 #include "src/common/globals.h"
 #include "src/execution/vm-state-inl.h"
 #include "src/heap/base/active-system-pages.h"
@@ -16,28 +17,41 @@
 #include "src/heap/gc-tracer.h"
 #include "src/heap/invalidated-slots-inl.h"
 #include "src/heap/mark-compact-inl.h"
+#include "src/heap/new-spaces.h"
+#include "src/heap/paged-spaces.h"
 #include "src/heap/remembered-set.h"
 #include "src/objects/objects-inl.h"
 
 namespace v8 {
 namespace internal {
 
+namespace {
+static const int kInitialLocalPretenuringFeedbackCapacity = 256;
+}  // namespace
+
 class Sweeper::ConcurrentSweeper final {
  public:
-  explicit ConcurrentSweeper(Sweeper* sweeper) : sweeper_(sweeper) {}
+  explicit ConcurrentSweeper(Sweeper* sweeper)
+      : sweeper_(sweeper),
+        local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity) {}
 
   bool ConcurrentSweepSpace(AllocationSpace identity, JobDelegate* delegate) {
     while (!delegate->ShouldYield()) {
       Page* page = sweeper_->GetSweepingPageSafe(identity);
       if (page == nullptr) return true;
-      sweeper_->ParallelSweepPage(page, identity,
-                                  SweepingMode::kLazyOrConcurrent);
+      sweeper_->ParallelSweepPage(page, identity, &local_pretenuring_feedback_,
+                                  SweepingMode::kLazyOrConcurrent);
     }
     return false;
   }
 
+  Heap::PretenuringFeedbackMap* local_pretenuring_feedback() {
+    return &local_pretenuring_feedback_;
+  }
+
  private:
   Sweeper* const sweeper_;
+  Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
 };
 
 class Sweeper::SweeperJob final : public JobTask {
@@ -80,8 +94,7 @@ class Sweeper::SweeperJob final : public JobTask {
     ConcurrentSweeper& sweeper = (*concurrent_sweepers_)[offset];
     for (int i = 0; i < kNumberOfSweepingSpaces; i++) {
       const AllocationSpace space_id = static_cast<AllocationSpace>(
-          FIRST_GROWABLE_PAGED_SPACE +
-          ((i + offset) % kNumberOfSweepingSpaces));
+          FIRST_SWEEPABLE_SPACE + ((i + offset) % kNumberOfSweepingSpaces));
       DCHECK(IsValidSweepingSpace(space_id));
       if (!sweeper.ConcurrentSweepSpace(space_id, delegate)) return;
     }
@@ -96,9 +109,13 @@ Sweeper::Sweeper(Heap* heap, NonAtomicMarkingState* marking_state)
     : heap_(heap),
       marking_state_(marking_state),
       sweeping_in_progress_(false),
-      should_reduce_memory_(false) {}
+      should_reduce_memory_(false),
+      local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity) {}
 
-Sweeper::~Sweeper() { DCHECK(concurrent_sweepers_.empty()); }
+Sweeper::~Sweeper() {
+  DCHECK(concurrent_sweepers_.empty());
+  DCHECK(local_pretenuring_feedback_.empty());
+}
 
 Sweeper::PauseScope::PauseScope(Sweeper* sweeper) : sweeper_(sweeper) {
   if (!sweeper_->sweeping_in_progress()) return;
@@ -143,11 +160,10 @@ void Sweeper::TearDown() {
 }
 
 void Sweeper::StartSweeping() {
+  DCHECK(local_pretenuring_feedback_.empty());
   sweeping_in_progress_ = true;
   should_reduce_memory_ = heap_->ShouldReduceMemory();
-  NonAtomicMarkingState* marking_state =
-      heap_->mark_compact_collector()->non_atomic_marking_state();
-  ForAllSweepingSpaces([this, marking_state](AllocationSpace space) {
+  ForAllSweepingSpaces([this](AllocationSpace space) {
     // Sorting is done in order to make compaction more efficient: by sweeping
     // pages with the most free bytes first, we make it more likely that when
     // evacuating a page, already swept pages will have enough free bytes to
@@ -158,7 +174,7 @@ void Sweeper::StartSweeping() {
     int space_index = GetSweepSpaceIndex(space);
     std::sort(
         sweeping_list_[space_index].begin(), sweeping_list_[space_index].end(),
-        [marking_state](Page* a, Page* b) {
+        [marking_state = marking_state_](Page* a, Page* b) {
           return marking_state->live_bytes(a) > marking_state->live_bytes(b);
         });
   });
@@ -198,13 +214,13 @@ Page* Sweeper::GetSweptPageSafe(PagedSpaceBase* space) {
   return nullptr;
 }
 
-void Sweeper::EnsureCompleted() {
+void Sweeper::EnsureCompleted(SweepingMode sweeping_mode) {
   if (!sweeping_in_progress_) return;
 
   // If sweeping is not completed or not running at all, we try to complete it
   // here.
-  ForAllSweepingSpaces([this](AllocationSpace space) {
-    ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0);
+  ForAllSweepingSpaces([this, sweeping_mode](AllocationSpace space) {
+    ParallelSweepSpace(space, sweeping_mode, 0);
   });
 
   if (job_handle_ && job_handle_->IsValid()) job_handle_->Join();
@@ -213,7 +229,14 @@ void Sweeper::EnsureCompleted() {
     CHECK(sweeping_list_[GetSweepSpaceIndex(space)].empty());
   });
 
+  heap_->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
+  for (ConcurrentSweeper& concurrent_sweeper : concurrent_sweepers_) {
+    heap_->MergeAllocationSitePretenuringFeedback(
+        *concurrent_sweeper.local_pretenuring_feedback());
+  }
+  local_pretenuring_feedback_.clear();
+
   concurrent_sweepers_.clear();
   sweeping_in_progress_ = false;
 }
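The pretenuring plumbing is the classic accumulate-locally, merge-once pattern: the main thread and every ConcurrentSweeper write to a private feedback map, so the hot sweeping loop takes no locks, and EnsureCompleted above merges the maps back on the main thread. A self-contained sketch of the pattern in generic C++ (not V8 code):

#include <unordered_map>
#include <vector>

// site -> number of mementos found; stands in for PretenuringFeedbackMap.
using FeedbackMap = std::unordered_map<const void*, int>;

struct Worker {
  FeedbackMap local;  // analogous to ConcurrentSweeper's private map
};

// Analogous to the merge loop in EnsureCompleted: runs once, on the owning
// thread, after all workers have stopped, so no synchronization is needed.
void MergeFeedback(FeedbackMap& global, std::vector<Worker>& workers) {
  for (Worker& worker : workers) {
    for (const auto& [site, count] : worker.local) global[site] += count;
    worker.local.clear();
  }
}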
@@ -322,13 +345,17 @@ void Sweeper::ClearMarkBitsAndHandleLivenessStatistics(Page* page,
   DCHECK_EQ(live_bytes, page->allocated_bytes());
 }
 
-int Sweeper::RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
-                      SweepingMode sweeping_mode,
-                      const base::MutexGuard& page_guard) {
+int Sweeper::RawSweep(
+    Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
+    SweepingMode sweeping_mode, const base::MutexGuard& page_guard,
+    Heap::PretenuringFeedbackMap* local_pretenuring_feedback) {
   Space* space = p->owner();
   DCHECK_NOT_NULL(space);
   DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
-         space->identity() == MAP_SPACE);
+         space->identity() == MAP_SPACE ||
+         (space->identity() == NEW_SPACE && FLAG_minor_mc));
+  DCHECK_IMPLIES(space->identity() == NEW_SPACE,
+                 sweeping_mode == SweepingMode::kEagerDuringGC);
   DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
 
   // Phase 1: Prepare the page for sweeping.
@@ -410,6 +437,10 @@ int Sweeper::RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
       live_bytes += size;
       free_start = free_end + size;
 
+      if (p->InYoungGeneration()) {
+        heap_->UpdateAllocationSite(map, object, local_pretenuring_feedback);
+      }
+
       if (active_system_pages_after_sweeping) {
         active_system_pages_after_sweeping->Add(
             free_end - p->address(), free_start - p->address(),
@@ -456,7 +487,9 @@ int Sweeper::RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
 size_t Sweeper::ConcurrentSweepingPageCount() {
   base::MutexGuard guard(&mutex_);
   return sweeping_list_[GetSweepSpaceIndex(OLD_SPACE)].size() +
-         sweeping_list_[GetSweepSpaceIndex(MAP_SPACE)].size();
+         sweeping_list_[GetSweepSpaceIndex(MAP_SPACE)].size() +
+         (FLAG_minor_mc ? sweeping_list_[GetSweepSpaceIndex(NEW_SPACE)].size()
+                        : 0);
 }
 
 int Sweeper::ParallelSweepSpace(AllocationSpace identity,
@@ -466,7 +499,8 @@ int Sweeper::ParallelSweepSpace(AllocationSpace identity,
   int pages_freed = 0;
   Page* page = nullptr;
   while ((page = GetSweepingPageSafe(identity)) != nullptr) {
-    int freed = ParallelSweepPage(page, identity, sweeping_mode);
+    int freed = ParallelSweepPage(page, identity, &local_pretenuring_feedback_,
+                                  sweeping_mode);
     ++pages_freed;
     if (page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
       // Free list of a never-allocate page will be dropped later on.
@@ -481,7 +515,9 @@ int Sweeper::ParallelSweepSpace(AllocationSpace identity,
   return max_freed;
 }
 
-int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity,
-                               SweepingMode sweeping_mode) {
+int Sweeper::ParallelSweepPage(
+    Page* page, AllocationSpace identity,
+    Heap::PretenuringFeedbackMap* local_pretenuring_feedback,
+    SweepingMode sweeping_mode) {
   DCHECK(IsValidSweepingSpace(identity));
@@ -503,7 +539,8 @@ int Sweeper::ParallelSweepPage(Page* page, AllocationSpace identity,
     const FreeSpaceTreatmentMode free_space_treatment_mode =
         Heap::ShouldZapGarbage() ? FreeSpaceTreatmentMode::kZapFreeSpace
                                  : FreeSpaceTreatmentMode::kIgnoreFreeSpace;
-    max_freed = RawSweep(page, free_space_treatment_mode, sweeping_mode, guard);
+    max_freed = RawSweep(page, free_space_treatment_mode, sweeping_mode, guard,
+                         local_pretenuring_feedback);
     DCHECK(page->SweepingDone());
   }
 
@@ -522,7 +559,8 @@ void Sweeper::EnsurePageIsSwept(Page* page) {
   if (IsValidSweepingSpace(space)) {
     if (TryRemoveSweepingPageSafe(space, page)) {
      // Page was successfully removed and can now be swept.
-      ParallelSweepPage(page, space, SweepingMode::kLazyOrConcurrent);
+      ParallelSweepPage(page, space, &local_pretenuring_feedback_,
+                        SweepingMode::kLazyOrConcurrent);
     } else {
       // Some sweeper task already took ownership of that page, wait until
       // sweeping is finished.
@@ -578,8 +616,14 @@ void Sweeper::PrepareToBeSweptPage(AllocationSpace space, Page* page) {
   });
 #endif  // DEBUG
   page->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kPending);
-  heap_->paged_space(space)->IncreaseAllocatedBytes(
-      marking_state_->live_bytes(page), page);
+  PagedSpaceBase* paged_space;
+  if (space == NEW_SPACE) {
+    DCHECK(FLAG_minor_mc);
+    paged_space = heap_->paged_new_space()->paged_space();
+  } else {
+    paged_space = heap_->paged_space(space);
+  }
+  paged_space->IncreaseAllocatedBytes(marking_state_->live_bytes(page), page);
 }
 
 Page* Sweeper::GetSweepingPageSafe(AllocationSpace space) {
@@ -11,6 +11,8 @@
 #include "src/base/platform/condition-variable.h"
 #include "src/base/platform/semaphore.h"
 #include "src/common/globals.h"
+#include "src/flags/flags.h"
+#include "src/heap/heap.h"
 #include "src/heap/slot-set.h"
 #include "src/tasks/cancelable-task.h"
@@ -85,20 +87,24 @@ class Sweeper {
   int ParallelSweepSpace(AllocationSpace identity, SweepingMode sweeping_mode,
                          int required_freed_bytes, int max_pages = 0);
-  int ParallelSweepPage(Page* page, AllocationSpace identity,
-                        SweepingMode sweeping_mode);
+  int ParallelSweepPage(
+      Page* page, AllocationSpace identity,
+      Heap::PretenuringFeedbackMap* local_pretenuring_feedback,
+      SweepingMode sweeping_mode);
 
   void EnsurePageIsSwept(Page* page);
 
   int RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode,
-               SweepingMode sweeping_mode, const base::MutexGuard& page_guard);
+               SweepingMode sweeping_mode, const base::MutexGuard& page_guard,
+               Heap::PretenuringFeedbackMap* local_pretenuring_feedback);
 
   // After calling this function sweeping is considered to be in progress
   // and the main thread can sweep lazily, but the background sweeper tasks
   // are not running yet.
   void StartSweeping();
   V8_EXPORT_PRIVATE void StartSweeperTasks();
-  void EnsureCompleted();
+  void EnsureCompleted(
+      SweepingMode sweeping_mode = SweepingMode::kLazyOrConcurrent);
   void DrainSweepingWorklistForSpace(AllocationSpace space);
   bool AreSweeperTasksRunning();
@@ -107,16 +113,21 @@ class Sweeper {
   Page* GetSweptPageSafe(PagedSpaceBase* space);
 
+  NonAtomicMarkingState* marking_state() const { return marking_state_; }
+
  private:
   class ConcurrentSweeper;
   class SweeperJob;
 
   static const int kNumberOfSweepingSpaces =
-      LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
+      LAST_SWEEPABLE_SPACE - FIRST_SWEEPABLE_SPACE + 1;
   static constexpr int kMaxSweeperTasks = 3;
 
   template <typename Callback>
   void ForAllSweepingSpaces(Callback callback) const {
+    if (FLAG_minor_mc) {
+      callback(NEW_SPACE);
+    }
     callback(OLD_SPACE);
     callback(CODE_SPACE);
     callback(MAP_SPACE);
@@ -165,13 +176,12 @@ class Sweeper {
   void PrepareToBeSweptPage(AllocationSpace space, Page* page);
 
   static bool IsValidSweepingSpace(AllocationSpace space) {
-    return space >= FIRST_GROWABLE_PAGED_SPACE &&
-           space <= LAST_GROWABLE_PAGED_SPACE;
+    return space >= FIRST_SWEEPABLE_SPACE && space <= LAST_SWEEPABLE_SPACE;
   }
 
   static int GetSweepSpaceIndex(AllocationSpace space) {
     DCHECK(IsValidSweepingSpace(space));
-    return space - FIRST_GROWABLE_PAGED_SPACE;
+    return space - FIRST_SWEEPABLE_SPACE;
   }
 
   int NumberOfConcurrentSweepers() const;
@@ -188,6 +198,7 @@ class Sweeper {
   // path checks this flag to see whether it could support concurrent sweeping.
   std::atomic<bool> sweeping_in_progress_;
   bool should_reduce_memory_;
+  Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
 };
 
 }  // namespace internal
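With the widened range, ForAllSweepingSpaces visits NEW_SPACE first when --minor-mc is enabled, and the sweeping-list indexing keys off FIRST_SWEEPABLE_SPACE. The resulting mapping, derived from the code above and the enum layout in the globals.h hunk at the top:

// Derived from IsValidSweepingSpace/GetSweepSpaceIndex above:
//   OLD_SPACE  -> sweeping_list_[0]
//   CODE_SPACE -> sweeping_list_[1]
//   MAP_SPACE  -> sweeping_list_[2]
//   NEW_SPACE  -> sweeping_list_[3]  (only populated under --minor-mc)
static_assert(LAST_SWEEPABLE_SPACE - FIRST_SWEEPABLE_SPACE + 1 == 4,
              "four sweepable spaces once NEW_SPACE joins the range");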
@@ -577,6 +577,7 @@
   F(MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS)       \
   F(MC_EVACUATE_UPDATE_POINTERS_WEAK)               \
   F(MC_FINISH_SWEEP_NEW_LO)                         \
+  F(MC_FINISH_SWEEP_NEW)                            \
   F(MC_FINISH_SWEEP_ARRAY_BUFFERS)                  \
   F(MC_MARK_CLIENT_HEAPS)                           \
   F(MC_MARK_EMBEDDER_PROLOGUE)                      \
@@ -593,6 +594,7 @@
   F(MC_SWEEP_CODE_LO)                               \
   F(MC_SWEEP_LO)                                    \
   F(MC_SWEEP_MAP)                                   \
+  F(MC_SWEEP_NEW)                                   \
   F(MC_SWEEP_OLD)                                   \
   F(MINOR_MARK_COMPACTOR)                           \
   F(MINOR_MC)                                       \
@@ -610,10 +612,10 @@
   F(MINOR_MC_EVACUATE_UPDATE_POINTERS)              \
   F(MINOR_MC_EVACUATE_UPDATE_POINTERS_PARALLEL)     \
   F(MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS)        \
-  F(MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS) \
   F(MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK)         \
   F(MINOR_MC_FINISH)                                \
   F(MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS)            \
+  F(MINOR_MC_FINISH_SWEEP_NEW)                      \
   F(MINOR_MC_MARK)                                  \
   F(MINOR_MC_MARK_GLOBAL_HANDLES)                   \
   F(MINOR_MC_MARK_PARALLEL)                         \
@@ -622,6 +624,8 @@
   F(MINOR_MC_MARK_WEAK)                             \
   F(MINOR_MC_MARKING_DEQUE)                         \
   F(MINOR_MC_RESET_LIVENESS)                        \
+  F(MINOR_MC_SWEEP)                                 \
+  F(MINOR_MC_SWEEP_NEW)                             \
   F(SAFEPOINT)                                      \
   F(SCAVENGER)                                      \
   F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS)         \
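The new MINOR_MC_SWEEP* and MC_SWEEP_NEW/MC_FINISH_SWEEP_NEW entries give the sweeping work its own tracer buckets, matching the MINOR_MC timings removed from PrintNVP above. Following the existing TRACE_GC pattern, a sweeping phase would be wrapped like this (call-site sketch; the real sites are in the collapsed diff):

// Sketch of the usual GCTracer scope usage for one of the new buckets.
{
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEP);
  Sweep();  // sets up new-space pages and starts sweeping
}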