Commit 0665568d authored by Michael Lippautz; committed by V8 LUCI CQ

cppgc: Discard memory on memory reducing GCs

Add discarding of memory on memory-reducing garbage collections. In
addition, add tracking of discarded memory and properly adjust the
resident memory of heap dumps.

- Memory is discarded during sweeping and the counter is persistent
  across garbage collection cycles.
- Subsequent sweep calls are not supposed to touch the memory anymore.
- As a simplification, discarded memory is tracked on page granularity
  and assumed to be fully paged in as soon as a page's free list entries
  are reused for allocation.

Change-Id: Icfd58f49f3400c4df0d482e20326a0c43c1ca9f5
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3015563
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75677}
parent a73ce1d0
...@@ -460,7 +460,10 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) { ...@@ -460,7 +460,10 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
? cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic ? cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic
: cppgc::internal::Sweeper::SweepingConfig::SweepingType:: : cppgc::internal::Sweeper::SweepingConfig::SweepingType::
kIncrementalAndConcurrent, kIncrementalAndConcurrent,
compactable_space_handling}; compactable_space_handling,
current_flags_ & TraceFlags::kReduceMemory
? cppgc::internal::FreeMemoryHandling::kDiscardWherePossible
: cppgc::internal::FreeMemoryHandling::kDoNotDiscard};
DCHECK_IMPLIES( DCHECK_IMPLIES(
!isolate_, !isolate_,
cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic == cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic ==
......
...@@ -60,7 +60,7 @@ FreeList& FreeList::operator=(FreeList&& other) V8_NOEXCEPT { ...@@ -60,7 +60,7 @@ FreeList& FreeList::operator=(FreeList&& other) V8_NOEXCEPT {
return *this; return *this;
} }
void FreeList::Add(FreeList::Block block) { Address FreeList::Add(FreeList::Block block) {
const size_t size = block.size; const size_t size = block.size;
DCHECK_GT(kPageSize, size); DCHECK_GT(kPageSize, size);
DCHECK_LE(sizeof(HeapObjectHeader), size); DCHECK_LE(sizeof(HeapObjectHeader), size);
...@@ -73,7 +73,7 @@ void FreeList::Add(FreeList::Block block) { ...@@ -73,7 +73,7 @@ void FreeList::Add(FreeList::Block block) {
// zeroing it out. // zeroing it out.
ASAN_UNPOISON_MEMORY_REGION(block.address, sizeof(HeapObjectHeader)); ASAN_UNPOISON_MEMORY_REGION(block.address, sizeof(HeapObjectHeader));
new (block.address) HeapObjectHeader(size, kFreeListGCInfoIndex); new (block.address) HeapObjectHeader(size, kFreeListGCInfoIndex);
return; return reinterpret_cast<Address>(block.address) + block.size;
} }
// Make sure the freelist header is writable. SET_MEMORY_ACCESSIBLE is not // Make sure the freelist header is writable. SET_MEMORY_ACCESSIBLE is not
...@@ -86,6 +86,7 @@ void FreeList::Add(FreeList::Block block) { ...@@ -86,6 +86,7 @@ void FreeList::Add(FreeList::Block block) {
if (!entry->Next()) { if (!entry->Next()) {
free_list_tails_[index] = entry; free_list_tails_[index] = entry;
} }
return reinterpret_cast<Address>(block.address) + sizeof(Entry);
} }
void FreeList::Append(FreeList&& other) { void FreeList::Append(FreeList&& other) {
......
...@@ -34,7 +34,9 @@ class V8_EXPORT_PRIVATE FreeList { ...@@ -34,7 +34,9 @@ class V8_EXPORT_PRIVATE FreeList {
Block Allocate(size_t); Block Allocate(size_t);
// Adds block to the freelist. The minimal block size is two words. // Adds block to the freelist. The minimal block size is two words.
void Add(Block); // Returns the start of the free list payload that will not be accessed by
// the free list itself.
Address Add(Block);
// Append other freelist into this. // Append other freelist into this.
void Append(FreeList&&); void Append(FreeList&&);
......
...@@ -45,6 +45,13 @@ const BasePage* BasePage::FromInnerAddress(const HeapBase* heap, ...@@ -45,6 +45,13 @@ const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
// static // static
void BasePage::Destroy(BasePage* page) { void BasePage::Destroy(BasePage* page) {
if (page->discarded_memory()) {
page->space()
.raw_heap()
->heap()
->stats_collector()
->DecrementDiscardedMemory(page->discarded_memory());
}
if (page->is_large()) { if (page->is_large()) {
LargePage::Destroy(LargePage::From(page)); LargePage::Destroy(LargePage::From(page));
} else { } else {
......
...@@ -78,6 +78,13 @@ class V8_EXPORT_PRIVATE BasePage { ...@@ -78,6 +78,13 @@ class V8_EXPORT_PRIVATE BasePage {
#endif #endif
} }
// Accounts `value` additional bytes of this page's payload as discarded
// (returned to the OS by the sweeper).
void IncrementDiscardedMemory(size_t value) {
  // Guards against counter overflow.
  DCHECK_GE(discarded_memory_ + value, discarded_memory_);
  discarded_memory_ += value;
}
// Clears the per-page discarded-memory counter, e.g. when the page's memory
// is assumed to be paged in again.
void ResetDiscardedMemory() { discarded_memory_ = 0; }
// Returns the number of bytes of this page currently tracked as discarded.
size_t discarded_memory() const { return discarded_memory_; }
protected: protected:
enum class PageType : uint8_t { kNormal, kLarge }; enum class PageType : uint8_t { kNormal, kLarge };
BasePage(HeapBase&, BaseSpace&, PageType); BasePage(HeapBase&, BaseSpace&, PageType);
...@@ -86,6 +93,7 @@ class V8_EXPORT_PRIVATE BasePage { ...@@ -86,6 +93,7 @@ class V8_EXPORT_PRIVATE BasePage {
HeapBase& heap_; HeapBase& heap_;
BaseSpace& space_; BaseSpace& space_;
PageType type_; PageType type_;
size_t discarded_memory_ = 0;
}; };
class V8_EXPORT_PRIVATE NormalPage final : public BasePage { class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
......
...@@ -179,6 +179,13 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size, ...@@ -179,6 +179,13 @@ void* ObjectAllocator::AllocateFromFreeList(NormalPageSpace& space, size_t size,
const FreeList::Block entry = space.free_list().Allocate(size); const FreeList::Block entry = space.free_list().Allocate(size);
if (!entry.address) return nullptr; if (!entry.address) return nullptr;
// Assume discarded memory on that page is now zero.
auto& page = *NormalPage::From(BasePage::FromPayload(entry.address));
if (page.discarded_memory()) {
stats_collector_->DecrementDiscardedMemory(page.discarded_memory());
page.ResetDiscardedMemory();
}
ReplaceLinearAllocationBuffer(space, *stats_collector_, ReplaceLinearAllocationBuffer(space, *stats_collector_,
static_cast<Address>(entry.address), static_cast<Address>(entry.address),
entry.size); entry.size);
......
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
#include "src/heap/cppgc/stats-collector.h" #include "src/heap/cppgc/stats-collector.h"
#include <algorithm> #include <algorithm>
#include <atomic>
#include <cmath> #include <cmath>
#include "src/base/atomicops.h" #include "src/base/atomicops.h"
...@@ -309,6 +310,28 @@ void StatsCollector::NotifyFreedMemory(int64_t size) { ...@@ -309,6 +310,28 @@ void StatsCollector::NotifyFreedMemory(int64_t size) {
#endif // DEBUG #endif // DEBUG
} }
// Adds `value` bytes to the heap-global discarded-memory counter. Uses a
// relaxed atomic since it may be called from the concurrent sweeper as well
// as the mutator thread.
void StatsCollector::IncrementDiscardedMemory(size_t value) {
  const size_t old =
      discarded_bytes_.fetch_add(value, std::memory_order_relaxed);
  // Detects counter overflow (wrap-around) in debug builds.
  DCHECK_GE(old + value, old);
  USE(old);
}
// Subtracts `value` bytes from the heap-global discarded-memory counter,
// e.g. when a page is destroyed or its free-list memory is reused.
void StatsCollector::DecrementDiscardedMemory(size_t value) {
  const size_t old =
      discarded_bytes_.fetch_sub(value, std::memory_order_relaxed);
  // Detects counter underflow: if value > old, old - value wraps and the
  // check fails in debug builds.
  DCHECK_GE(old, old - value);
  USE(old);
}
// Resets the discarded-memory counter; used at the start of a memory-reducing
// sweep, after which the counter is recomputed from scratch.
void StatsCollector::ResetDiscardedMemory() {
  discarded_bytes_.store(0, std::memory_order_relaxed);
}
// Returns the current number of discarded bytes across the heap.
size_t StatsCollector::discarded_memory() const {
  return discarded_bytes_.load(std::memory_order_relaxed);
}
void StatsCollector::RecordHistogramSample(ScopeId scope_id_, void StatsCollector::RecordHistogramSample(ScopeId scope_id_,
v8::base::TimeDelta time) { v8::base::TimeDelta time) {
switch (scope_id_) { switch (scope_id_) {
......
...@@ -8,6 +8,7 @@ ...@@ -8,6 +8,7 @@
#include <stddef.h> #include <stddef.h>
#include <stdint.h> #include <stdint.h>
#include <atomic>
#include <vector> #include <vector>
#include "include/cppgc/platform.h" #include "include/cppgc/platform.h"
...@@ -293,6 +294,11 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -293,6 +294,11 @@ class V8_EXPORT_PRIVATE StatsCollector final {
void NotifyAllocatedMemory(int64_t); void NotifyAllocatedMemory(int64_t);
void NotifyFreedMemory(int64_t); void NotifyFreedMemory(int64_t);
void IncrementDiscardedMemory(size_t);
void DecrementDiscardedMemory(size_t);
void ResetDiscardedMemory();
size_t discarded_memory() const;
void SetMetricRecorder(std::unique_ptr<MetricRecorder> histogram_recorder) { void SetMetricRecorder(std::unique_ptr<MetricRecorder> histogram_recorder) {
metric_recorder_ = std::move(histogram_recorder); metric_recorder_ = std::move(histogram_recorder);
} }
...@@ -331,6 +337,7 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -331,6 +337,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
int64_t memory_allocated_bytes_ = 0; int64_t memory_allocated_bytes_ = 0;
int64_t memory_freed_bytes_since_end_of_marking_ = 0; int64_t memory_freed_bytes_since_end_of_marking_ = 0;
std::atomic<size_t> discarded_bytes_{0};
// vector to allow fast iteration of observers. Register/Unregisters only // vector to allow fast iteration of observers. Register/Unregisters only
// happens on startup/teardown. // happens on startup/teardown.
......
...@@ -63,6 +63,70 @@ class ObjectStartBitmapVerifier ...@@ -63,6 +63,70 @@ class ObjectStartBitmapVerifier
HeapObjectHeader* prev_ = nullptr; HeapObjectHeader* prev_ = nullptr;
}; };
// Interface for handlers that return swept memory to a free list. Concrete
// implementations decide whether the free-list payload is additionally
// discarded (released back to the OS) or not.
class FreeHandlerBase {
 public:
  virtual ~FreeHandlerBase() = default;
  // Adds every block of `unfinalized_free_list` to the free list.
  virtual void FreeFreeList(
      std::vector<FreeList::Block>& unfinalized_free_list) = 0;
};
// Free handler that adds a block to the free list and additionally discards
// (releases to the OS) the fully-unused committed pages inside the block,
// accounting the discarded bytes on both the page and the heap's stats
// collector.
class DiscardingFreeHandler : public FreeHandlerBase {
 public:
  DiscardingFreeHandler(PageAllocator& page_allocator, FreeList& free_list,
                        BasePage& page)
      : page_allocator_(page_allocator), free_list_(free_list), page_(page) {}

  void Free(FreeList::Block block) {
    // FreeList::Add() returns the begin of the payload that the free list
    // itself will not touch again; only whole commit pages within
    // [payload_begin, block_end) are eligible for discarding.
    const uintptr_t aligned_begin_unused =
        RoundUp(reinterpret_cast<uintptr_t>(free_list_.Add(block)),
                page_allocator_.CommitPageSize());
    const uintptr_t aligned_end_unused =
        RoundDown(reinterpret_cast<uintptr_t>(block.address) + block.size,
                  page_allocator_.CommitPageSize());
    if (aligned_begin_unused < aligned_end_unused) {
      // Reuse the named size instead of recomputing the difference below.
      const size_t discarded_size = aligned_end_unused - aligned_begin_unused;
      page_allocator_.DiscardSystemPages(
          reinterpret_cast<void*>(aligned_begin_unused), discarded_size);
      page_.IncrementDiscardedMemory(discarded_size);
      page_.space()
          .raw_heap()
          ->heap()
          ->stats_collector()
          ->IncrementDiscardedMemory(discarded_size);
    }
  }

  void FreeFreeList(std::vector<FreeList::Block>& unfinalized_free_list) final {
    for (auto entry : unfinalized_free_list) {
      Free(std::move(entry));
    }
  }

 private:
  PageAllocator& page_allocator_;
  FreeList& free_list_;
  BasePage& page_;
};
// Free handler that only adds the block to the free list; no memory is
// returned to the OS. The constructor signature mirrors
// DiscardingFreeHandler so the templated finalization builders can construct
// either handler uniformly; the unused parameter names are commented out to
// avoid unused-parameter warnings.
class RegularFreeHandler : public FreeHandlerBase {
 public:
  RegularFreeHandler(PageAllocator& /* page_allocator */, FreeList& free_list,
                     BasePage& /* page */)
      : free_list_(free_list) {}

  void Free(FreeList::Block block) { free_list_.Add(std::move(block)); }

  void FreeFreeList(std::vector<FreeList::Block>& unfinalized_free_list) final {
    for (auto entry : unfinalized_free_list) {
      Free(std::move(entry));
    }
  }

 private:
  FreeList& free_list_;
};
template <typename T> template <typename T>
class ThreadSafeStack { class ThreadSafeStack {
public: public:
...@@ -121,15 +185,22 @@ void StickyUnmark(HeapObjectHeader* header) { ...@@ -121,15 +185,22 @@ void StickyUnmark(HeapObjectHeader* header) {
#endif #endif
} }
// Builder that finalizes objects and adds freelist entries right away. class InlinedFinalizationBuilderBase {
class InlinedFinalizationBuilder final {
public: public:
struct ResultType { struct ResultType {
bool is_empty = false; bool is_empty = false;
size_t largest_new_free_list_entry = 0; size_t largest_new_free_list_entry = 0;
}; };
};
explicit InlinedFinalizationBuilder(BasePage* page) : page_(page) {} // Builder that finalizes objects and adds freelist entries right away.
template <typename FreeHandler>
class InlinedFinalizationBuilder final : public InlinedFinalizationBuilderBase,
public FreeHandler {
public:
InlinedFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
: FreeHandler(page_allocator,
NormalPageSpace::From(page.space()).free_list(), page) {}
void AddFinalizer(HeapObjectHeader* header, size_t size) { void AddFinalizer(HeapObjectHeader* header, size_t size) {
header->Finalize(); header->Finalize();
...@@ -137,23 +208,24 @@ class InlinedFinalizationBuilder final { ...@@ -137,23 +208,24 @@ class InlinedFinalizationBuilder final {
} }
void AddFreeListEntry(Address start, size_t size) { void AddFreeListEntry(Address start, size_t size) {
NormalPageSpace::From(page_->space()).free_list().Add({start, size}); FreeHandler::Free({start, size});
} }
ResultType GetResult(bool is_empty, size_t largest_new_free_list_entry) { ResultType GetResult(bool is_empty, size_t largest_new_free_list_entry) {
return {is_empty, largest_new_free_list_entry}; return {is_empty, largest_new_free_list_entry};
} }
private:
BasePage* page_;
}; };
// Builder that produces results for deferred processing. // Builder that produces results for deferred processing.
class DeferredFinalizationBuilder final { template <typename FreeHandler>
class DeferredFinalizationBuilder final : public FreeHandler {
public: public:
using ResultType = SpaceState::SweptPageState; using ResultType = SpaceState::SweptPageState;
explicit DeferredFinalizationBuilder(BasePage* page) { result_.page = page; } DeferredFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
: FreeHandler(page_allocator, result_.cached_free_list, page) {
result_.page = &page;
}
void AddFinalizer(HeapObjectHeader* header, size_t size) { void AddFinalizer(HeapObjectHeader* header, size_t size) {
if (header->IsFinalizable()) { if (header->IsFinalizable()) {
...@@ -168,7 +240,7 @@ class DeferredFinalizationBuilder final { ...@@ -168,7 +240,7 @@ class DeferredFinalizationBuilder final {
if (found_finalizer_) { if (found_finalizer_) {
result_.unfinalized_free_list.push_back({start, size}); result_.unfinalized_free_list.push_back({start, size});
} else { } else {
result_.cached_free_list.Add({start, size}); FreeHandler::Free({start, size});
} }
found_finalizer_ = false; found_finalizer_ = false;
} }
...@@ -185,9 +257,10 @@ class DeferredFinalizationBuilder final { ...@@ -185,9 +257,10 @@ class DeferredFinalizationBuilder final {
}; };
template <typename FinalizationBuilder> template <typename FinalizationBuilder>
typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) { typename FinalizationBuilder::ResultType SweepNormalPage(
NormalPage* page, PageAllocator& page_allocator) {
constexpr auto kAtomicAccess = AccessMode::kAtomic; constexpr auto kAtomicAccess = AccessMode::kAtomic;
FinalizationBuilder builder(page); FinalizationBuilder builder(*page, page_allocator);
PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap(); PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap();
bitmap.Clear(); bitmap.Clear();
...@@ -203,6 +276,8 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) { ...@@ -203,6 +276,8 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) {
// Check if this is a free list entry. // Check if this is a free list entry.
if (header->IsFree<kAtomicAccess>()) { if (header->IsFree<kAtomicAccess>()) {
SetMemoryInaccessible(header, std::min(kFreeListEntrySize, size)); SetMemoryInaccessible(header, std::min(kFreeListEntrySize, size));
// This prevents memory from being discarded in configurations where
// `CheckMemoryIsInaccessibleIsNoop()` is false.
CheckMemoryIsInaccessible(header, size); CheckMemoryIsInaccessible(header, size);
begin += size; begin += size;
continue; continue;
...@@ -249,7 +324,9 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) { ...@@ -249,7 +324,9 @@ typename FinalizationBuilder::ResultType SweepNormalPage(NormalPage* page) {
// - merges freelists to the space's freelist. // - merges freelists to the space's freelist.
class SweepFinalizer final { class SweepFinalizer final {
public: public:
explicit SweepFinalizer(cppgc::Platform* platform) : platform_(platform) {} SweepFinalizer(cppgc::Platform* platform,
FreeMemoryHandling free_memory_handling)
: platform_(platform), free_memory_handling_(free_memory_handling) {}
void FinalizeHeap(SpaceStates* space_states) { void FinalizeHeap(SpaceStates* space_states) {
for (SpaceState& space_state : *space_states) { for (SpaceState& space_state : *space_states) {
...@@ -308,9 +385,13 @@ class SweepFinalizer final { ...@@ -308,9 +385,13 @@ class SweepFinalizer final {
space_freelist.Append(std::move(page_state->cached_free_list)); space_freelist.Append(std::move(page_state->cached_free_list));
// Merge freelist with finalizers. // Merge freelist with finalizers.
for (auto entry : page_state->unfinalized_free_list) { std::unique_ptr<FreeHandlerBase> handler =
space_freelist.Add(std::move(entry)); (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
} ? std::unique_ptr<FreeHandlerBase>(new DiscardingFreeHandler(
*platform_->GetPageAllocator(), space_freelist, *page))
: std::unique_ptr<FreeHandlerBase>(new RegularFreeHandler(
*platform_->GetPageAllocator(), space_freelist, *page));
handler->FreeFreeList(page_state->unfinalized_free_list);
largest_new_free_list_entry_ = std::max( largest_new_free_list_entry_ = std::max(
page_state->largest_new_free_list_entry, largest_new_free_list_entry_); page_state->largest_new_free_list_entry, largest_new_free_list_entry_);
...@@ -326,14 +407,18 @@ class SweepFinalizer final { ...@@ -326,14 +407,18 @@ class SweepFinalizer final {
private: private:
cppgc::Platform* platform_; cppgc::Platform* platform_;
size_t largest_new_free_list_entry_ = 0; size_t largest_new_free_list_entry_ = 0;
const FreeMemoryHandling free_memory_handling_;
}; };
class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
friend class HeapVisitor<MutatorThreadSweeper>; friend class HeapVisitor<MutatorThreadSweeper>;
public: public:
explicit MutatorThreadSweeper(SpaceStates* states, cppgc::Platform* platform) MutatorThreadSweeper(SpaceStates* states, cppgc::Platform* platform,
: states_(states), platform_(platform) {} FreeMemoryHandling free_memory_handling)
: states_(states),
platform_(platform),
free_memory_handling_(free_memory_handling) {}
void Sweep() { void Sweep() {
for (SpaceState& state : *states_) { for (SpaceState& state : *states_) {
...@@ -357,7 +442,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { ...@@ -357,7 +442,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
if (remaining_budget <= 0.) return false; if (remaining_budget <= 0.) return false;
// First, prioritize finalization of pages that were swept concurrently. // First, prioritize finalization of pages that were swept concurrently.
SweepFinalizer finalizer(platform_); SweepFinalizer finalizer(platform_, free_memory_handling_);
if (!finalizer.FinalizeSpaceWithDeadline(&state, deadline_in_seconds)) { if (!finalizer.FinalizeSpaceWithDeadline(&state, deadline_in_seconds)) {
return false; return false;
} }
...@@ -391,8 +476,16 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { ...@@ -391,8 +476,16 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
} }
bool VisitNormalPage(NormalPage& page) { bool VisitNormalPage(NormalPage& page) {
const InlinedFinalizationBuilder::ResultType result = if (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible) {
SweepNormalPage<InlinedFinalizationBuilder>(&page); page.ResetDiscardedMemory();
}
const auto result =
(free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
? SweepNormalPage<
InlinedFinalizationBuilder<DiscardingFreeHandler>>(
&page, *platform_->GetPageAllocator())
: SweepNormalPage<InlinedFinalizationBuilder<RegularFreeHandler>>(
&page, *platform_->GetPageAllocator());
if (result.is_empty) { if (result.is_empty) {
NormalPage::Destroy(&page); NormalPage::Destroy(&page);
} else { } else {
...@@ -418,6 +511,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { ...@@ -418,6 +511,7 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
SpaceStates* states_; SpaceStates* states_;
cppgc::Platform* platform_; cppgc::Platform* platform_;
size_t largest_new_free_list_entry_ = 0; size_t largest_new_free_list_entry_ = 0;
const FreeMemoryHandling free_memory_handling_;
}; };
class ConcurrentSweepTask final : public cppgc::JobTask, class ConcurrentSweepTask final : public cppgc::JobTask,
...@@ -425,8 +519,12 @@ class ConcurrentSweepTask final : public cppgc::JobTask, ...@@ -425,8 +519,12 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
friend class HeapVisitor<ConcurrentSweepTask>; friend class HeapVisitor<ConcurrentSweepTask>;
public: public:
explicit ConcurrentSweepTask(HeapBase& heap, SpaceStates* states) ConcurrentSweepTask(HeapBase& heap, SpaceStates* states, Platform* platform,
: heap_(heap), states_(states) {} FreeMemoryHandling free_memory_handling)
: heap_(heap),
states_(states),
platform_(platform),
free_memory_handling_(free_memory_handling) {}
void Run(cppgc::JobDelegate* delegate) final { void Run(cppgc::JobDelegate* delegate) final {
StatsCollector::EnabledConcurrentScope stats_scope( StatsCollector::EnabledConcurrentScope stats_scope(
...@@ -447,8 +545,16 @@ class ConcurrentSweepTask final : public cppgc::JobTask, ...@@ -447,8 +545,16 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
private: private:
bool VisitNormalPage(NormalPage& page) { bool VisitNormalPage(NormalPage& page) {
if (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible) {
page.ResetDiscardedMemory();
}
SpaceState::SweptPageState sweep_result = SpaceState::SweptPageState sweep_result =
SweepNormalPage<DeferredFinalizationBuilder>(&page); (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
? SweepNormalPage<
DeferredFinalizationBuilder<DiscardingFreeHandler>>(
&page, *platform_->GetPageAllocator())
: SweepNormalPage<DeferredFinalizationBuilder<RegularFreeHandler>>(
&page, *platform_->GetPageAllocator());
const size_t space_index = page.space().index(); const size_t space_index = page.space().index();
DCHECK_GT(states_->size(), space_index); DCHECK_GT(states_->size(), space_index);
SpaceState& space_state = (*states_)[space_index]; SpaceState& space_state = (*states_)[space_index];
...@@ -477,7 +583,9 @@ class ConcurrentSweepTask final : public cppgc::JobTask, ...@@ -477,7 +583,9 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
HeapBase& heap_; HeapBase& heap_;
SpaceStates* states_; SpaceStates* states_;
Platform* platform_;
std::atomic_bool is_completed_{false}; std::atomic_bool is_completed_{false};
const FreeMemoryHandling free_memory_handling_;
}; };
// This visitor: // This visitor:
...@@ -542,10 +650,24 @@ class Sweeper::SweeperImpl final { ...@@ -542,10 +650,24 @@ class Sweeper::SweeperImpl final {
StatsCollector::kAtomicSweep); StatsCollector::kAtomicSweep);
is_in_progress_ = true; is_in_progress_ = true;
platform_ = platform; platform_ = platform;
config_ = config;
#if DEBUG #if DEBUG
// Verify bitmap for all spaces regardless of |compactable_space_handling|. // Verify bitmap for all spaces regardless of |compactable_space_handling|.
ObjectStartBitmapVerifier().Verify(heap_); ObjectStartBitmapVerifier().Verify(heap_);
#endif #endif
// If inaccessible memory is touched to check whether it is set up
// correctly it cannot be discarded.
if (!CheckMemoryIsInaccessibleIsNoop()) {
config_.free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
}
if (config_.free_memory_handling ==
FreeMemoryHandling::kDiscardWherePossible) {
// The discarded counter will be recomputed.
heap_.heap()->stats_collector()->ResetDiscardedMemory();
}
PrepareForSweepVisitor(&space_states_, config.compactable_space_handling) PrepareForSweepVisitor(&space_states_, config.compactable_space_handling)
.Traverse(heap_); .Traverse(heap_);
...@@ -577,7 +699,7 @@ class Sweeper::SweeperImpl final { ...@@ -577,7 +699,7 @@ class Sweeper::SweeperImpl final {
{ {
// First, process unfinalized pages as finalizing a page is faster than // First, process unfinalized pages as finalizing a page is faster than
// sweeping. // sweeping.
SweepFinalizer finalizer(platform_); SweepFinalizer finalizer(platform_, config_.free_memory_handling);
while (auto page = space_state.swept_unfinalized_pages.Pop()) { while (auto page = space_state.swept_unfinalized_pages.Pop()) {
finalizer.FinalizePage(&*page); finalizer.FinalizePage(&*page);
if (size <= finalizer.largest_new_free_list_entry()) return true; if (size <= finalizer.largest_new_free_list_entry()) return true;
...@@ -586,7 +708,8 @@ class Sweeper::SweeperImpl final { ...@@ -586,7 +708,8 @@ class Sweeper::SweeperImpl final {
{ {
// Then, if no matching slot is found in the unfinalized pages, search the // Then, if no matching slot is found in the unfinalized pages, search the
// unswept page. This also helps out the concurrent sweeper. // unswept page. This also helps out the concurrent sweeper.
MutatorThreadSweeper sweeper(&space_states_, platform_); MutatorThreadSweeper sweeper(&space_states_, platform_,
config_.free_memory_handling);
while (auto page = space_state.unswept_pages.Pop()) { while (auto page = space_state.unswept_pages.Pop()) {
sweeper.SweepPage(**page); sweeper.SweepPage(**page);
if (size <= sweeper.largest_new_free_list_entry()) return true; if (size <= sweeper.largest_new_free_list_entry()) return true;
...@@ -624,11 +747,12 @@ class Sweeper::SweeperImpl final { ...@@ -624,11 +747,12 @@ class Sweeper::SweeperImpl final {
MutatorThreadSweepingScope sweeping_in_progresss(*this); MutatorThreadSweepingScope sweeping_in_progresss(*this);
// First, call finalizers on the mutator thread. // First, call finalizers on the mutator thread.
SweepFinalizer finalizer(platform_); SweepFinalizer finalizer(platform_, config_.free_memory_handling);
finalizer.FinalizeHeap(&space_states_); finalizer.FinalizeHeap(&space_states_);
// Then, help out the concurrent thread. // Then, help out the concurrent thread.
MutatorThreadSweeper sweeper(&space_states_, platform_); MutatorThreadSweeper sweeper(&space_states_, platform_,
config_.free_memory_handling);
sweeper.Sweep(); sweeper.Sweep();
FinalizeSweep(); FinalizeSweep();
...@@ -675,7 +799,8 @@ class Sweeper::SweeperImpl final { ...@@ -675,7 +799,8 @@ class Sweeper::SweeperImpl final {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
stats_collector_, StatsCollector::kIncrementalSweep); stats_collector_, StatsCollector::kIncrementalSweep);
MutatorThreadSweeper sweeper(&space_states_, platform_); MutatorThreadSweeper sweeper(&space_states_, platform_,
config_.free_memory_handling);
{ {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
stats_collector_, internal_scope_id, "deltaInSeconds", stats_collector_, internal_scope_id, "deltaInSeconds",
...@@ -754,9 +879,11 @@ class Sweeper::SweeperImpl final { ...@@ -754,9 +879,11 @@ class Sweeper::SweeperImpl final {
void ScheduleConcurrentSweeping() { void ScheduleConcurrentSweeping() {
DCHECK(platform_); DCHECK(platform_);
concurrent_sweeper_handle_ = platform_->PostJob( concurrent_sweeper_handle_ =
cppgc::TaskPriority::kUserVisible, platform_->PostJob(cppgc::TaskPriority::kUserVisible,
std::make_unique<ConcurrentSweepTask>(*heap_.heap(), &space_states_)); std::make_unique<ConcurrentSweepTask>(
*heap_.heap(), &space_states_, platform_,
config_.free_memory_handling));
} }
void CancelSweepers() { void CancelSweepers() {
...@@ -768,7 +895,7 @@ class Sweeper::SweeperImpl final { ...@@ -768,7 +895,7 @@ class Sweeper::SweeperImpl final {
void SynchronizeAndFinalizeConcurrentSweeping() { void SynchronizeAndFinalizeConcurrentSweeping() {
CancelSweepers(); CancelSweepers();
SweepFinalizer finalizer(platform_); SweepFinalizer finalizer(platform_, config_.free_memory_handling);
finalizer.FinalizeHeap(&space_states_); finalizer.FinalizeHeap(&space_states_);
} }
...@@ -776,6 +903,7 @@ class Sweeper::SweeperImpl final { ...@@ -776,6 +903,7 @@ class Sweeper::SweeperImpl final {
StatsCollector* const stats_collector_; StatsCollector* const stats_collector_;
SpaceStates space_states_; SpaceStates space_states_;
cppgc::Platform* platform_; cppgc::Platform* platform_;
SweepingConfig config_;
IncrementalSweepTask::Handle incremental_sweeper_handle_; IncrementalSweepTask::Handle incremental_sweeper_handle_;
std::unique_ptr<cppgc::JobHandle> concurrent_sweeper_handle_; std::unique_ptr<cppgc::JobHandle> concurrent_sweeper_handle_;
// Indicates whether the sweeping phase is in progress. // Indicates whether the sweeping phase is in progress.
......
...@@ -21,6 +21,11 @@ class HeapBase; ...@@ -21,6 +21,11 @@ class HeapBase;
class ConcurrentSweeperTest; class ConcurrentSweeperTest;
class NormalPageSpace; class NormalPageSpace;
// Specifies whether free memory found during sweeping may be discarded
// (returned to the OS) or must be left committed.
enum class FreeMemoryHandling : uint8_t {
  kDoNotDiscard,
  kDiscardWherePossible
};
class V8_EXPORT_PRIVATE Sweeper final { class V8_EXPORT_PRIVATE Sweeper final {
public: public:
struct SweepingConfig { struct SweepingConfig {
...@@ -30,6 +35,7 @@ class V8_EXPORT_PRIVATE Sweeper final { ...@@ -30,6 +35,7 @@ class V8_EXPORT_PRIVATE Sweeper final {
SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent; SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent;
CompactableSpaceHandling compactable_space_handling = CompactableSpaceHandling compactable_space_handling =
CompactableSpaceHandling::kSweep; CompactableSpaceHandling::kSweep;
FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
}; };
explicit Sweeper(HeapBase&); explicit Sweeper(HeapBase&);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment