Commit d46c94db authored by Omer Katz's avatar Omer Katz Committed by Commit Bot

cppgc: Allow to disable incremental marking/sweeping

Add fields to HeapOptions to denote on heap creation that the heap does
not support incremental/concurrent marking/sweeping.
This only applies to standalone heaps.
When triggering a GC (either explicitly or by the heap growing
heuristics), the given config is limited to not trigger unsupported
marking/sweeping types.

Bug: chromium:1156170
Change-Id: Id7b5cf82962e7c40920f942df9415d798e2b6686
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2581961
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71698}
parent 7608e28a
...@@ -56,6 +56,41 @@ class V8_EXPORT Heap { ...@@ -56,6 +56,41 @@ class V8_EXPORT Heap {
kNoConservativeStackScan, kNoConservativeStackScan,
}; };
/**
 * Specifies supported marking types.
 *
 * NOTE(review): the enumerator order appears to be significant — heap
 * support checks compare configs via integer CHECK_LE, so weaker marking
 * types must precede stronger ones; keep the ordering when extending.
 */
enum class MarkingType : uint8_t {
  /**
   * Atomic stop-the-world marking. This option does not require any write
   * barriers but is the most intrusive in terms of jank.
   */
  kAtomic,
  /**
   * Incremental marking, i.e., interleave marking with the rest of the
   * application on the same thread.
   */
  kIncremental,
  /**
   * Incremental and concurrent marking.
   */
  kIncrementalAndConcurrent
};
/**
 * Specifies supported sweeping types.
 *
 * NOTE(review): as with MarkingType, enumerator order appears significant —
 * config validation compares sweeping types via integer CHECK_LE, so the
 * atomic (most restrictive) type must come first.
 */
enum class SweepingType : uint8_t {
  /**
   * Atomic stop-the-world sweeping. All of sweeping is performed at once.
   */
  kAtomic,
  /**
   * Incremental and concurrent sweeping. Sweeping is split and interleaved
   * with the rest of the application.
   */
  kIncrementalAndConcurrent
};
/** /**
* Constraints for a Heap setup. * Constraints for a Heap setup.
*/ */
...@@ -98,6 +133,16 @@ class V8_EXPORT Heap { ...@@ -98,6 +133,16 @@ class V8_EXPORT Heap {
*/ */
StackSupport stack_support = StackSupport::kSupportsConservativeStackScan; StackSupport stack_support = StackSupport::kSupportsConservativeStackScan;
/**
* Specifies which types of marking are supported by the heap.
*/
MarkingType marking_support = MarkingType::kIncrementalAndConcurrent;
/**
* Specifies which types of sweeping are supported by the heap.
*/
SweepingType sweeping_support = SweepingType::kIncrementalAndConcurrent;
/** /**
* Resource constraints specifying various properties that the internal * Resource constraints specifying various properties that the internal
* GC scheduler follows. * GC scheduler follows.
......
...@@ -199,7 +199,7 @@ void CppHeap::TracePrologue(TraceFlags flags) { ...@@ -199,7 +199,7 @@ void CppHeap::TracePrologue(TraceFlags flags) {
const UnifiedHeapMarker::MarkingConfig marking_config{ const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor, UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers, cppgc::Heap::StackState::kNoHeapPointers,
UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent, cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
flags == TraceFlags::kForced flags == TraceFlags::kForced
? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
: UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced}; : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
...@@ -234,9 +234,8 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) { ...@@ -234,9 +234,8 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
AsBase(), cppgc::internal::StatsCollector::kAtomicMark); AsBase(), cppgc::internal::StatsCollector::kAtomicMark);
is_in_final_pause_ = true; is_in_final_pause_ = true;
marker_->EnterAtomicPause(stack_state); marker_->EnterAtomicPause(stack_state);
if (compactor_.CancelIfShouldNotCompact( if (compactor_.CancelIfShouldNotCompact(cppgc::Heap::MarkingType::kAtomic,
UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic, stack_state)) {
stack_state)) {
marker_->NotifyCompactionCancelled(); marker_->NotifyCompactionCancelled();
} }
} }
......
...@@ -206,16 +206,14 @@ ConcurrentMarkerBase::~ConcurrentMarkerBase() { ...@@ -206,16 +206,14 @@ ConcurrentMarkerBase::~ConcurrentMarkerBase() {
!concurrent_marking_handle_->IsValid()); !concurrent_marking_handle_->IsValid());
} }
bool ConcurrentMarkerBase::NotifyIncrementalMutatorStepCompleted() { void ConcurrentMarkerBase::NotifyIncrementalMutatorStepCompleted() {
DCHECK(concurrent_marking_handle_); DCHECK(concurrent_marking_handle_);
if (HasWorkForConcurrentMarking(marking_worklists_)) { if (HasWorkForConcurrentMarking(marking_worklists_)) {
// Notifies the scheduler that max concurrency might have increased. // Notifies the scheduler that max concurrency might have increased.
// This will adjust the number of markers if necessary. // This will adjust the number of markers if necessary.
IncreaseMarkingPriorityIfNeeded(); IncreaseMarkingPriorityIfNeeded();
concurrent_marking_handle_->NotifyConcurrencyIncrease(); concurrent_marking_handle_->NotifyConcurrencyIncrease();
return false;
} }
return !concurrent_marking_handle_->IsActive();
} }
void ConcurrentMarkerBase::IncreaseMarkingPriorityIfNeeded() { void ConcurrentMarkerBase::IncreaseMarkingPriorityIfNeeded() {
......
...@@ -28,7 +28,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarkerBase { ...@@ -28,7 +28,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarkerBase {
void JoinForTesting(); void JoinForTesting();
bool NotifyIncrementalMutatorStepCompleted(); void NotifyIncrementalMutatorStepCompleted();
bool IsActive() const; bool IsActive() const;
......
...@@ -30,15 +30,19 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { ...@@ -30,15 +30,19 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
public: public:
using Handle = SingleThreadedHandle; using Handle = SingleThreadedHandle;
static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner) { static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner,
auto task = std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector); GarbageCollector::Config config) {
auto task =
std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector, config);
auto handle = task->GetHandle(); auto handle = task->GetHandle();
runner->PostNonNestableTask(std::move(task)); runner->PostNonNestableTask(std::move(task));
return handle; return handle;
} }
explicit GCTask(GarbageCollector* collector) explicit GCTask(GarbageCollector* collector,
GarbageCollector::Config config)
: collector_(collector), : collector_(collector),
config_(config),
handle_(Handle::NonEmptyTag{}), handle_(Handle::NonEmptyTag{}),
saved_epoch_(collector->epoch()) {} saved_epoch_(collector->epoch()) {}
...@@ -46,14 +50,14 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { ...@@ -46,14 +50,14 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
void Run() final { void Run() final {
if (handle_.IsCanceled() || (collector_->epoch() != saved_epoch_)) return; if (handle_.IsCanceled() || (collector_->epoch() != saved_epoch_)) return;
collector_->CollectGarbage( collector_->CollectGarbage(config_);
GarbageCollector::Config::PreciseAtomicConfig());
handle_.Cancel(); handle_.Cancel();
} }
Handle GetHandle() { return handle_; } Handle GetHandle() { return handle_; }
GarbageCollector* collector_; GarbageCollector* collector_;
GarbageCollector::Config config_;
Handle handle_; Handle handle_;
size_t saved_epoch_; size_t saved_epoch_;
}; };
...@@ -78,21 +82,27 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() { ...@@ -78,21 +82,27 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() {
} }
void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) { void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
DCHECK_EQ(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
if ((config.stack_state == if ((config.stack_state ==
GarbageCollector::Config::StackState::kNoHeapPointers) || GarbageCollector::Config::StackState::kNoHeapPointers) ||
(stack_support_ == (stack_support_ ==
cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) { cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) {
collector_->CollectGarbage(config); collector_->CollectGarbage(config);
} else if (platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) { } else if (platform_->GetForegroundTaskRunner() &&
platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
if (!gc_task_handle_) { if (!gc_task_handle_) {
gc_task_handle_ = // Force a precise GC since it will run in a non-nestable task.
GCTask::Post(collector_, platform_->GetForegroundTaskRunner().get()); config.stack_state =
GarbageCollector::Config::StackState::kNoHeapPointers;
gc_task_handle_ = GCTask::Post(
collector_, platform_->GetForegroundTaskRunner().get(), config);
} }
} }
} }
void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection( void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
GarbageCollector::Config config) { GarbageCollector::Config config) {
DCHECK_NE(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
if ((stack_support_ != if ((stack_support_ !=
cppgc::Heap::StackSupport::kSupportsConservativeStackScan) && cppgc::Heap::StackSupport::kSupportsConservativeStackScan) &&
(!platform_->GetForegroundTaskRunner() || (!platform_->GetForegroundTaskRunner() ||
......
...@@ -31,7 +31,8 @@ class HeapGrowing::HeapGrowingImpl final ...@@ -31,7 +31,8 @@ class HeapGrowing::HeapGrowingImpl final
: public StatsCollector::AllocationObserver { : public StatsCollector::AllocationObserver {
public: public:
HeapGrowingImpl(GarbageCollector*, StatsCollector*, HeapGrowingImpl(GarbageCollector*, StatsCollector*,
cppgc::Heap::ResourceConstraints); cppgc::Heap::ResourceConstraints, cppgc::Heap::MarkingType,
cppgc::Heap::SweepingType);
~HeapGrowingImpl(); ~HeapGrowingImpl();
HeapGrowingImpl(const HeapGrowingImpl&) = delete; HeapGrowingImpl(const HeapGrowingImpl&) = delete;
...@@ -60,14 +61,21 @@ class HeapGrowing::HeapGrowingImpl final ...@@ -60,14 +61,21 @@ class HeapGrowing::HeapGrowingImpl final
SingleThreadedHandle gc_task_handle_; SingleThreadedHandle gc_task_handle_;
bool disabled_for_testing_ = false; bool disabled_for_testing_ = false;
const cppgc::Heap::MarkingType marking_support_;
const cppgc::Heap::SweepingType sweeping_support_;
}; };
HeapGrowing::HeapGrowingImpl::HeapGrowingImpl( HeapGrowing::HeapGrowingImpl::HeapGrowingImpl(
GarbageCollector* collector, StatsCollector* stats_collector, GarbageCollector* collector, StatsCollector* stats_collector,
cppgc::Heap::ResourceConstraints constraints) cppgc::Heap::ResourceConstraints constraints,
cppgc::Heap::MarkingType marking_support,
cppgc::Heap::SweepingType sweeping_support)
: collector_(collector), : collector_(collector),
stats_collector_(stats_collector), stats_collector_(stats_collector),
gc_task_handle_(SingleThreadedHandle::NonEmptyTag{}) { gc_task_handle_(SingleThreadedHandle::NonEmptyTag{}),
marking_support_(marking_support),
sweeping_support_(sweeping_support) {
if (constraints.initial_heap_size_bytes > 0) { if (constraints.initial_heap_size_bytes > 0) {
initial_heap_size_ = constraints.initial_heap_size_bytes; initial_heap_size_ = constraints.initial_heap_size_bytes;
} }
...@@ -85,10 +93,15 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) { ...@@ -85,10 +93,15 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
size_t allocated_object_size = stats_collector_->allocated_object_size(); size_t allocated_object_size = stats_collector_->allocated_object_size();
if (allocated_object_size > limit_for_atomic_gc_) { if (allocated_object_size > limit_for_atomic_gc_) {
collector_->CollectGarbage( collector_->CollectGarbage(
GarbageCollector::Config::ConservativeAtomicConfig()); {GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::StackState::kMayContainHeapPointers,
GarbageCollector::Config::MarkingType::kAtomic, sweeping_support_});
} else if (allocated_object_size > limit_for_incremental_gc_) { } else if (allocated_object_size > limit_for_incremental_gc_) {
if (marking_support_ == cppgc::Heap::MarkingType::kAtomic) return;
collector_->StartIncrementalGarbageCollection( collector_->StartIncrementalGarbageCollection(
GarbageCollector::Config::ConservativeIncrementalConfig()); {GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::StackState::kMayContainHeapPointers,
marking_support_, sweeping_support_});
} }
} }
...@@ -133,9 +146,12 @@ void HeapGrowing::HeapGrowingImpl::DisableForTesting() { ...@@ -133,9 +146,12 @@ void HeapGrowing::HeapGrowingImpl::DisableForTesting() {
HeapGrowing::HeapGrowing(GarbageCollector* collector, HeapGrowing::HeapGrowing(GarbageCollector* collector,
StatsCollector* stats_collector, StatsCollector* stats_collector,
cppgc::Heap::ResourceConstraints constraints) cppgc::Heap::ResourceConstraints constraints,
cppgc::Heap::MarkingType marking_support,
cppgc::Heap::SweepingType sweeping_support)
: impl_(std::make_unique<HeapGrowing::HeapGrowingImpl>( : impl_(std::make_unique<HeapGrowing::HeapGrowingImpl>(
collector, stats_collector, constraints)) {} collector, stats_collector, constraints, marking_support,
sweeping_support)) {}
HeapGrowing::~HeapGrowing() = default; HeapGrowing::~HeapGrowing() = default;
......
...@@ -34,7 +34,8 @@ class V8_EXPORT_PRIVATE HeapGrowing final { ...@@ -34,7 +34,8 @@ class V8_EXPORT_PRIVATE HeapGrowing final {
kPageSize * RawHeap::kNumberOfRegularSpaces; kPageSize * RawHeap::kNumberOfRegularSpaces;
HeapGrowing(GarbageCollector*, StatsCollector*, HeapGrowing(GarbageCollector*, StatsCollector*,
cppgc::Heap::ResourceConstraints); cppgc::Heap::ResourceConstraints, cppgc::Heap::MarkingType,
cppgc::Heap::SweepingType);
~HeapGrowing(); ~HeapGrowing();
HeapGrowing(const HeapGrowing&) = delete; HeapGrowing(const HeapGrowing&) = delete;
......
...@@ -43,8 +43,7 @@ void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason, ...@@ -43,8 +43,7 @@ void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
Heap::StackState stack_state) { Heap::StackState stack_state) {
internal::Heap::From(this)->CollectGarbage( internal::Heap::From(this)->CollectGarbage(
{internal::GarbageCollector::Config::CollectionType::kMajor, stack_state, {internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
internal::GarbageCollector::Config::MarkingType::kAtomic, MarkingType::kAtomic, SweepingType::kAtomic,
internal::GarbageCollector::Config::SweepingType::kAtomic,
internal::GarbageCollector::Config::IsForcedGC::kForced}); internal::GarbageCollector::Config::IsForcedGC::kForced});
} }
...@@ -71,11 +70,16 @@ class Unmarker final : private HeapVisitor<Unmarker> { ...@@ -71,11 +70,16 @@ class Unmarker final : private HeapVisitor<Unmarker> {
} }
}; };
void CheckConfig(Heap::Config config) { void CheckConfig(Heap::Config config, Heap::MarkingType marking_support,
Heap::SweepingType sweeping_support) {
CHECK_WITH_MSG( CHECK_WITH_MSG(
(config.collection_type != Heap::Config::CollectionType::kMinor) || (config.collection_type != Heap::Config::CollectionType::kMinor) ||
(config.stack_state == Heap::Config::StackState::kNoHeapPointers), (config.stack_state == Heap::Config::StackState::kNoHeapPointers),
"Minor GCs with stack is currently not supported"); "Minor GCs with stack is currently not supported");
CHECK_LE(static_cast<int>(config.marking_type),
static_cast<int>(marking_support));
CHECK_LE(static_cast<int>(config.sweeping_type),
static_cast<int>(sweeping_support));
} }
} // namespace } // namespace
...@@ -85,7 +89,15 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform, ...@@ -85,7 +89,15 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
: HeapBase(platform, options.custom_spaces, options.stack_support), : HeapBase(platform, options.custom_spaces, options.stack_support),
gc_invoker_(this, platform_.get(), options.stack_support), gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(), growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints) {} options.resource_constraints, options.marking_support,
options.sweeping_support),
marking_support_(options.marking_support),
sweeping_support_(options.sweeping_support) {
CHECK_IMPLIES(options.marking_support != MarkingType::kAtomic,
platform_->GetForegroundTaskRunner());
CHECK_IMPLIES(options.sweeping_support != SweepingType::kAtomic,
platform_->GetForegroundTaskRunner());
}
Heap::~Heap() { Heap::~Heap() {
NoGCScope no_gc(*this); NoGCScope no_gc(*this);
...@@ -95,7 +107,7 @@ Heap::~Heap() { ...@@ -95,7 +107,7 @@ Heap::~Heap() {
void Heap::CollectGarbage(Config config) { void Heap::CollectGarbage(Config config) {
DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type); DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config); CheckConfig(config, marking_support_, sweeping_support_);
if (in_no_gc_scope()) return; if (in_no_gc_scope()) return;
...@@ -110,7 +122,8 @@ void Heap::CollectGarbage(Config config) { ...@@ -110,7 +122,8 @@ void Heap::CollectGarbage(Config config) {
void Heap::StartIncrementalGarbageCollection(Config config) { void Heap::StartIncrementalGarbageCollection(Config config) {
DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type); DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config); DCHECK_NE(marking_support_, MarkingType::kAtomic);
CheckConfig(config, marking_support_, sweeping_support_);
if (gc_in_progress_ || in_no_gc_scope()) return; if (gc_in_progress_ || in_no_gc_scope()) return;
...@@ -120,6 +133,9 @@ void Heap::StartIncrementalGarbageCollection(Config config) { ...@@ -120,6 +133,9 @@ void Heap::StartIncrementalGarbageCollection(Config config) {
} }
void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) { void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) {
DCHECK_NE(marking_support_, MarkingType::kAtomic);
CheckConfig(config, marking_support_, sweeping_support_);
if (!gc_in_progress_) return; if (!gc_in_progress_) return;
DCHECK(!in_no_gc_scope()); DCHECK(!in_no_gc_scope());
......
...@@ -52,6 +52,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase, ...@@ -52,6 +52,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
GCInvoker gc_invoker_; GCInvoker gc_invoker_;
HeapGrowing growing_; HeapGrowing growing_;
const MarkingType marking_support_;
const SweepingType sweeping_support_;
bool gc_in_progress_ = false; bool gc_in_progress_ = false;
size_t epoch_ = 0; size_t epoch_ = 0;
}; };
......
...@@ -44,11 +44,7 @@ class V8_EXPORT_PRIVATE MarkerBase { ...@@ -44,11 +44,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
kMajor, kMajor,
}; };
using StackState = cppgc::Heap::StackState; using StackState = cppgc::Heap::StackState;
enum class MarkingType : uint8_t { using MarkingType = cppgc::Heap::MarkingType;
kAtomic,
kIncremental,
kIncrementalAndConcurrent
};
enum class IsForcedGC : uint8_t { enum class IsForcedGC : uint8_t {
kNotForced, kNotForced,
kForced, kForced,
......
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
#include <memory> #include <memory>
#include "include/cppgc/heap.h"
#include "src/base/macros.h" #include "src/base/macros.h"
namespace cppgc { namespace cppgc {
...@@ -22,7 +23,7 @@ class ConcurrentSweeperTest; ...@@ -22,7 +23,7 @@ class ConcurrentSweeperTest;
class V8_EXPORT_PRIVATE Sweeper final { class V8_EXPORT_PRIVATE Sweeper final {
public: public:
struct SweepingConfig { struct SweepingConfig {
enum class SweepingType : uint8_t { kAtomic, kIncrementalAndConcurrent }; using SweepingType = cppgc::Heap::SweepingType;
enum class CompactableSpaceHandling { kSweep, kIgnore }; enum class CompactableSpaceHandling { kSweep, kIgnore };
SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent; SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent;
......
...@@ -65,7 +65,9 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) { ...@@ -65,7 +65,9 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
constraints.initial_heap_size_bytes = 1; constraints.initial_heap_size_bytes = 1;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_)); EXPECT_CALL(gc, CollectGarbage(::testing::_));
FakeAllocate(&stats_collector, 100 * kMB); FakeAllocate(&stats_collector, 100 * kMB);
} }
...@@ -77,7 +79,9 @@ TEST(HeapGrowingTest, InitialHeapSize) { ...@@ -77,7 +79,9 @@ TEST(HeapGrowingTest, InitialHeapSize) {
// Use larger size to avoid running into small heap optimizations. // Use larger size to avoid running into small heap optimizations.
constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease; constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
constraints.initial_heap_size_bytes = kObjectSize; constraints.initial_heap_size_bytes = kObjectSize;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
FakeAllocate(&stats_collector, kObjectSize - 1); FakeAllocate(&stats_collector, kObjectSize - 1);
EXPECT_CALL(gc, CollectGarbage(::testing::_)); EXPECT_CALL(gc, CollectGarbage(::testing::_));
FakeAllocate(&stats_collector, kObjectSize); FakeAllocate(&stats_collector, kObjectSize);
...@@ -91,7 +95,9 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) { ...@@ -91,7 +95,9 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
constraints.initial_heap_size_bytes = HeapGrowing::kMinLimitIncrease; constraints.initial_heap_size_bytes = HeapGrowing::kMinLimitIncrease;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_EQ(0u, gc.epoch()); EXPECT_EQ(0u, gc.epoch());
gc.SetLiveBytes(kObjectSize); gc.SetLiveBytes(kObjectSize);
FakeAllocate(&stats_collector, kObjectSize + 1); FakeAllocate(&stats_collector, kObjectSize + 1);
...@@ -107,7 +113,9 @@ TEST(HeapGrowingTest, SmallHeapGrowing) { ...@@ -107,7 +113,9 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
constraints.initial_heap_size_bytes = 1; constraints.initial_heap_size_bytes = 1;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_EQ(0u, gc.epoch()); EXPECT_EQ(0u, gc.epoch());
gc.SetLiveBytes(1); gc.SetLiveBytes(1);
FakeAllocate(&stats_collector, kLargeAllocation); FakeAllocate(&stats_collector, kLargeAllocation);
...@@ -119,7 +127,9 @@ TEST(HeapGrowingTest, IncrementalGCStarted) { ...@@ -119,7 +127,9 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
StatsCollector stats_collector; StatsCollector stats_collector;
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0); EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc. // Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
...@@ -130,7 +140,9 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) { ...@@ -130,7 +140,9 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
StatsCollector stats_collector; StatsCollector stats_collector;
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints); HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0); EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc. // Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment