Commit aa923b1c authored by Omer Katz, committed by Commit Bot

cppgc: Update heap growing heuristics for incremental gc

Heap growing estimates when to start incremental GC such that it
will finish when we expect to finalize (i.e. when an atomic GC
would be triggered).
There is also a minimum ratio between the limit for atomic GC and
the limit for incremental GC, to guarantee that incremental GC gets
some time to run even when the application rarely allocates.
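
In a minimal sketch, the resulting trigger policy is (names mirror
the heap-growing.cc changes below; illustrative only, not the exact
patch):

  // Two limits are derived from the live size after the last GC:
  // - limit_for_atomic_gc: size scaled by the growing factor (as before);
  // - limit_for_incremental_gc: the atomic limit minus the bytes expected
  //   to be allocated while incremental marking runs, clamped into
  //   [size + 0.5 * headroom, size + 0.9 * headroom].
  if (allocated_object_size > limit_for_atomic_gc) {
    CollectGarbage(ConservativeAtomicConfig());
  } else if (allocated_object_size > limit_for_incremental_gc) {
    StartIncrementalGarbageCollection(ConservativeIncrementalConfig());
  }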

This is a continuation of:
https://chromium-review.googlesource.com/c/v8/v8/+/2377691

Bug: chromium:1056170
Change-Id: I8c87e98d60b6f8b5748558771a236f15385f7858
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2381454
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69630}
parent db837d58
@@ -31,6 +31,16 @@ class GarbageCollector {
             MarkingType::kAtomic, SweepingType::kAtomic};
   }
+  static constexpr Config ConservativeIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+
+  static constexpr Config PreciseIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+
   static constexpr Config MinorPreciseAtomicConfig() {
     return {CollectionType::kMinor, StackState::kNoHeapPointers,
             MarkingType::kAtomic, SweepingType::kAtomic};
@@ -43,7 +53,8 @@ class GarbageCollector {
   };

   // Executes a garbage collection specified in config.
-  virtual void CollectGarbage(Config config) = 0;
+  virtual void CollectGarbage(Config) = 0;
+  virtual void StartIncrementalGarbageCollection(Config) = 0;

   // The current epoch that the GC maintains. The epoch is increased on every
   // GC invocation.
...
@@ -22,6 +22,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
   GCInvokerImpl& operator=(const GCInvokerImpl&) = delete;

   void CollectGarbage(GarbageCollector::Config) final;
+  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
   size_t epoch() const final { return collector_->epoch(); }

  private:
@@ -88,6 +89,13 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
   }
 }

+void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
+    GarbageCollector::Config config) {
+  // No need to postpone starting incremental GC since the stack is not scanned
+  // until GC finalization.
+  collector_->StartIncrementalGarbageCollection(config);
+}
+
 GCInvoker::GCInvoker(GarbageCollector* collector, cppgc::Platform* platform,
                      cppgc::Heap::StackSupport stack_support)
     : impl_(std::make_unique<GCInvoker::GCInvokerImpl>(collector, platform,
@@ -99,6 +107,11 @@ void GCInvoker::CollectGarbage(GarbageCollector::Config config) {
   impl_->CollectGarbage(config);
 }

+void GCInvoker::StartIncrementalGarbageCollection(
+    GarbageCollector::Config config) {
+  impl_->StartIncrementalGarbageCollection(config);
+}
+
 size_t GCInvoker::epoch() const { return impl_->epoch(); }

 }  // namespace internal
...
@@ -34,6 +34,7 @@ class V8_EXPORT_PRIVATE GCInvoker final : public GarbageCollector {
   GCInvoker& operator=(const GCInvoker&) = delete;

   void CollectGarbage(GarbageCollector::Config) final;
+  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
   size_t epoch() const final;

  private:
...
@@ -4,18 +4,29 @@

 #include "src/heap/cppgc/heap-growing.h"

+#include <cmath>
 #include <memory>

 #include "include/cppgc/platform.h"
 #include "src/base/macros.h"
 #include "src/heap/cppgc/globals.h"
 #include "src/heap/cppgc/heap.h"
+#include "src/heap/cppgc/incremental-marking-schedule.h"
 #include "src/heap/cppgc/stats-collector.h"
 #include "src/heap/cppgc/task-handle.h"

 namespace cppgc {
 namespace internal {

+namespace {
+// Maximum ratio between limit for incremental GC and limit for atomic GC
+// (to guarantee that the limits are not too close to each other).
+constexpr double kMaximumLimitRatioForIncrementalGC = 0.9;
+// Minimum ratio between limit for incremental GC and limit for atomic GC
+// (to guarantee that the limit is not too close to the current allocated
+// size).
+constexpr double kMinimumLimitRatioForIncrementalGC = 0.5;
+}  // namespace
+
 class HeapGrowing::HeapGrowingImpl final
     : public StatsCollector::AllocationObserver {
  public:
@@ -31,7 +42,8 @@ class HeapGrowing::HeapGrowingImpl final
   void AllocatedObjectSizeDecreased(size_t) final {}
   void ResetAllocatedObjectSize(size_t) final;

-  size_t limit() const { return limit_; }
+  size_t limit_for_atomic_gc() const { return limit_for_atomic_gc_; }
+  size_t limit_for_incremental_gc() const { return limit_for_incremental_gc_; }

  private:
   void ConfigureLimit(size_t allocated_object_size);
@@ -40,7 +52,8 @@ class HeapGrowing::HeapGrowingImpl final
   StatsCollector* stats_collector_;
   // Allow 1 MB heap by default;
   size_t initial_heap_size_ = 1 * kMB;
-  size_t limit_ = 0;  // See ConfigureLimit().
+  size_t limit_for_atomic_gc_ = 0;       // See ConfigureLimit().
+  size_t limit_for_incremental_gc_ = 0;  // See ConfigureLimit().

   SingleThreadedHandle gc_task_handle_;
 };
@@ -64,9 +77,13 @@ HeapGrowing::HeapGrowingImpl::~HeapGrowingImpl() {
 }

 void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
-  if (stats_collector_->allocated_object_size() > limit_) {
+  size_t allocated_object_size = stats_collector_->allocated_object_size();
+  if (allocated_object_size > limit_for_atomic_gc_) {
     collector_->CollectGarbage(
         GarbageCollector::Config::ConservativeAtomicConfig());
+  } else if (allocated_object_size > limit_for_incremental_gc_) {
+    collector_->StartIncrementalGarbageCollection(
+        GarbageCollector::Config::ConservativeIncrementalConfig());
   }
 }
@@ -78,8 +95,31 @@ void HeapGrowing::HeapGrowingImpl::ResetAllocatedObjectSize(
 void HeapGrowing::HeapGrowingImpl::ConfigureLimit(
     size_t allocated_object_size) {
   const size_t size = std::max(allocated_object_size, initial_heap_size_);
-  limit_ = std::max(static_cast<size_t>(size * kGrowingFactor),
-                    size + kMinLimitIncrease);
+  limit_for_atomic_gc_ = std::max(static_cast<size_t>(size * kGrowingFactor),
+                                  size + kMinLimitIncrease);
+
+  // Estimate when to start incremental GC based on current allocation speed.
+  // Ideally, we start incremental GC such that it is ready to finalize no
+  // later than when we reach |limit_for_atomic_gc_|. However, we need to cap
+  // |limit_for_incremental_gc_| within a range to prevent:
+  // 1) |limit_for_incremental_gc_| being too close to |limit_for_atomic_gc_|
+  //    such that incremental gc gets nothing done before reaching
+  //    |limit_for_atomic_gc_| (in case the allocation rate is very low).
+  // 2) |limit_for_incremental_gc_| being too close to |size| such that GC is
+  //    essentially always running and write barriers are always active (in
+  //    case the allocation rate is very high).
+  size_t estimated_bytes_allocated_during_incremental_gc =
+      std::ceil(IncrementalMarkingSchedule::kEstimatedMarkingTimeMs *
+                stats_collector_->GetRecentAllocationSpeedInBytesPerMs());
+  size_t limit_incremental_gc_based_on_allocation_rate =
+      limit_for_atomic_gc_ - estimated_bytes_allocated_during_incremental_gc;
+  size_t maximum_limit_incremental_gc =
+      size + (limit_for_atomic_gc_ - size) * kMaximumLimitRatioForIncrementalGC;
+  size_t minimum_limit_incremental_gc =
+      size + (limit_for_atomic_gc_ - size) * kMinimumLimitRatioForIncrementalGC;
+  limit_for_incremental_gc_ =
+      std::max(minimum_limit_incremental_gc,
+               std::min(maximum_limit_incremental_gc,
+                        limit_incremental_gc_based_on_allocation_rate));
 }
 HeapGrowing::HeapGrowing(GarbageCollector* collector,
@@ -90,7 +130,12 @@ HeapGrowing::HeapGrowing(GarbageCollector* collector,

 HeapGrowing::~HeapGrowing() = default;

-size_t HeapGrowing::limit() const { return impl_->limit(); }
+size_t HeapGrowing::limit_for_atomic_gc() const {
+  return impl_->limit_for_atomic_gc();
+}
+size_t HeapGrowing::limit_for_incremental_gc() const {
+  return impl_->limit_for_incremental_gc();
+}

 // static
 constexpr double HeapGrowing::kGrowingFactor;
...
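To make the ConfigureLimit() clamping concrete, here is a self-contained
sketch with assumed inputs. kGrowingFactor = 1.5 is implied by the
ConstantGrowingFactor test below; kEstimatedMarkingTimeMs = 500 and the
100 bytes/ms allocation speed are illustrative assumptions, not values
taken from this patch:

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdio>

int main() {
  // Constants mirroring heap-growing.cc; kEstimatedMarkingTimeMs is assumed.
  constexpr double kGrowingFactor = 1.5;
  constexpr double kMaximumLimitRatioForIncrementalGC = 0.9;
  constexpr double kMinimumLimitRatioForIncrementalGC = 0.5;
  constexpr double kEstimatedMarkingTimeMs = 500.0;  // assumption

  const size_t size = 1024 * 1024;  // current allocated size: 1 MB
  const size_t limit_for_atomic_gc =
      static_cast<size_t>(size * kGrowingFactor);  // 1572864 (1.5 MB)

  const double allocation_speed = 100.0;  // assumed bytes/ms
  const size_t estimated_bytes_allocated_during_incremental_gc =
      static_cast<size_t>(
          std::ceil(kEstimatedMarkingTimeMs * allocation_speed));

  const size_t rate_based_limit =
      limit_for_atomic_gc - estimated_bytes_allocated_during_incremental_gc;
  const size_t maximum_limit =
      size + static_cast<size_t>((limit_for_atomic_gc - size) *
                                 kMaximumLimitRatioForIncrementalGC);
  const size_t minimum_limit =
      size + static_cast<size_t>((limit_for_atomic_gc - size) *
                                 kMinimumLimitRatioForIncrementalGC);

  const size_t limit_for_incremental_gc =
      std::max(minimum_limit, std::min(maximum_limit, rate_based_limit));
  std::printf("incremental limit: %zu (rate-based %zu, clamp [%zu, %zu])\n",
              limit_for_incremental_gc, rate_based_limit, minimum_limit,
              maximum_limit);
  return 0;
}

With these inputs the rate-based limit (1522864) exceeds the 0.9 cap
(1520435), so the incremental limit is clamped down; a very high
allocation rate would instead bottom out at the 0.5 floor (1310720).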
@@ -40,7 +40,8 @@ class V8_EXPORT_PRIVATE HeapGrowing final {
   HeapGrowing(const HeapGrowing&) = delete;
   HeapGrowing& operator=(const HeapGrowing&) = delete;

-  size_t limit() const;
+  size_t limit_for_atomic_gc() const;
+  size_t limit_for_incremental_gc() const;

  private:
   class HeapGrowingImpl;
...
@@ -90,12 +90,12 @@ Heap::~Heap() {
 void Heap::CollectGarbage(Config config) {
   DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
   CheckConfig(config);
-  config_ = config;

   if (in_no_gc_scope()) return;

+  config_ = config;
   if (!gc_in_progress_) StartGarbageCollection(config);

   DCHECK(marker_);
@@ -104,13 +104,12 @@ void Heap::CollectGarbage(Config config) {
 }

 void Heap::StartIncrementalGarbageCollection(Config config) {
-  DCHECK(!gc_in_progress_);
   DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
   CheckConfig(config);
-  config_ = config;

-  if (in_no_gc_scope()) return;
+  if (gc_in_progress_ || in_no_gc_scope()) return;

+  config_ = config;
   StartGarbageCollection(config);
 }
...
@@ -33,7 +33,7 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   const HeapBase& AsBase() const { return *this; }

   void CollectGarbage(Config) final;
-  void StartIncrementalGarbageCollection(Config);
+  void StartIncrementalGarbageCollection(Config) final;
   void FinalizeIncrementalGarbageCollectionIfRunning(Config);

   size_t epoch() const final { return epoch_; }
...
@@ -80,13 +80,25 @@ void StatsCollector::NotifyMarkingCompleted(size_t marked_bytes) {
   DCHECK_EQ(GarbageCollectionState::kMarking, gc_state_);
   gc_state_ = GarbageCollectionState::kSweeping;
   current_.marked_bytes = marked_bytes;
-  allocated_bytes_since_end_of_marking_ = 0;
   allocated_bytes_since_safepoint_ = 0;
   explicitly_freed_bytes_since_safepoint_ = 0;

   ForAllAllocationObservers([marked_bytes](AllocationObserver* observer) {
     observer->ResetAllocatedObjectSize(marked_bytes);
   });
+
+  // HeapGrowing uses the fields below to estimate the allocation rate during
+  // the execution of ResetAllocatedObjectSize.
+  allocated_bytes_since_end_of_marking_ = 0;
+  time_of_last_end_of_marking_ = v8::base::TimeTicks::Now();
+}
+
+double StatsCollector::GetRecentAllocationSpeedInBytesPerMs() const {
+  v8::base::TimeTicks current_time = v8::base::TimeTicks::Now();
+  DCHECK_LE(time_of_last_end_of_marking_, current_time);
+  if (time_of_last_end_of_marking_ == current_time) return 0;
+  return allocated_bytes_since_end_of_marking_ /
+         (current_time - time_of_last_end_of_marking_).InMillisecondsF();
 }

 const StatsCollector::Event& StatsCollector::NotifySweepingCompleted() {
...
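The allocation-rate estimate feeding the heuristic above is simply bytes
allocated since the last end of marking divided by elapsed wall-clock
milliseconds. A minimal standalone analogue, with std::chrono standing in
for v8::base::TimeTicks (an assumption for portability, not the patch's
actual types):

#include <chrono>
#include <cstdint>

// Simplified analogue of StatsCollector's rate tracking; std::chrono
// replaces v8::base::TimeTicks.
class AllocationRateEstimator {
 public:
  using Clock = std::chrono::steady_clock;

  // Called at the end of marking, mirroring NotifyMarkingCompleted().
  void OnEndOfMarking() {
    allocated_bytes_since_end_of_marking_ = 0;
    time_of_last_end_of_marking_ = Clock::now();
  }

  void OnAllocated(int64_t bytes) {
    allocated_bytes_since_end_of_marking_ += bytes;
  }

  double GetRecentAllocationSpeedInBytesPerMs() const {
    const Clock::time_point now = Clock::now();
    // Guard against a zero time delta, as the patch does.
    if (now == time_of_last_end_of_marking_) return 0;
    const double elapsed_ms =
        std::chrono::duration<double, std::milli>(
            now - time_of_last_end_of_marking_)
            .count();
    return allocated_bytes_since_end_of_marking_ / elapsed_ms;
  }

 private:
  int64_t allocated_bytes_since_end_of_marking_ = 0;
  Clock::time_point time_of_last_end_of_marking_ = Clock::now();
};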
@@ -11,6 +11,7 @@
 #include <vector>

 #include "src/base/macros.h"
+#include "src/base/platform/time.h"

 namespace cppgc {
 namespace internal {
@@ -79,6 +80,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
   // bytes and the bytes allocated since last marking.
   size_t allocated_object_size() const;

+  double GetRecentAllocationSpeedInBytesPerMs() const;
+
  private:
   enum class GarbageCollectionState : uint8_t {
     kNotRunning,
@@ -97,6 +100,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
   // an object was explicitly freed that was marked as live in the previous
   // cycle.
   int64_t allocated_bytes_since_end_of_marking_ = 0;
+  v8::base::TimeTicks time_of_last_end_of_marking_ = v8::base::TimeTicks::Now();
   // Counters for allocation and free. The individual values are never negative
   // but their delta may be because of the same reason the overall
   // allocated_bytes_since_end_of_marking_ may be negative. Keep integer
...
@@ -18,6 +18,8 @@ namespace {
 class MockGarbageCollector : public GarbageCollector {
  public:
   MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
+  MOCK_METHOD(void, StartIncrementalGarbageCollection,
+              (GarbageCollector::Config), (override));
   MOCK_METHOD(size_t, epoch, (), (const, override));
 };
@@ -94,5 +96,31 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
   invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
 }

+TEST(GCInvokerTest, IncrementalGCIsStarted) {
+  // Since StartIncrementalGarbageCollection doesn't scan the stack, support
+  // for conservative stack scanning should not matter.
+  MockPlatform platform(nullptr);
+  MockGarbageCollector gc;
+  // Conservative stack scanning supported.
+  GCInvoker invoker_with_support(
+      &gc, &platform,
+      cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
+  EXPECT_CALL(
+      gc, StartIncrementalGarbageCollection(::testing::Field(
+              &GarbageCollector::Config::stack_state,
+              GarbageCollector::Config::StackState::kMayContainHeapPointers)));
+  invoker_with_support.StartIncrementalGarbageCollection(
+      GarbageCollector::Config::ConservativeIncrementalConfig());
+  // Conservative stack scanning *not* supported.
+  GCInvoker invoker_without_support(
+      &gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan);
+  EXPECT_CALL(
+      gc, StartIncrementalGarbageCollection(::testing::Field(
+              &GarbageCollector::Config::stack_state,
+              GarbageCollector::Config::StackState::kMayContainHeapPointers)));
+  invoker_without_support.StartIncrementalGarbageCollection(
+      GarbageCollector::Config::ConservativeIncrementalConfig());
+}
+
 }  // namespace internal
 }  // namespace cppgc
...
@@ -29,6 +29,11 @@ class FakeGarbageCollector : public GarbageCollector {
     callcount_++;
   }

+  void StartIncrementalGarbageCollection(
+      GarbageCollector::Config config) override {
+    UNREACHABLE();
+  }
+
   size_t epoch() const override { return callcount_; }

  private:
@@ -40,6 +45,8 @@ class FakeGarbageCollector : public GarbageCollector {
 class MockGarbageCollector : public GarbageCollector {
  public:
   MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
+  MOCK_METHOD(void, StartIncrementalGarbageCollection,
+              (GarbageCollector::Config), (override));
   MOCK_METHOD(size_t, epoch, (), (const, override));
 };
@@ -87,7 +94,7 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
   gc.SetLiveBytes(kObjectSize);
   FakeAllocate(&stats_collector, kObjectSize + 1);
   EXPECT_EQ(1u, gc.epoch());
-  EXPECT_EQ(1.5 * kObjectSize, growing.limit());
+  EXPECT_EQ(1.5 * kObjectSize, growing.limit_for_atomic_gc());
 }

 TEST(HeapGrowingTest, SmallHeapGrowing) {
@@ -103,7 +110,35 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
   gc.SetLiveBytes(1);
   FakeAllocate(&stats_collector, kLargeAllocation);
   EXPECT_EQ(1u, gc.epoch());
-  EXPECT_EQ(1 + HeapGrowing::kMinLimitIncrease, growing.limit());
+  EXPECT_EQ(1 + HeapGrowing::kMinLimitIncrease, growing.limit_for_atomic_gc());
 }

+TEST(HeapGrowingTest, IncrementalGCStarted) {
+  StatsCollector stats_collector;
+  MockGarbageCollector gc;
+  cppgc::Heap::ResourceConstraints constraints;
+  HeapGrowing growing(&gc, &stats_collector, constraints);
+  EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
+  EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
+  // Allocate 1 byte less than the limit for atomic gc to trigger incremental
+  // gc.
+  FakeAllocate(&stats_collector, growing.limit_for_atomic_gc() - 1);
+}
+
+TEST(HeapGrowingTest, IncrementalGCFinalized) {
+  StatsCollector stats_collector;
+  MockGarbageCollector gc;
+  cppgc::Heap::ResourceConstraints constraints;
+  HeapGrowing growing(&gc, &stats_collector, constraints);
+  EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
+  EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
+  // Allocate 1 byte less than the limit for atomic gc to trigger incremental
+  // gc.
+  size_t bytes_for_incremental_gc = growing.limit_for_atomic_gc() - 1;
+  FakeAllocate(&stats_collector, bytes_for_incremental_gc);
+  ::testing::Mock::VerifyAndClearExpectations(&gc);
+  EXPECT_CALL(gc, CollectGarbage(::testing::_));
+  EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0);
+  // Allocate the rest needed to trigger atomic gc.
+  FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes);
+}
+
 }  // namespace internal
...