Commit c9d7b236 authored by Michael Lippautz's avatar Michael Lippautz Committed by Commit Bot

cppgc: Add basic heap growing strategy

Adds allocation-based heap growing strategy that triggers GC based on
some limit. The limit is computed based on previous live memory and a
constant growing factor.

For invoking GC, we support two modes: with and without conservative
stack scanning. Without conservative stack scanning, an invoker makes
sure that we schedule a GC without stack using the existing platform.

Bug: chromium:1056170
Change-Id: I1808aeb5806a6ddd5501b556d6b6b129a85b9cda
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2228887
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68235}
parent ef864644
......@@ -4125,9 +4125,14 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/allocation.cc",
"src/heap/cppgc/free-list.cc",
"src/heap/cppgc/free-list.h",
"src/heap/cppgc/garbage-collector.h",
"src/heap/cppgc/gc-info-table.cc",
"src/heap/cppgc/gc-info-table.h",
"src/heap/cppgc/gc-info.cc",
"src/heap/cppgc/gc-invoker.cc",
"src/heap/cppgc/gc-invoker.h",
"src/heap/cppgc/heap-growing.cc",
"src/heap/cppgc/heap-growing.h",
"src/heap/cppgc/heap-inl.h",
"src/heap/cppgc/heap-object-header-inl.h",
"src/heap/cppgc/heap-object-header.cc",
......@@ -4169,6 +4174,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/stats-collector.h",
"src/heap/cppgc/sweeper.cc",
"src/heap/cppgc/sweeper.h",
"src/heap/cppgc/task-handle.h",
"src/heap/cppgc/virtual-memory.cc",
"src/heap/cppgc/virtual-memory.h",
"src/heap/cppgc/visitor.cc",
......
......@@ -29,6 +29,34 @@ class V8_EXPORT Heap {
*/
using StackState = EmbedderStackState;
/**
* Specifies whether conservative stack scanning is supported.
*/
enum class StackSupport : uint8_t {
/**
* Conservative stack scan is supported.
*/
kSupportsConservativeStackScan,
/**
* Conservative stack scan is not supported. Embedders may use this option
* when using custom infrastructure that is unsupported by the library.
*/
kNoConservativeStackScan,
};
/**
* Constraints for a Heap setup.
*/
struct ResourceConstraints {
/**
* Allows the heap to grow to some initial size in bytes before triggering
* garbage collections. This is useful when it is known that applications
* need a certain minimum heap to run to avoid repeatedly invoking the
* garbage collector when growing the heap.
*/
size_t initial_heap_size_bytes = 0;
};
/**
* Options specifying Heap properties (e.g. custom spaces) when initializing a
* heap through Heap::Create().
......@@ -47,6 +75,22 @@ class V8_EXPORT Heap {
* to the index they reside in the vector.
*/
std::vector<std::unique_ptr<CustomSpaceBase>> custom_spaces;
/**
* Specifies whether conservative stack scanning is supported. When
* conservative stack scanning is not supported, the collector may try to
* invoke garbage collections using non-nestable tasks, which are guaranteed
* to have no interesting stack, through the provided Platform. If such
* tasks are not supported by the Platform, the embedder must take care of
* invoking the GC through ForceGarbageCollectionSlow().
*/
StackSupport stack_support = StackSupport::kSupportsConservativeStackScan;
/**
* Resource constraints specifying various properties that the internal
* GC scheduler follows.
*/
ResourceConstraints resource_constraints;
};
/**
......
......@@ -12,9 +12,11 @@ namespace cppgc {
// TODO(v8:10346): Create separate includes for concepts that are not
// V8-specific.
using IdleTask = v8::IdleTask;
using JobHandle = v8::JobHandle;
using JobTask = v8::JobTask;
using PageAllocator = v8::PageAllocator;
using Task = v8::Task;
using TaskPriority = v8::TaskPriority;
using TaskRunner = v8::TaskRunner;
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_GARBAGE_COLLECTOR_H_
#define V8_HEAP_CPPGC_GARBAGE_COLLECTOR_H_

#include <cstddef>

#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/sweeper.h"

namespace cppgc {
namespace internal {

// GC interface that allows abstraction over the actual GC invocation. This is
// needed to mock/fake GC for testing.
class GarbageCollector {
 public:
  // Aggregates the parameters of a single garbage collection.
  struct Config {
    using StackState = cppgc::Heap::StackState;
    using MarkingType = Marker::MarkingConfig::MarkingType;
    using SweepingType = Sweeper::Config;

    // Atomic GC that conservatively scans the stack.
    static constexpr Config ConservativeAtomicConfig() {
      return {StackState::kMayContainHeapPointers, MarkingType::kAtomic,
              SweepingType::kAtomic};
    }

    // Atomic GC that must not look at the stack (no on-stack references).
    static constexpr Config PreciseAtomicConfig() {
      return {StackState::kNoHeapPointers, MarkingType::kAtomic,
              SweepingType::kAtomic};
    }

    StackState stack_state = StackState::kMayContainHeapPointers;
    MarkingType marking_type = MarkingType::kAtomic;
    SweepingType sweeping_type = SweepingType::kAtomic;
  };

  // Implementations (and test mocks/fakes) are used polymorphically through
  // GarbageCollector*; a virtual destructor makes destruction through the
  // interface pointer well-defined.
  virtual ~GarbageCollector() = default;

  // Executes a garbage collection specified in config.
  virtual void CollectGarbage(Config config) = 0;

  // The current epoch that the GC maintains. The epoch is increased on every
  // GC invocation.
  virtual size_t epoch() const = 0;
};

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_GARBAGE_COLLECTOR_H_
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/gc-invoker.h"
#include <memory>
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/task-handle.h"
namespace cppgc {
namespace internal {
// Pimpl that implements the dispatch policy described in gc-invoker.h.
class GCInvoker::GCInvokerImpl final : public GarbageCollector {
 public:
  GCInvokerImpl(GarbageCollector*, cppgc::Platform*, cppgc::Heap::StackSupport);
  ~GCInvokerImpl();

  GCInvokerImpl(const GCInvokerImpl&) = delete;
  GCInvokerImpl& operator=(const GCInvokerImpl&) = delete;

  void CollectGarbage(GarbageCollector::Config) final;
  size_t epoch() const final { return collector_->epoch(); }

 private:
  // Non-nestable task that performs a precise GC on a guaranteed-empty stack.
  class GCTask final : public cppgc::Task {
   public:
    using Handle = SingleThreadedHandle;

    // Posts a GCTask on `runner` and returns a handle that allows the caller
    // to cancel the task before it runs.
    static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner) {
      auto task = std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector);
      auto handle = task->GetHandle();
      runner->PostNonNestableTask(std::move(task));
      return handle;
    }

    explicit GCTask(GarbageCollector* collector)
        : collector_(collector),
          // The handle must be constructed non-empty: a default-constructed
          // (empty) SingleThreadedHandle has no shared state, so IsCanceled()
          // and Cancel() in Run() would DCHECK-fail and dereference a null
          // shared_ptr.
          handle_(Handle::NonEmptyTag{}),
          saved_epoch_(collector->epoch()) {}

   private:
    void Run() final {
      // Bail out if the task was canceled, or if a GC already happened since
      // this task was scheduled (the epoch moved on).
      if (handle_.IsCanceled() || (collector_->epoch() != saved_epoch_)) return;

      collector_->CollectGarbage(
          GarbageCollector::Config::PreciseAtomicConfig());
      // Mark the shared handle as done so a new task may be posted.
      handle_.Cancel();
    }

    Handle GetHandle() { return handle_; }

    GarbageCollector* collector_;
    Handle handle_;
    size_t saved_epoch_;
  };

  GarbageCollector* collector_;
  cppgc::Platform* platform_;
  cppgc::Heap::StackSupport stack_support_;
  // Handle of the currently pending GCTask, if any.
  GCTask::Handle gc_task_handle_;
};
// Stores the (non-owned) collaborators; no work happens at construction.
GCInvoker::GCInvokerImpl::GCInvokerImpl(GarbageCollector* collector,
                                        cppgc::Platform* platform,
                                        cppgc::Heap::StackSupport stack_support)
    : collector_(collector),
      platform_(platform),
      stack_support_(stack_support) {}

GCInvoker::GCInvokerImpl::~GCInvokerImpl() {
  // Cancel any still-pending GCTask; the task checks the shared handle in
  // Run() and bails out, so no GC is triggered after this invoker is gone.
  if (gc_task_handle_) {
    gc_task_handle_.Cancel();
  }
}
void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
  // A precise GC never touches the stack, and a conservative GC is fine when
  // the embedder supports stack scanning; in both cases invoke synchronously.
  if ((config.stack_state ==
       GarbageCollector::Config::StackState::kNoHeapPointers) ||
      (stack_support_ ==
       cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) {
    collector_->CollectGarbage(config);
    return;
  }
  // Otherwise defer to a non-nestable task, which is guaranteed to run with
  // an empty stack, if the platform supports such tasks. If it does not, the
  // request is dropped and the heap may temporarily exceed its limit.
  // Fetch the runner once instead of twice (each call constructs a
  // shared_ptr through a virtual call).
  std::shared_ptr<cppgc::TaskRunner> runner =
      platform_->GetForegroundTaskRunner();
  if (runner->NonNestableTasksEnabled() && !gc_task_handle_) {
    gc_task_handle_ = GCTask::Post(collector_, runner.get());
  }
}
// Public GCInvoker API: thin forwarders to the pimpl.
GCInvoker::GCInvoker(GarbageCollector* collector, cppgc::Platform* platform,
                     cppgc::Heap::StackSupport stack_support)
    : impl_(std::make_unique<GCInvoker::GCInvokerImpl>(collector, platform,
                                                       stack_support)) {}

// Defined out-of-line so GCInvokerImpl may be incomplete in the header.
GCInvoker::~GCInvoker() = default;

void GCInvoker::CollectGarbage(GarbageCollector::Config config) {
  impl_->CollectGarbage(config);
}

size_t GCInvoker::epoch() const { return impl_->epoch(); }
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_GC_INVOKER_H_
#define V8_HEAP_CPPGC_GC_INVOKER_H_

// Include-what-you-use: std::unique_ptr is used below.
#include <memory>

#include "include/cppgc/heap.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/garbage-collector.h"

namespace cppgc {

class Platform;

namespace internal {

// GC invoker that dispatches GC depending on StackSupport and StackState:
// 1. If StackState specifies that no stack scan is needed, the GC is invoked
//    synchronously.
// 2. If StackState specifies conservative GC and StackSupport prohibits stack
//    scanning: Delay GC until it can be invoked without accessing the stack.
//    To do so, a precise GC without stack scan is scheduled using the platform
//    if non-nestable tasks are supported, and otherwise no operation is
//    carried out. This means the heuristics may arbitrarily exceed the limit
//    in case non-nestable tasks are not supported and only conservative GCs
//    are requested.
class V8_EXPORT_PRIVATE GCInvoker final : public GarbageCollector {
 public:
  GCInvoker(GarbageCollector*, cppgc::Platform*, cppgc::Heap::StackSupport);
  ~GCInvoker();

  GCInvoker(const GCInvoker&) = delete;
  GCInvoker& operator=(const GCInvoker&) = delete;

  // GarbageCollector implementation; see class comment for dispatch rules.
  void CollectGarbage(GarbageCollector::Config) final;
  size_t epoch() const final;

 private:
  class GCInvokerImpl;
  std::unique_ptr<GCInvokerImpl> impl_;
};

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_GC_INVOKER_H_
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/heap-growing.h"

#include <algorithm>
#include <memory>

#include "include/cppgc/platform.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/task-handle.h"
namespace cppgc {
namespace internal {
// Implementation that observes allocation via StatsCollector and requests a
// conservative atomic GC once the allocated object size exceeds limit_.
class HeapGrowing::HeapGrowingImpl final
    : public StatsCollector::AllocationObserver {
 public:
  HeapGrowingImpl(GarbageCollector*, StatsCollector*,
                  cppgc::Heap::ResourceConstraints);
  ~HeapGrowingImpl();

  HeapGrowingImpl(const HeapGrowingImpl&) = delete;
  HeapGrowingImpl& operator=(const HeapGrowingImpl&) = delete;

  // AllocationObserver: may trigger a GC when the limit is exceeded.
  void AllocatedObjectSizeIncreased(size_t) final;
  // Only trigger GC on growing.
  void AllocatedObjectSizeDecreased(size_t) final {}
  // Called with the surviving size after a GC cycle; recomputes the limit.
  void ResetAllocatedObjectSize(size_t) final;

  // Current GC-triggering limit in bytes.
  size_t limit() const { return limit_; }

 private:
  void ConfigureLimit(size_t allocated_object_size);

  GarbageCollector* collector_;
  StatsCollector* stats_collector_;
  // Allow 1 MB heap by default.
  size_t initial_heap_size_ = 1 * kMB;
  size_t limit_ = 0;  // See ConfigureLimit().
  // NOTE(review): only initialized in the constructor and never consulted in
  // this file — confirm whether this handle is still needed.
  SingleThreadedHandle gc_task_handle_;
};
// Establishes the initial limit and subscribes to allocation updates.
HeapGrowing::HeapGrowingImpl::HeapGrowingImpl(
    GarbageCollector* collector, StatsCollector* stats_collector,
    cppgc::Heap::ResourceConstraints constraints)
    : collector_(collector),
      stats_collector_(stats_collector),
      gc_task_handle_(SingleThreadedHandle::NonEmptyTag{}) {
  // A user-provided initial size overrides the 1 MB default.
  if (constraints.initial_heap_size_bytes > 0) {
    initial_heap_size_ = constraints.initial_heap_size_bytes;
  }
  // Nothing is live yet, so the first limit derives purely from
  // initial_heap_size_.
  constexpr size_t kNoAllocatedBytes = 0;
  ConfigureLimit(kNoAllocatedBytes);
  stats_collector->RegisterObserver(this);
}

HeapGrowing::HeapGrowingImpl::~HeapGrowingImpl() {
  // Stop receiving allocation callbacks before members are destroyed.
  stats_collector_->UnregisterObserver(this);
}
// Requests a conservative atomic GC as soon as the currently allocated object
// size exceeds the configured limit.
void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
  if (stats_collector_->allocated_object_size() <= limit_) return;
  collector_->CollectGarbage(
      GarbageCollector::Config::ConservativeAtomicConfig());
}

// Recomputes the limit from the size that survived the GC cycle.
void HeapGrowing::HeapGrowingImpl::ResetAllocatedObjectSize(
    size_t allocated_object_size) {
  ConfigureLimit(allocated_object_size);
}

// Sets limit_ = max(base * kGrowingFactor, base + kMinLimitIncrease), where
// base is the larger of the live size and the configured initial heap size.
// The additive term keeps small heaps from re-triggering GC immediately.
void HeapGrowing::HeapGrowingImpl::ConfigureLimit(
    size_t allocated_object_size) {
  const size_t base_size = std::max(allocated_object_size, initial_heap_size_);
  const size_t grown_size = static_cast<size_t>(base_size * kGrowingFactor);
  limit_ = std::max(grown_size, base_size + kMinLimitIncrease);
}
// Public HeapGrowing API: thin forwarders to the pimpl.
HeapGrowing::HeapGrowing(GarbageCollector* collector,
                         StatsCollector* stats_collector,
                         cppgc::Heap::ResourceConstraints constraints)
    : impl_(std::make_unique<HeapGrowing::HeapGrowingImpl>(
          collector, stats_collector, constraints)) {}

// Defined out-of-line so HeapGrowingImpl may be incomplete in the header.
HeapGrowing::~HeapGrowing() = default;

size_t HeapGrowing::limit() const { return impl_->limit(); }

// static
// Out-of-line definition required for ODR-use of the constexpr member before
// C++17 inline variables.
constexpr double HeapGrowing::kGrowingFactor;
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_HEAP_GROWING_H_
#define V8_HEAP_CPPGC_HEAP_GROWING_H_

// Include-what-you-use: std::unique_ptr is used below.
#include <memory>

#include "include/cppgc/heap.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/raw-heap.h"

namespace cppgc {

class Platform;

namespace internal {

class GarbageCollector;
class StatsCollector;

// Growing strategy that invokes garbage collection using GarbageCollector
// based on allocation statistics provided by StatsCollector and
// ResourceConstraints.
//
// Implements a fixed-ratio growing strategy with an initial heap size that the
// GC can ignore to avoid excessive GCs for smaller heaps.
class V8_EXPORT_PRIVATE HeapGrowing final {
 public:
  // Constant growing factor for growing the heap limit.
  static constexpr double kGrowingFactor = 1.5;

  // For smaller heaps, allow allocating at least LAB in each regular space
  // before triggering GC again.
  static constexpr size_t kMinLimitIncrease =
      kPageSize * RawHeap::kNumberOfRegularSpaces;

  HeapGrowing(GarbageCollector*, StatsCollector*,
              cppgc::Heap::ResourceConstraints);
  ~HeapGrowing();

  HeapGrowing(const HeapGrowing&) = delete;
  HeapGrowing& operator=(const HeapGrowing&) = delete;

  // Current limit in bytes; exceeding it triggers a GC.
  size_t limit() const;

 private:
  class HeapGrowingImpl;
  std::unique_ptr<HeapGrowingImpl> impl_;
};

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_HEAP_GROWING_H_
......@@ -9,6 +9,7 @@
#include "src/base/bounded-page-allocator.h"
#include "src/base/page-allocator.h"
#include "src/base/platform/platform.h"
#include "src/heap/cppgc/gc-invoker.h"
#include "src/heap/cppgc/heap-object-header-inl.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
......@@ -41,7 +42,7 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform,
DCHECK(platform.get());
VerifyCustomSpaces(options.custom_spaces);
return std::make_unique<internal::Heap>(std::move(platform),
options.custom_spaces.size());
std::move(options));
}
void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
......@@ -121,8 +122,9 @@ cppgc::LivenessBroker LivenessBrokerFactory::Create() {
return cppgc::LivenessBroker();
}
Heap::Heap(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces)
: raw_heap_(this, custom_spaces),
Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options)
: raw_heap_(this, options.custom_spaces.size()),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
reserved_area_(ReserveCagedHeap(platform_->GetPageAllocator())),
......@@ -136,6 +138,9 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces)
stats_collector_(std::make_unique<StatsCollector>()),
object_allocator_(&raw_heap_, stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints),
stack_(std::make_unique<Stack>(v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()) {
}
......@@ -146,7 +151,7 @@ Heap::~Heap() {
sweeper_.Finish();
}
void Heap::CollectGarbage(GCConfig config) {
void Heap::CollectGarbage(Config config) {
if (in_no_gc_scope()) return;
epoch_++;
......
......@@ -14,6 +14,9 @@
#include "include/cppgc/liveness-broker.h"
#include "include/cppgc/macros.h"
#include "src/base/page-allocator.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/gc-invoker.h"
#include "src/heap/cppgc/heap-growing.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/object-allocator.h"
......@@ -42,7 +45,8 @@ class V8_EXPORT_PRIVATE LivenessBrokerFactory {
static LivenessBroker Create();
};
class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap,
public GarbageCollector {
public:
// NoGCScope allows going over limits and avoids triggering garbage
// collection triggered through allocations or even explicitly.
......@@ -76,29 +80,17 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
Heap* const heap_;
};
struct GCConfig {
using StackState = Heap::StackState;
using MarkingType = Marker::MarkingConfig::MarkingType;
using SweepingType = Sweeper::Config;
static constexpr GCConfig Default() { return {}; }
StackState stack_state = StackState::kMayContainHeapPointers;
MarkingType marking_type = MarkingType::kAtomic;
SweepingType sweeping_type = SweepingType::kAtomic;
};
static Heap* From(cppgc::Heap* heap) { return static_cast<Heap*>(heap); }
explicit Heap(std::shared_ptr<cppgc::Platform> platform,
size_t custom_spaces);
Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options);
~Heap() final;
inline void* Allocate(size_t size, GCInfoIndex index);
inline void* Allocate(size_t size, GCInfoIndex index,
CustomSpaceIndex space_index);
void CollectGarbage(GCConfig config = GCConfig::Default());
void CollectGarbage(Config config) final;
PreFinalizerHandler* prefinalizer_handler() {
return prefinalizer_handler_.get();
......@@ -130,9 +122,12 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
PageBackend* page_backend() { return page_backend_.get(); }
const PageBackend* page_backend() const { return page_backend_.get(); }
cppgc::Platform* platform() { return platform_.get(); }
const cppgc::Platform* platform() const { return platform_.get(); }
Sweeper& sweeper() { return sweeper_; }
size_t epoch() const { return epoch_; }
size_t epoch() const final { return epoch_; }
size_t ObjectPayloadSize() const;
......@@ -153,6 +148,8 @@ class V8_EXPORT_PRIVATE Heap final : public cppgc::Heap {
std::unique_ptr<StatsCollector> stats_collector_;
ObjectAllocator object_allocator_;
Sweeper sweeper_;
GCInvoker gc_invoker_;
HeapGrowing growing_;
std::unique_ptr<Stack> stack_;
std::unique_ptr<PreFinalizerHandler> prefinalizer_handler_;
......
......@@ -23,6 +23,7 @@
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sanitizers.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/task-handle.h"
namespace cppgc {
namespace internal {
......@@ -515,31 +516,7 @@ class Sweeper::SweeperImpl final {
private:
class IncrementalSweepTask : public v8::IdleTask {
public:
struct Handle {
Handle() = default;
void Cancel() {
DCHECK(is_cancelled_);
*is_cancelled_ = true;
}
bool IsCanceled() const {
DCHECK(is_cancelled_);
return *is_cancelled_;
}
explicit operator bool() const { return is_cancelled_.get(); }
private:
struct NonEmptyTag {};
explicit Handle(NonEmptyTag)
: is_cancelled_(std::make_shared<bool>(false)) {}
std::shared_ptr<bool> is_cancelled_;
friend class IncrementalSweepTask;
};
using Handle = SingleThreadedHandle;
explicit IncrementalSweepTask(SweeperImpl* sweeper)
: sweeper_(sweeper), handle_(Handle::NonEmptyTag{}) {}
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_TASK_HANDLE_H_
#define V8_HEAP_CPPGC_TASK_HANDLE_H_

#include <memory>

#include "src/base/logging.h"

namespace cppgc {
namespace internal {

// A copyable handle used for cancelling individual tasks. All copies of a
// handle share one cancellation flag; not safe for concurrent use.
struct SingleThreadedHandle {
  struct NonEmptyTag {};

  // Default construction results in empty handle.
  SingleThreadedHandle() = default;

  // Creates a non-empty handle whose flag starts out not-cancelled.
  explicit SingleThreadedHandle(NonEmptyTag)
      : is_cancelled_(std::make_shared<bool>(false)) {}

  // Requires a non-empty handle.
  void Cancel() {
    DCHECK(is_cancelled_);
    *is_cancelled_ = true;
  }

  // Requires a non-empty handle.
  bool IsCanceled() const {
    DCHECK(is_cancelled_);
    return *is_cancelled_;
  }

  // A handle is active if it is non-empty and not cancelled.
  explicit operator bool() const {
    return static_cast<bool>(is_cancelled_) && !*is_cancelled_;
  }

 private:
  std::shared_ptr<bool> is_cancelled_;
};

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_TASK_HANDLE_H_
......@@ -50,6 +50,8 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/free-list-unittest.cc",
"heap/cppgc/garbage-collected-unittest.cc",
"heap/cppgc/gc-info-unittest.cc",
"heap/cppgc/gc-invoker-unittest.cc",
"heap/cppgc/heap-growing-unittest.cc",
"heap/cppgc/heap-object-header-unittest.cc",
"heap/cppgc/heap-page-unittest.cc",
"heap/cppgc/heap-unittest.cc",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/gc-invoker.h"
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap.h"
#include "testing/gmock/include/gmock/gmock-matchers.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
namespace internal {
namespace {
// Mock collector used to verify which Config the invoker forwards.
class MockGarbageCollector : public GarbageCollector {
 public:
  MOCK_METHOD1(CollectGarbage, void(GarbageCollector::Config));
  MOCK_CONST_METHOD0(epoch, size_t());
};
class MockTaskRunner : public cppgc::TaskRunner {
public:
MOCK_METHOD1(PostTask, void(std::unique_ptr<cppgc::Task>));
MOCK_METHOD1(PostNonNestableTask, void(std::unique_ptr<cppgc::Task>));
MOCK_METHOD2(PostDelayedTask, void(std::unique_ptr<cppgc::Task>, double));
MOCK_METHOD2(PostNonNestableDelayedTask,
void(std::unique_ptr<cppgc::Task>, double));
MOCK_METHOD1(PostIdleTask, void(std::unique_ptr<cppgc::IdleTask>));
virtual bool IdleTasksEnabled() { return true; } // NOLINT
virtual bool NonNestableTasksEnabled() const { return true; } // NOLINT
virtual bool NonNestableDelayedTasksEnabled() const { // NOLINT
return true;
}
};
// Minimal platform whose only interesting piece is the injected foreground
// task runner; passing nullptr is fine for paths that never post tasks.
class MockPlatform : public cppgc::Platform {
 public:
  explicit MockPlatform(std::shared_ptr<TaskRunner> runner)
      : runner_(std::move(runner)) {}

  PageAllocator* GetPageAllocator() override { return nullptr; }
  double MonotonicallyIncreasingTime() override { return 0.0; }

  std::shared_ptr<TaskRunner> GetForegroundTaskRunner() override {
    return runner_;
  }

 private:
  std::shared_ptr<TaskRunner> runner_;
};
} // namespace
// A precise GC request runs synchronously regardless of stack support.
// (Also fixes the test-name typo "Precide" -> "Precise".)
TEST(GCInvokerTest, PreciseGCIsInvokedSynchronously) {
  // No task runner needed: a precise GC never goes through the platform.
  MockPlatform platform(nullptr);
  MockGarbageCollector gc;
  GCInvoker invoker(&gc, &platform,
                    cppgc::Heap::StackSupport::kNoConservativeStackScan);
  EXPECT_CALL(gc, CollectGarbage(::testing::Field(
                      &GarbageCollector::Config::stack_state,
                      GarbageCollector::Config::StackState::kNoHeapPointers)));
  invoker.CollectGarbage(GarbageCollector::Config::PreciseAtomicConfig());
}
// With conservative stack scanning supported, a conservative GC request is
// executed synchronously and keeps its stack state.
TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
  MockPlatform platform(nullptr);
  MockGarbageCollector gc;
  GCInvoker invoker(&gc, &platform,
                    cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
  EXPECT_CALL(
      gc, CollectGarbage(::testing::Field(
              &GarbageCollector::Config::stack_state,
              GarbageCollector::Config::StackState::kMayContainHeapPointers)));
  invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
}
// Without conservative stack support, a conservative GC request is not run
// directly; instead a non-nestable task is posted through the platform.
TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
  std::shared_ptr<cppgc::TaskRunner> runner =
      std::shared_ptr<cppgc::TaskRunner>(new MockTaskRunner());
  MockPlatform platform(runner);
  MockGarbageCollector gc;
  GCInvoker invoker(&gc, &platform,
                    cppgc::Heap::StackSupport::kNoConservativeStackScan);
  // The posted GCTask snapshots the collector's epoch when created.
  EXPECT_CALL(gc, epoch).WillOnce(::testing::Return(0));
  EXPECT_CALL(*static_cast<MockTaskRunner*>(runner.get()),
              PostNonNestableTask(::testing::_));
  invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
}
} // namespace internal
} // namespace cppgc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/cppgc/heap-growing.h"
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/stats-collector.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cppgc {
namespace internal {
namespace {
// Fake that simulates a full GC cycle against the StatsCollector and counts
// invocations, reported through epoch().
class FakeGarbageCollector : public GarbageCollector {
 public:
  explicit FakeGarbageCollector(StatsCollector* stats_collector)
      : stats_collector_(stats_collector) {}

  // Sets the live size reported by the next simulated GC cycle.
  void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; }

  void CollectGarbage(GarbageCollector::Config config) override {
    // Drive the collector through a complete marking + sweeping cycle.
    stats_collector_->NotifyMarkingStarted();
    stats_collector_->NotifyMarkingCompleted(live_bytes_);
    stats_collector_->NotifySweepingCompleted();
    ++gc_count_;
  }

  size_t epoch() const override { return gc_count_; }

 private:
  StatsCollector* stats_collector_;
  size_t live_bytes_ = 0;
  size_t gc_count_ = 0;
};
// Mock collector used when only the fact of a GC request matters.
class MockGarbageCollector : public GarbageCollector {
 public:
  MOCK_METHOD1(CollectGarbage, void(GarbageCollector::Config));
  MOCK_CONST_METHOD0(epoch, size_t());
};
// Reports an allocation of `bytes` followed by a conservative-collection
// safepoint on the StatsCollector — in these tests this is what drives
// HeapGrowing's limit checks.
void FakeAllocate(StatsCollector* stats_collector, size_t bytes) {
  stats_collector->NotifyAllocation(bytes);
  stats_collector->NotifySafePointForConservativeCollection();
}
} // namespace
// A tiny initial heap size makes the strategy request a GC on the very first
// large allocation update.
TEST(HeapGrowingTest, ConservativeGCInvoked) {
  StatsCollector stats_collector;
  MockGarbageCollector gc;
  cppgc::Heap::ResourceConstraints constraints;
  // Force GC at the first update.
  constraints.initial_heap_size_bytes = 1;
  HeapGrowing growing(&gc, &stats_collector, constraints);
  EXPECT_CALL(gc, CollectGarbage(::testing::_));
  FakeAllocate(&stats_collector, 100 * kMB);
}
// Allocations below the configured initial heap size must not trigger a GC;
// reaching it must.
TEST(HeapGrowingTest, InitialHeapSize) {
  StatsCollector stats_collector;
  MockGarbageCollector gc;
  cppgc::Heap::ResourceConstraints constraints;
  // Use larger size to avoid running into small heap optimizations.
  constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
  constraints.initial_heap_size_bytes = kObjectSize;
  HeapGrowing growing(&gc, &stats_collector, constraints);
  FakeAllocate(&stats_collector, kObjectSize - 1);
  EXPECT_CALL(gc, CollectGarbage(::testing::_));
  FakeAllocate(&stats_collector, kObjectSize);
}
// After a GC, the new limit is the surviving size scaled by the growing
// factor (for heaps large enough to skip the small-heap handling).
TEST(HeapGrowingTest, ConstantGrowingFactor) {
  // Use larger size to avoid running into small heap optimizations.
  constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
  StatsCollector stats_collector;
  FakeGarbageCollector gc(&stats_collector);
  cppgc::Heap::ResourceConstraints constraints;
  // Force GC at the first update.
  constraints.initial_heap_size_bytes = HeapGrowing::kMinLimitIncrease;
  HeapGrowing growing(&gc, &stats_collector, constraints);
  EXPECT_EQ(0u, gc.epoch());
  gc.SetLiveBytes(kObjectSize);
  FakeAllocate(&stats_collector, kObjectSize + 1);
  EXPECT_EQ(1u, gc.epoch());
  // Reference the strategy's constant instead of duplicating the magic 1.5,
  // so the test stays in sync if the factor ever changes.
  EXPECT_EQ(HeapGrowing::kGrowingFactor * kObjectSize, growing.limit());
}
// For very small live sizes, the limit grows by the fixed kMinLimitIncrease
// instead of the multiplicative factor.
TEST(HeapGrowingTest, SmallHeapGrowing) {
  // Larger constant to avoid running into special handling for smaller heaps.
  constexpr size_t kLargeAllocation = 100 * kMB;
  StatsCollector stats_collector;
  FakeGarbageCollector gc(&stats_collector);
  cppgc::Heap::ResourceConstraints constraints;
  // Force GC at the first update.
  constraints.initial_heap_size_bytes = 1;
  HeapGrowing growing(&gc, &stats_collector, constraints);
  EXPECT_EQ(0u, gc.epoch());
  gc.SetLiveBytes(1);
  FakeAllocate(&stats_collector, kLargeAllocation);
  EXPECT_EQ(1u, gc.epoch());
  EXPECT_EQ(1 + HeapGrowing::kMinLimitIncrease, growing.limit());
}
} // namespace internal
} // namespace cppgc
......@@ -88,7 +88,8 @@ TEST_F(GCHeapTest, ObjectPayloadSize) {
static constexpr size_t kObjectSizes[] = {1, 32, 64, 128,
2 * kLargeObjectSizeThreshold};
Heap::From(GetHeap())->CollectGarbage();
Heap::From(GetHeap())->CollectGarbage(
GarbageCollector::Config::ConservativeAtomicConfig());
for (size_t k = 0; k < kNumberOfObjectsPerArena; ++k) {
MakeGarbageCollected<GCed<kObjectSizes[0]>>(GetHeap());
......
......@@ -232,7 +232,8 @@ class GCInDestructor final : public GarbageCollected<GCInDestructor> {
~GCInDestructor() {
// Instead of directly calling GC, allocations should be supported here as
// well.
heap_->CollectGarbage(internal::Heap::GCConfig::Default());
heap_->CollectGarbage(
internal::GarbageCollector::Config::ConservativeAtomicConfig());
}
void Trace(Visitor*) const {}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment