Commit 2a24668a authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Move configs to heap-config.h

Change-Id: Ibaea8f237d3bbee983f763a178eda0f7ca97d419
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3911515
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83397}
parent f08547af
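For orientation before the diff: this CL moves the nested GC config types into free-standing types in src/heap/cppgc/heap-config.h. As a hedged, illustrative summary (not code from the tree), the former spellings map roughly as follows:

namespace cppgc::internal {
// Was GarbageCollector::Config::StackState / Heap::Config::StackState:
using StackState = cppgc::Heap::StackState;
// Was Marker::MarkingConfig::CollectionType / Heap::Config::CollectionType:
enum class CollectionType : uint8_t { kMinor, kMajor };
// Was Marker::MarkingConfig:
struct MarkingConfig;
// Was GarbageCollector::Config (spelled Heap::Config at some call sites):
struct GCConfig;
}  // namespace cppgc::internal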
@@ -286,7 +286,8 @@ class UnifiedHeapConservativeMarkingVisitor final
 class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
  public:
   UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
-                    cppgc::Platform* platform, MarkingConfig config);
+                    cppgc::Platform* platform,
+                    cppgc::internal::MarkingConfig config);
   ~UnifiedHeapMarker() final = default;
@@ -324,7 +325,7 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
 UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
                                      cppgc::internal::HeapBase& heap,
                                      cppgc::Platform* platform,
-                                     MarkingConfig config)
+                                     cppgc::internal::MarkingConfig config)
     : cppgc::internal::MarkerBase(heap, platform, config),
       mutator_unified_heap_marking_state_(v8_heap, nullptr),
       marking_visitor_(config.collection_type == CppHeap::CollectionType::kMajor
@@ -625,11 +626,11 @@ void CppHeap::InitializeTracing(CollectionType collection_type,
   current_gc_flags_ = gc_flags;
-  const UnifiedHeapMarker::MarkingConfig marking_config{
+  const cppgc::internal::MarkingConfig marking_config{
       *collection_type_, StackState::kNoHeapPointers, SelectMarkingType(),
       IsForceGC(current_gc_flags_)
-          ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
-          : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
+          ? cppgc::internal::MarkingConfig::IsForcedGC::kForced
+          : cppgc::internal::MarkingConfig::IsForcedGC::kNotForced};
   DCHECK_IMPLIES(!isolate_,
                  (MarkingType::kAtomic == marking_config.marking_type) ||
                      force_incremental_marking_for_testing_);
@@ -1004,12 +1005,13 @@ CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope(
   }
 }
-void CppHeap::CollectGarbage(Config config) {
+void CppHeap::CollectGarbage(cppgc::internal::GCConfig config) {
   if (in_no_gc_scope() || !isolate_) return;
   // TODO(mlippautz): Respect full config.
-  const int flags = (config.free_memory_handling ==
-                     Config::FreeMemoryHandling::kDiscardWherePossible)
+  const int flags =
+      (config.free_memory_handling ==
+       cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible)
           ? Heap::kReduceMemoryFootprintMask
           : Heap::kNoGCFlags;
   isolate_->heap()->CollectAllGarbage(
@@ -1020,7 +1022,9 @@ const cppgc::EmbedderStackState* CppHeap::override_stack_state() const {
   return HeapBase::override_stack_state();
 }
-void CppHeap::StartIncrementalGarbageCollection(Config) { UNIMPLEMENTED(); }
+void CppHeap::StartIncrementalGarbageCollection(cppgc::internal::GCConfig) {
+  UNIMPLEMENTED();
+}
 size_t CppHeap::epoch() const { UNIMPLEMENTED(); }
 }  // namespace internal
...
@@ -43,9 +43,8 @@ class V8_EXPORT_PRIVATE CppHeap final
   };
   using GarbageCollectionFlags = base::Flags<GarbageCollectionFlagValues>;
-  using StackState = cppgc::internal::GarbageCollector::Config::StackState;
-  using CollectionType =
-      cppgc::internal::GarbageCollector::Config::CollectionType;
+  using StackState = cppgc::internal::StackState;
+  using CollectionType = cppgc::internal::CollectionType;
   class MetricRecorderAdapter final : public cppgc::internal::MetricRecorder {
    public:
@@ -139,9 +138,7 @@ class V8_EXPORT_PRIVATE CppHeap final
   void FinishSweepingIfRunning();
   void FinishSweepingIfOutOfWork();
-  void InitializeTracing(
-      cppgc::internal::GarbageCollector::Config::CollectionType,
-      GarbageCollectionFlags);
+  void InitializeTracing(CollectionType, GarbageCollectionFlags);
   void StartTracing();
   bool AdvanceTracing(double max_duration);
   bool IsTracingDone();
@@ -168,9 +165,9 @@ class V8_EXPORT_PRIVATE CppHeap final
   std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread();
   // cppgc::internal::GarbageCollector interface.
-  void CollectGarbage(Config) override;
+  void CollectGarbage(cppgc::internal::GCConfig) override;
   const cppgc::EmbedderStackState* override_stack_state() const override;
-  void StartIncrementalGarbageCollection(Config) override;
+  void StartIncrementalGarbageCollection(cppgc::internal::GCConfig) override;
   size_t epoch() const override;
  private:
@@ -194,8 +191,7 @@ class V8_EXPORT_PRIVATE CppHeap final
   Isolate* isolate_ = nullptr;
   bool marking_done_ = false;
   // |collection_type_| is initialized when marking is in progress.
-  base::Optional<cppgc::internal::GarbageCollector::Config::CollectionType>
-      collection_type_;
+  base::Optional<CollectionType> collection_type_;
   GarbageCollectionFlags current_gc_flags_;
   // Buffered allocated bytes. Reporting allocated bytes to V8 can trigger a GC
...
@@ -57,7 +57,7 @@ class UnifiedHeapVerificationVisitor final : public JSVisitor {
 UnifiedHeapMarkingVerifier::UnifiedHeapMarkingVerifier(
     cppgc::internal::HeapBase& heap_base,
-    cppgc::internal::Heap::Config::CollectionType collection_type)
+    cppgc::internal::CollectionType collection_type)
     : MarkingVerifierBase(
           heap_base, collection_type, state_,
           std::make_unique<UnifiedHeapVerificationVisitor>(state_)) {}
...
@@ -14,7 +14,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVerifier final
     : public cppgc::internal::MarkingVerifierBase {
  public:
   UnifiedHeapMarkingVerifier(cppgc::internal::HeapBase&,
-                             cppgc::internal::Heap::Config::CollectionType);
+                             cppgc::internal::CollectionType);
   ~UnifiedHeapMarkingVerifier() final = default;
  private:
...
@@ -452,13 +452,11 @@ Compactor::Compactor(RawHeap& heap) : heap_(heap) {
   }
 }
-bool Compactor::ShouldCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) const {
+bool Compactor::ShouldCompact(GCConfig::MarkingType marking_type,
+                              StackState stack_state) const {
   if (compactable_spaces_.empty() ||
-      (marking_type == GarbageCollector::Config::MarkingType::kAtomic &&
-       stack_state ==
-           GarbageCollector::Config::StackState::kMayContainHeapPointers)) {
+      (marking_type == GCConfig::MarkingType::kAtomic &&
+       stack_state == StackState::kMayContainHeapPointers)) {
     // The following check ensures that tests that want to test compaction are
     // not interrupted by garbage collections that cannot use compaction.
     DCHECK(!enable_for_next_gc_for_testing_);
@@ -474,9 +472,8 @@ bool Compactor::ShouldCompact(
   return free_list_size > kFreeListSizeThreshold;
 }
-void Compactor::InitializeIfShouldCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) {
+void Compactor::InitializeIfShouldCompact(GCConfig::MarkingType marking_type,
+                                          StackState stack_state) {
   DCHECK(!is_enabled_);
   if (!ShouldCompact(marking_type, stack_state)) return;
@@ -487,9 +484,8 @@ void Compactor::InitializeIfShouldCompact(
   is_cancelled_ = false;
 }
-void Compactor::CancelIfShouldNotCompact(
-    GarbageCollector::Config::MarkingType marking_type,
-    GarbageCollector::Config::StackState stack_state) {
+void Compactor::CancelIfShouldNotCompact(GCConfig::MarkingType marking_type,
+                                         StackState stack_state) {
   if (!is_enabled_ || ShouldCompact(marking_type, stack_state)) return;
   is_cancelled_ = true;
...
@@ -12,6 +12,8 @@
 namespace cppgc {
 namespace internal {
+class NormalPageSpace;
 class V8_EXPORT_PRIVATE Compactor final {
   using CompactableSpaceHandling = SweepingConfig::CompactableSpaceHandling;
@@ -22,10 +24,8 @@ class V8_EXPORT_PRIVATE Compactor final {
   Compactor(const Compactor&) = delete;
   Compactor& operator=(const Compactor&) = delete;
-  void InitializeIfShouldCompact(GarbageCollector::Config::MarkingType,
-                                 GarbageCollector::Config::StackState);
-  void CancelIfShouldNotCompact(GarbageCollector::Config::MarkingType,
-                                GarbageCollector::Config::StackState);
+  void InitializeIfShouldCompact(GCConfig::MarkingType, StackState);
+  void CancelIfShouldNotCompact(GCConfig::MarkingType, StackState);
   // Returns whether spaces need to be processed by the Sweeper after
   // compaction.
   CompactableSpaceHandling CompactSpacesIfEnabled();
@@ -38,8 +38,7 @@ class V8_EXPORT_PRIVATE Compactor final {
   bool IsEnabledForTesting() const { return is_enabled_; }
  private:
-  bool ShouldCompact(GarbageCollector::Config::MarkingType,
-                     GarbageCollector::Config::StackState) const;
+  bool ShouldCompact(GCConfig::MarkingType, StackState) const;
   RawHeap& heap_;
   // Compactor does not own the compactable spaces. The heap owns all spaces.
...
@@ -7,7 +7,6 @@
 #include "include/cppgc/common.h"
 #include "src/heap/cppgc/heap-config.h"
-#include "src/heap/cppgc/marker.h"
 namespace cppgc {
 namespace internal {
@@ -16,62 +15,9 @@ namespace internal {
 // needed to mock/fake GC for testing.
 class GarbageCollector {
  public:
-  struct Config {
-    using CollectionType = Marker::MarkingConfig::CollectionType;
-    using StackState = cppgc::Heap::StackState;
-    using MarkingType = Marker::MarkingConfig::MarkingType;
-    using SweepingType = SweepingConfig::SweepingType;
-    using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling;
-    using IsForcedGC = Marker::MarkingConfig::IsForcedGC;
-    static constexpr Config ConservativeAtomicConfig() {
-      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-    static constexpr Config PreciseAtomicConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-    static constexpr Config ConservativeIncrementalConfig() {
-      return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
-              MarkingType::kIncremental, SweepingType::kAtomic};
-    }
-    static constexpr Config PreciseIncrementalConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kIncremental, SweepingType::kAtomic};
-    }
-    static constexpr Config
-    PreciseIncrementalMarkingConcurrentSweepingConfig() {
-      return {CollectionType::kMajor, StackState::kNoHeapPointers,
-              MarkingType::kIncremental,
-              SweepingType::kIncrementalAndConcurrent};
-    }
-    static constexpr Config MinorPreciseAtomicConfig() {
-      return {CollectionType::kMinor, StackState::kNoHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-    static constexpr Config MinorConservativeAtomicConfig() {
-      return {CollectionType::kMinor, StackState::kMayContainHeapPointers,
-              MarkingType::kAtomic, SweepingType::kAtomic};
-    }
-    CollectionType collection_type = CollectionType::kMajor;
-    StackState stack_state = StackState::kMayContainHeapPointers;
-    MarkingType marking_type = MarkingType::kAtomic;
-    SweepingType sweeping_type = SweepingType::kAtomic;
-    FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
-    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
-  };
   // Executes a garbage collection specified in config.
-  virtual void CollectGarbage(Config) = 0;
-  virtual void StartIncrementalGarbageCollection(Config) = 0;
+  virtual void CollectGarbage(GCConfig) = 0;
+  virtual void StartIncrementalGarbageCollection(GCConfig) = 0;
   // The current epoch that the GC maintains. The epoch is increased on every
   // GC invocation.
...
@@ -8,7 +8,6 @@
 #include "include/cppgc/common.h"
 #include "include/cppgc/platform.h"
-#include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/task-handle.h"
 namespace cppgc {
@@ -22,8 +21,8 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
   GCInvokerImpl(const GCInvokerImpl&) = delete;
   GCInvokerImpl& operator=(const GCInvokerImpl&) = delete;
-  void CollectGarbage(GarbageCollector::Config) final;
-  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
   size_t epoch() const final { return collector_->epoch(); }
   const EmbedderStackState* override_stack_state() const final {
     return collector_->override_stack_state();
@@ -35,7 +34,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
     using Handle = SingleThreadedHandle;
     static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner,
-                       GarbageCollector::Config config) {
+                       GCConfig config) {
       auto task =
           std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector, config);
       auto handle = task->GetHandle();
@@ -43,8 +42,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
       return handle;
     }
-    explicit GCTask(GarbageCollector* collector,
-                    GarbageCollector::Config config)
+    explicit GCTask(GarbageCollector* collector, GCConfig config)
         : collector_(collector),
           config_(config),
          handle_(Handle::NonEmptyTag{}),
@@ -63,7 +61,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
     Handle GetHandle() { return handle_; }
     GarbageCollector* collector_;
-    GarbageCollector::Config config_;
+    GCConfig config_;
     Handle handle_;
     size_t saved_epoch_;
   };
@@ -87,10 +85,9 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() {
   }
 }
-void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
+void GCInvoker::GCInvokerImpl::CollectGarbage(GCConfig config) {
   DCHECK_EQ(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
-  if ((config.stack_state ==
-       GarbageCollector::Config::StackState::kNoHeapPointers) ||
+  if ((config.stack_state == StackState::kNoHeapPointers) ||
       (stack_support_ ==
        cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) {
     collector_->CollectGarbage(config);
@@ -98,8 +95,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
              platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
     if (!gc_task_handle_) {
       // Force a precise GC since it will run in a non-nestable task.
-      config.stack_state =
-          GarbageCollector::Config::StackState::kNoHeapPointers;
+      config.stack_state = StackState::kNoHeapPointers;
       DCHECK_NE(cppgc::Heap::StackSupport::kSupportsConservativeStackScan,
                 stack_support_);
       gc_task_handle_ = GCTask::Post(
@@ -109,7 +105,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
 }
 void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
-    GarbageCollector::Config config) {
+    GCConfig config) {
   DCHECK_NE(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
   if ((stack_support_ !=
        cppgc::Heap::StackSupport::kSupportsConservativeStackScan) &&
@@ -134,12 +130,11 @@ GCInvoker::GCInvoker(GarbageCollector* collector, cppgc::Platform* platform,
 GCInvoker::~GCInvoker() = default;
-void GCInvoker::CollectGarbage(GarbageCollector::Config config) {
+void GCInvoker::CollectGarbage(GCConfig config) {
   impl_->CollectGarbage(config);
 }
-void GCInvoker::StartIncrementalGarbageCollection(
-    GarbageCollector::Config config) {
+void GCInvoker::StartIncrementalGarbageCollection(GCConfig config) {
   impl_->StartIncrementalGarbageCollection(config);
 }
...
@@ -34,8 +34,8 @@ class V8_EXPORT_PRIVATE GCInvoker final : public GarbageCollector {
   GCInvoker(const GCInvoker&) = delete;
   GCInvoker& operator=(const GCInvoker&) = delete;
-  void CollectGarbage(GarbageCollector::Config) final;
-  void StartIncrementalGarbageCollection(GarbageCollector::Config) final;
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
   size_t epoch() const final;
   const EmbedderStackState* override_stack_state() const final;
...
@@ -250,10 +250,9 @@ void HeapBase::Terminate() {
 #endif  // defined(CPPGC_YOUNG_GENERATION)
     in_atomic_pause_ = true;
-    stats_collector()->NotifyMarkingStarted(
-        GarbageCollector::Config::CollectionType::kMajor,
-        GarbageCollector::Config::MarkingType::kAtomic,
-        GarbageCollector::Config::IsForcedGC::kForced);
+    stats_collector()->NotifyMarkingStarted(CollectionType::kMajor,
+                                            GCConfig::MarkingType::kAtomic,
+                                            GCConfig::IsForcedGC::kForced);
     object_allocator().ResetLinearAllocationBuffers();
     stats_collector()->NotifyMarkingCompleted(0);
     ExecutePreFinalizers();
...
-// Copyright 2020 the V8 project authors. All rights reserved.
+// Copyright 2022 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
@@ -9,6 +9,28 @@
 namespace cppgc::internal {
+using StackState = cppgc::Heap::StackState;
+enum class CollectionType : uint8_t {
+  kMinor,
+  kMajor,
+};
+struct MarkingConfig {
+  using MarkingType = cppgc::Heap::MarkingType;
+  enum class IsForcedGC : uint8_t {
+    kNotForced,
+    kForced,
+  };
+  static constexpr MarkingConfig Default() { return {}; }
+  const CollectionType collection_type = CollectionType::kMajor;
+  StackState stack_state = StackState::kMayContainHeapPointers;
+  MarkingType marking_type = MarkingType::kIncremental;
+  IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
+};
 struct SweepingConfig {
   using SweepingType = cppgc::Heap::SweepingType;
   enum class CompactableSpaceHandling { kSweep, kIgnore };
@@ -20,6 +42,62 @@ struct SweepingConfig {
   FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
 };
+struct GCConfig {
+  using MarkingType = MarkingConfig::MarkingType;
+  using SweepingType = SweepingConfig::SweepingType;
+  using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling;
+  using IsForcedGC = MarkingConfig::IsForcedGC;
+  static constexpr GCConfig ConservativeAtomicConfig() {
+    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+  static constexpr GCConfig PreciseAtomicConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+  static constexpr GCConfig ConservativeIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+  static constexpr GCConfig PreciseIncrementalConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncremental, SweepingType::kAtomic};
+  }
+  static constexpr GCConfig
+  PreciseIncrementalMarkingConcurrentSweepingConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncremental, SweepingType::kIncrementalAndConcurrent};
+  }
+  static constexpr GCConfig PreciseConcurrentConfig() {
+    return {CollectionType::kMajor, StackState::kNoHeapPointers,
+            MarkingType::kIncrementalAndConcurrent,
+            SweepingType::kIncrementalAndConcurrent};
+  }
+  static constexpr GCConfig MinorPreciseAtomicConfig() {
+    return {CollectionType::kMinor, StackState::kNoHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+  static constexpr GCConfig MinorConservativeAtomicConfig() {
+    return {CollectionType::kMinor, StackState::kMayContainHeapPointers,
+            MarkingType::kAtomic, SweepingType::kAtomic};
+  }
+  CollectionType collection_type = CollectionType::kMajor;
+  StackState stack_state = StackState::kMayContainHeapPointers;
+  MarkingType marking_type = MarkingType::kAtomic;
+  SweepingType sweeping_type = SweepingType::kAtomic;
+  FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
+  IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
+};
 }  // namespace cppgc::internal
 #endif  // V8_HEAP_CPPGC_HEAP_CONFIG_H_
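As a usage reference, here is a minimal sketch of a call site after the move. The helper name is made up for illustration; the GCConfig members and the GarbageCollector::CollectGarbage(GCConfig) signature are the ones shown in this diff, and the named-config-plus-override pattern mirrors the object-allocator.cc hunk further below:

#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-config.h"

namespace cppgc::internal {

// Hypothetical helper, not part of this CL: request a forced, conservative
// atomic GC that also discards free memory where possible.
void CollectWithDiscard(GarbageCollector& collector) {
  GCConfig config = GCConfig::ConservativeAtomicConfig();
  config.free_memory_handling =
      GCConfig::FreeMemoryHandling::kDiscardWherePossible;
  config.is_forced_gc = GCConfig::IsForcedGC::kForced;
  collector.CollectGarbage(config);
}

}  // namespace cppgc::internal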
@@ -93,14 +93,12 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
   size_t allocated_object_size = stats_collector_->allocated_object_size();
   if (allocated_object_size > limit_for_atomic_gc_) {
     collector_->CollectGarbage(
-        {GarbageCollector::Config::CollectionType::kMajor,
-         GarbageCollector::Config::StackState::kMayContainHeapPointers,
-         GarbageCollector::Config::MarkingType::kAtomic, sweeping_support_});
+        {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+         GCConfig::MarkingType::kAtomic, sweeping_support_});
   } else if (allocated_object_size > limit_for_incremental_gc_) {
     if (marking_support_ == cppgc::Heap::MarkingType::kAtomic) return;
     collector_->StartIncrementalGarbageCollection(
-        {GarbageCollector::Config::CollectionType::kMajor,
-         GarbageCollector::Config::StackState::kMayContainHeapPointers,
+        {CollectionType::kMajor, StackState::kMayContainHeapPointers,
          marking_support_, sweeping_support_});
   }
 }
...
@@ -45,11 +45,10 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform,
 void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
                                       Heap::StackState stack_state) {
   internal::Heap::From(this)->CollectGarbage(
-      {internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
-       MarkingType::kAtomic, SweepingType::kAtomic,
-       internal::GarbageCollector::Config::FreeMemoryHandling::
-           kDiscardWherePossible,
-       internal::GarbageCollector::Config::IsForcedGC::kForced});
+      {internal::CollectionType::kMajor, stack_state, MarkingType::kAtomic,
+       SweepingType::kAtomic,
+       internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible,
+       internal::GCConfig::IsForcedGC::kForced});
 }
 AllocationHandle& Heap::GetAllocationHandle() {
@@ -62,11 +61,10 @@ namespace internal {
 namespace {
-void CheckConfig(Heap::Config config, HeapBase::MarkingType marking_support,
+void CheckConfig(GCConfig config, HeapBase::MarkingType marking_support,
                  HeapBase::SweepingType sweeping_support) {
-  CHECK_WITH_MSG(
-      (config.collection_type != Heap::Config::CollectionType::kMinor) ||
-          (config.stack_state == Heap::Config::StackState::kNoHeapPointers),
+  CHECK_WITH_MSG((config.collection_type != CollectionType::kMinor) ||
+                     (config.stack_state == StackState::kNoHeapPointers),
                  "Minor GCs with stack is currently not supported");
   CHECK_LE(static_cast<int>(config.marking_type),
            static_cast<int>(marking_support));
@@ -94,17 +92,16 @@ Heap::~Heap() {
   // Gracefully finish already running GC if any, but don't finalize live
   // objects.
   FinalizeIncrementalGarbageCollectionIfRunning(
-      {Config::CollectionType::kMajor,
-       Config::StackState::kMayContainHeapPointers,
-       Config::MarkingType::kAtomic, Config::SweepingType::kAtomic});
+      {CollectionType::kMajor, StackState::kMayContainHeapPointers,
+       GCConfig::MarkingType::kAtomic, GCConfig::SweepingType::kAtomic});
   {
     subtle::NoGarbageCollectionScope no_gc(*this);
     sweeper_.FinishIfRunning();
   }
 }
-void Heap::CollectGarbage(Config config) {
-  DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
+void Heap::CollectGarbage(GCConfig config) {
+  DCHECK_EQ(GCConfig::MarkingType::kAtomic, config.marking_type);
   CheckConfig(config, marking_support_, sweeping_support_);
   if (in_no_gc_scope()) return;
@@ -118,9 +115,9 @@ void Heap::CollectGarbage(Config config) {
   FinalizeGarbageCollection(config.stack_state);
 }
-void Heap::StartIncrementalGarbageCollection(Config config) {
-  DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
-  DCHECK_NE(marking_support_, Config::MarkingType::kAtomic);
+void Heap::StartIncrementalGarbageCollection(GCConfig config) {
+  DCHECK_NE(GCConfig::MarkingType::kAtomic, config.marking_type);
+  DCHECK_NE(marking_support_, GCConfig::MarkingType::kAtomic);
   CheckConfig(config, marking_support_, sweeping_support_);
   if (IsMarking() || in_no_gc_scope()) return;
@@ -130,19 +127,19 @@ void Heap::StartIncrementalGarbageCollection(Config config) {
   StartGarbageCollection(config);
 }
-void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) {
+void Heap::FinalizeIncrementalGarbageCollectionIfRunning(GCConfig config) {
   CheckConfig(config, marking_support_, sweeping_support_);
   if (!IsMarking()) return;
   DCHECK(!in_no_gc_scope());
-  DCHECK_NE(Config::MarkingType::kAtomic, config_.marking_type);
+  DCHECK_NE(GCConfig::MarkingType::kAtomic, config_.marking_type);
   config_ = config;
   FinalizeGarbageCollection(config.stack_state);
 }
-void Heap::StartGarbageCollection(Config config) {
+void Heap::StartGarbageCollection(GCConfig config) {
   DCHECK(!IsMarking());
   DCHECK(!in_no_gc_scope());
@@ -152,18 +149,17 @@ void Heap::StartGarbageCollection(Config config) {
   epoch_++;
 #if defined(CPPGC_YOUNG_GENERATION)
-  if (config.collection_type == Config::CollectionType::kMajor)
+  if (config.collection_type == CollectionType::kMajor)
     SequentialUnmarker unmarker(raw_heap());
 #endif  // defined(CPPGC_YOUNG_GENERATION)
-  const Marker::MarkingConfig marking_config{
-      config.collection_type, config.stack_state, config.marking_type,
-      config.is_forced_gc};
+  const MarkingConfig marking_config{config.collection_type, config.stack_state,
+                                     config.marking_type, config.is_forced_gc};
   marker_ = std::make_unique<Marker>(AsBase(), platform_.get(), marking_config);
   marker_->StartMarking();
 }
-void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
+void Heap::FinalizeGarbageCollection(StackState stack_state) {
   DCHECK(IsMarking());
   DCHECK(!in_no_gc_scope());
   CHECK(!in_disallow_gc_scope());
@@ -220,7 +216,7 @@ void Heap::EnableGenerationalGC() {
 void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
 void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
-    Config::StackState stack_state) {
+    StackState stack_state) {
   StatsCollector::EnabledScope stats_scope(
       stats_collector(), StatsCollector::kMarkIncrementalFinalize);
   FinalizeGarbageCollection(stack_state);
@@ -229,10 +225,9 @@ void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
 void Heap::StartIncrementalGarbageCollectionForTesting() {
   DCHECK(!IsMarking());
   DCHECK(!in_no_gc_scope());
-  StartGarbageCollection({Config::CollectionType::kMajor,
-                          Config::StackState::kNoHeapPointers,
-                          Config::MarkingType::kIncrementalAndConcurrent,
-                          Config::SweepingType::kIncrementalAndConcurrent});
+  StartGarbageCollection({CollectionType::kMajor, StackState::kNoHeapPointers,
+                          GCConfig::MarkingType::kIncrementalAndConcurrent,
+                          GCConfig::SweepingType::kIncrementalAndConcurrent});
 }
 void Heap::FinalizeIncrementalGarbageCollectionForTesting(
...
@@ -32,9 +32,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   HeapBase& AsBase() { return *this; }
   const HeapBase& AsBase() const { return *this; }
-  void CollectGarbage(Config) final;
-  void StartIncrementalGarbageCollection(Config) final;
-  void FinalizeIncrementalGarbageCollectionIfRunning(Config);
+  void CollectGarbage(GCConfig) final;
+  void StartIncrementalGarbageCollection(GCConfig) final;
+  void FinalizeIncrementalGarbageCollectionIfRunning(GCConfig);
   size_t epoch() const final { return epoch_; }
   const EmbedderStackState* override_stack_state() const final {
@@ -46,15 +46,15 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   void DisableHeapGrowingForTesting();
  private:
-  void StartGarbageCollection(Config);
-  void FinalizeGarbageCollection(Config::StackState);
+  void StartGarbageCollection(GCConfig);
+  void FinalizeGarbageCollection(StackState);
-  void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
+  void FinalizeIncrementalGarbageCollectionIfNeeded(StackState) final;
   void StartIncrementalGarbageCollectionForTesting() final;
   void FinalizeIncrementalGarbageCollectionForTesting(EmbedderStackState) final;
-  Config config_;
+  GCConfig config_;
   GCInvoker gc_invoker_;
   HeapGrowing growing_;
   bool generational_gc_enabled_ = false;
...
@@ -32,11 +32,10 @@ namespace internal {
 namespace {
-bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
-                                     HeapBase& heap) {
-  if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
+bool EnterIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) {
+  if (config.marking_type == MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
-          Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
+          MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Enter();
     heap.set_incremental_marking_in_progress(true);
     return true;
@@ -44,11 +43,10 @@ bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
   return false;
 }
-bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
-                                    HeapBase& heap) {
-  if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental ||
+bool ExitIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) {
+  if (config.marking_type == MarkingConfig::MarkingType::kIncremental ||
       config.marking_type ==
-          Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
+          MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
     WriteBarrier::FlagUpdater::Exit();
     heap.set_incremental_marking_in_progress(false);
     return true;
@@ -87,7 +85,7 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task {
  public:
   using Handle = SingleThreadedHandle;
-  IncrementalMarkingTask(MarkerBase*, MarkingConfig::StackState);
+  IncrementalMarkingTask(MarkerBase*, StackState);
   static Handle Post(cppgc::TaskRunner*, MarkerBase*);
@@ -95,13 +93,13 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task {
   void Run() final;
   MarkerBase* const marker_;
-  MarkingConfig::StackState stack_state_;
+  StackState stack_state_;
   // TODO(chromium:1056170): Change to CancelableTask.
   Handle handle_;
 };
 MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask(
-    MarkerBase* marker, MarkingConfig::StackState stack_state)
+    MarkerBase* marker, StackState stack_state)
     : marker_(marker),
       stack_state_(stack_state),
       handle_(Handle::NonEmptyTag{}) {}
@@ -117,10 +115,9 @@ MarkerBase::IncrementalMarkingTask::Post(cppgc::TaskRunner* runner,
   DCHECK_IMPLIES(marker->heap().stack_support() !=
                      HeapBase::StackSupport::kSupportsConservativeStackScan,
                  runner->NonNestableTasksEnabled());
-  MarkingConfig::StackState stack_state_for_task =
-      runner->NonNestableTasksEnabled()
-          ? MarkingConfig::StackState::kNoHeapPointers
-          : MarkingConfig::StackState::kMayContainHeapPointers;
+  const auto stack_state_for_task = runner->NonNestableTasksEnabled()
+                                        ? StackState::kNoHeapPointers
+                                        : StackState::kMayContainHeapPointers;
   auto task =
       std::make_unique<IncrementalMarkingTask>(marker, stack_state_for_task);
   auto handle = task->handle_;
@@ -152,8 +149,7 @@ MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform,
       foreground_task_runner_(platform_->GetForegroundTaskRunner()),
       mutator_marking_state_(heap, marking_worklists_,
                              heap.compactor().compaction_worklists()) {
-  DCHECK_IMPLIES(
-      config_.collection_type == MarkingConfig::CollectionType::kMinor,
+  DCHECK_IMPLIES(config_.collection_type == CollectionType::kMinor,
                  heap_.generational_gc_supported());
 }
@@ -163,7 +159,7 @@ MarkerBase::~MarkerBase() {
   // and should thus already be marked.
   if (!marking_worklists_.not_fully_constructed_worklist()->IsEmpty()) {
 #if DEBUG
-    DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state);
+    DCHECK_NE(StackState::kNoHeapPointers, config_.stack_state);
     std::unordered_set<HeapObjectHeader*> objects =
         mutator_marking_state_.not_fully_constructed_worklist().Extract();
     for (HeapObjectHeader* object : objects) DCHECK(object->IsMarked());
@@ -229,7 +225,7 @@ void MarkerBase::StartMarking() {
     // Performing incremental or concurrent marking.
     schedule_.NotifyIncrementalMarkingStart();
     // Scanning the stack is expensive so we only do it at the atomic pause.
-    VisitRoots(MarkingConfig::StackState::kNoHeapPointers);
+    VisitRoots(StackState::kNoHeapPointers);
     ScheduleIncrementalMarkingTask();
     if (config_.marking_type ==
         MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
@@ -244,14 +240,14 @@ void MarkerBase::StartMarking() {
 }
 void MarkerBase::HandleNotFullyConstructedObjects() {
-  if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+  if (config_.stack_state == StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   } else {
     MarkNotFullyConstructedObjects();
   }
 }
-void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
+void MarkerBase::EnterAtomicPause(StackState stack_state) {
   StatsCollector::EnabledScope top_stats_scope(heap().stats_collector(),
                                                StatsCollector::kAtomicMark);
   StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
@@ -310,7 +306,7 @@ void MarkerBase::LeaveAtomicPause() {
   heap().SetStackStateOfPrevGC(config_.stack_state);
 }
-void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
+void MarkerBase::FinishMarking(StackState stack_state) {
   DCHECK(is_marking_);
   EnterAtomicPause(stack_state);
   {
@@ -383,7 +379,7 @@ void MarkerBase::ProcessWeakness() {
 #if defined(CPPGC_YOUNG_GENERATION)
   if (heap().generational_gc_supported()) {
     auto& remembered_set = heap().remembered_set();
-    if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+    if (config_.collection_type == CollectionType::kMinor) {
       // Custom callbacks assume that untraced pointers point to not yet freed
       // objects. They must make sure that upon callback completion no
       // UntracedMember points to a freed object. This may not hold true if a
@@ -425,7 +421,7 @@ void MarkerBase::ProcessWeakness() {
   DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
 }
-void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
+void MarkerBase::VisitRoots(StackState stack_state) {
   StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
                                            StatsCollector::kMarkVisitRoots);
@@ -442,13 +438,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
     }
   }
-  if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
+  if (stack_state != StackState::kNoHeapPointers) {
     StatsCollector::DisabledScope stack_stats_scope(
         heap().stats_collector(), StatsCollector::kMarkVisitStack);
     heap().stack()->IteratePointers(&stack_visitor());
   }
 #if defined(CPPGC_YOUNG_GENERATION)
-  if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+  if (config_.collection_type == CollectionType::kMinor) {
     StatsCollector::EnabledScope stats_scope(
         heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
     heap().remembered_set().Visit(visitor(), mutator_marking_state_);
@@ -482,13 +478,12 @@ void MarkerBase::ScheduleIncrementalMarkingTask() {
   IncrementalMarkingTask::Post(foreground_task_runner_.get(), this);
 }
-bool MarkerBase::IncrementalMarkingStepForTesting(
-    MarkingConfig::StackState stack_state) {
+bool MarkerBase::IncrementalMarkingStepForTesting(StackState stack_state) {
   return IncrementalMarkingStep(stack_state);
 }
-bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) {
-  if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+bool MarkerBase::IncrementalMarkingStep(StackState stack_state) {
+  if (stack_state == StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   }
   config_.stack_state = stack_state;
...
@@ -15,6 +15,7 @@
 #include "src/heap/base/worklist.h"
 #include "src/heap/cppgc/concurrent-marker.h"
 #include "src/heap/cppgc/globals.h"
+#include "src/heap/cppgc/heap-config.h"
 #include "src/heap/cppgc/incremental-marking-schedule.h"
 #include "src/heap/cppgc/marking-state.h"
 #include "src/heap/cppgc/marking-visitor.h"
@@ -39,26 +40,6 @@ class V8_EXPORT_PRIVATE MarkerBase {
  public:
   class IncrementalMarkingTask;
-  struct MarkingConfig {
-    enum class CollectionType : uint8_t {
-      kMinor,
-      kMajor,
-    };
-    using StackState = cppgc::Heap::StackState;
-    using MarkingType = cppgc::Heap::MarkingType;
-    enum class IsForcedGC : uint8_t {
-      kNotForced,
-      kForced,
-    };
-    static constexpr MarkingConfig Default() { return {}; }
-    const CollectionType collection_type = CollectionType::kMajor;
-    StackState stack_state = StackState::kMayContainHeapPointers;
-    MarkingType marking_type = MarkingType::kIncremental;
-    IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
-  };
   enum class WriteBarrierType {
     kDijkstra,
     kSteele,
@@ -89,7 +70,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   // - stops incremental/concurrent marking;
   // - flushes back any in-construction worklists if needed;
   // - Updates the MarkingConfig if the stack state has changed;
-  void EnterAtomicPause(MarkingConfig::StackState);
+  void EnterAtomicPause(StackState);
   // Makes marking progress. A `marked_bytes_limit` of 0 means that the limit
   // is determined by the internal marking scheduler.
@@ -113,7 +94,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   // - AdvanceMarkingWithLimits()
   // - ProcessWeakness()
   // - LeaveAtomicPause()
-  void FinishMarking(MarkingConfig::StackState);
+  void FinishMarking(StackState);
   void ProcessWeakness();
@@ -134,7 +115,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   void SetMainThreadMarkingDisabledForTesting(bool);
   void WaitForConcurrentMarkingForTesting();
   void ClearAllWorklistsForTesting();
-  bool IncrementalMarkingStepForTesting(MarkingConfig::StackState);
+  bool IncrementalMarkingStepForTesting(StackState);
   MarkingWorklists& MarkingWorklistsForTesting() { return marking_worklists_; }
   MutatorMarkingState& MutatorMarkingStateForTesting() {
@@ -157,7 +138,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   bool ProcessWorklistsWithDeadline(size_t, v8::base::TimeTicks);
-  void VisitRoots(MarkingConfig::StackState);
+  void VisitRoots(StackState);
   bool VisitCrossThreadPersistentsIfNeeded();
@@ -165,7 +146,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
   void ScheduleIncrementalMarkingTask();
-  bool IncrementalMarkingStep(MarkingConfig::StackState);
+  bool IncrementalMarkingStep(StackState);
   void AdvanceMarkingOnAllocation();
...
@@ -36,7 +36,7 @@ void VerificationState::VerifyMarked(const void* base_object_payload) const {
 }
 MarkingVerifierBase::MarkingVerifierBase(
-    HeapBase& heap, Heap::Config::CollectionType collection_type,
+    HeapBase& heap, CollectionType collection_type,
     VerificationState& verification_state,
     std::unique_ptr<cppgc::Visitor> visitor)
     : ConservativeTracingVisitor(heap, *heap.page_backend(), *visitor.get()),
@@ -45,7 +45,7 @@ MarkingVerifierBase::MarkingVerifierBase(
       collection_type_(collection_type) {}
 void MarkingVerifierBase::Run(
-    Heap::Config::StackState stack_state, uintptr_t stack_end,
+    StackState stack_state, uintptr_t stack_end,
     v8::base::Optional<size_t> expected_marked_bytes) {
   Traverse(heap_.raw_heap());
 // Avoid verifying the stack when running with TSAN as the TSAN runtime changes
@@ -61,7 +61,7 @@ void MarkingVerifierBase::Run(
 // TODO(chromium:1325007): Investigate if Oilpan verification can be moved
 // before V8 compaction or compaction never runs with stack.
 #if !defined(THREAD_SANITIZER) && !defined(CPPGC_POINTER_COMPRESSION)
-  if (stack_state == Heap::Config::StackState::kMayContainHeapPointers) {
+  if (stack_state == StackState::kMayContainHeapPointers) {
     in_construction_objects_ = &in_construction_objects_stack_;
     heap_.stack()->IteratePointersUnsafe(this, stack_end);
     // The objects found through the unsafe iteration are only a subset of the
@@ -114,7 +114,7 @@ bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) {
   DCHECK(!header.IsFree());
 #if defined(CPPGC_YOUNG_GENERATION)
-  if (collection_type_ == Heap::Config::CollectionType::kMinor) {
+  if (collection_type_ == CollectionType::kMinor) {
     auto& caged_heap = CagedHeap::Instance();
     const auto age = CagedHeapLocalData::Get().age_table.GetAge(
         caged_heap.OffsetFromAddress(header.ObjectStart()));
@@ -185,7 +185,7 @@ class VerificationVisitor final : public cppgc::Visitor {
 }  // namespace
 MarkingVerifier::MarkingVerifier(HeapBase& heap_base,
-                                 Heap::Config::CollectionType collection_type)
+                                 CollectionType collection_type)
     : MarkingVerifierBase(heap_base, collection_type, state_,
                           std::make_unique<VerificationVisitor>(state_)) {}
...
...@@ -41,11 +41,11 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase ...@@ -41,11 +41,11 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
MarkingVerifierBase(const MarkingVerifierBase&) = delete; MarkingVerifierBase(const MarkingVerifierBase&) = delete;
MarkingVerifierBase& operator=(const MarkingVerifierBase&) = delete; MarkingVerifierBase& operator=(const MarkingVerifierBase&) = delete;
void Run(Heap::Config::StackState, uintptr_t, v8::base::Optional<size_t>); void Run(StackState, uintptr_t, v8::base::Optional<size_t>);
protected: protected:
MarkingVerifierBase(HeapBase&, Heap::Config::CollectionType, MarkingVerifierBase(HeapBase&, CollectionType, VerificationState&,
VerificationState&, std::unique_ptr<cppgc::Visitor>); std::unique_ptr<cppgc::Visitor>);
private: private:
void VisitInConstructionConservatively(HeapObjectHeader&, void VisitInConstructionConservatively(HeapObjectHeader&,
...@@ -63,12 +63,12 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase ...@@ -63,12 +63,12 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
&in_construction_objects_heap_; &in_construction_objects_heap_;
size_t verifier_found_marked_bytes_ = 0; size_t verifier_found_marked_bytes_ = 0;
bool verifier_found_marked_bytes_are_exact_ = true; bool verifier_found_marked_bytes_are_exact_ = true;
Heap::Config::CollectionType collection_type_; CollectionType collection_type_;
}; };
class V8_EXPORT_PRIVATE MarkingVerifier final : public MarkingVerifierBase { class V8_EXPORT_PRIVATE MarkingVerifier final : public MarkingVerifierBase {
public: public:
MarkingVerifier(HeapBase&, Heap::Config::CollectionType); MarkingVerifier(HeapBase&, CollectionType);
~MarkingVerifier() final = default; ~MarkingVerifier() final = default;
private: private:
......
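For orientation, a minimal sketch of how a caller drives the verifier after this change (illustrative only; `heap` stands for a cppgc::internal::HeapBase& and `expected_marked_bytes` for a v8::base::Optional<size_t>, both assumed here rather than taken from a specific call site in this CL):
// Sketch: the verifier now takes the unscoped CollectionType/StackState
// from heap-config.h instead of the Heap::Config nested names.
MarkingVerifier verifier(heap, CollectionType::kMajor);
verifier.Run(StackState::kNoHeapPointers,
             v8::base::Stack::GetCurrentStackPosition(),
             expected_marked_bytes);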
...@@ -148,9 +148,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space, ...@@ -148,9 +148,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
void* result = TryAllocateLargeObject(page_backend_, large_space, void* result = TryAllocateLargeObject(page_backend_, large_space,
stats_collector_, size, gcinfo); stats_collector_, size, gcinfo);
if (!result) { if (!result) {
auto config = GarbageCollector::Config::ConservativeAtomicConfig(); auto config = GCConfig::ConservativeAtomicConfig();
config.free_memory_handling = config.free_memory_handling =
GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible; GCConfig::FreeMemoryHandling::kDiscardWherePossible;
garbage_collector_.CollectGarbage(config); garbage_collector_.CollectGarbage(config);
result = TryAllocateLargeObject(page_backend_, large_space, result = TryAllocateLargeObject(page_backend_, large_space,
stats_collector_, size, gcinfo); stats_collector_, size, gcinfo);
...@@ -170,9 +170,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space, ...@@ -170,9 +170,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
} }
if (!TryRefillLinearAllocationBuffer(space, request_size)) { if (!TryRefillLinearAllocationBuffer(space, request_size)) {
auto config = GarbageCollector::Config::ConservativeAtomicConfig(); auto config = GCConfig::ConservativeAtomicConfig();
config.free_memory_handling = config.free_memory_handling =
GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible; GCConfig::FreeMemoryHandling::kDiscardWherePossible;
garbage_collector_.CollectGarbage(config); garbage_collector_.CollectGarbage(config);
if (!TryRefillLinearAllocationBuffer(space, request_size)) { if (!TryRefillLinearAllocationBuffer(space, request_size)) {
oom_handler_("Oilpan: Normal allocation."); oom_handler_("Oilpan: Normal allocation.");
......
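A consolidated sketch of the allocation-failure fallback shown in the hunks above, written against the relocated GCConfig (assumption: `collector` names the GarbageCollector& that ObjectAllocator holds):
// Illustrative only: conservative atomic GC that also discards free memory,
// as requested by the out-of-line allocation slow path.
GCConfig config = GCConfig::ConservativeAtomicConfig();
config.free_memory_handling = GCConfig::FreeMemoryHandling::kDiscardWherePossible;
collector.CollectGarbage(config);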
...@@ -171,8 +171,7 @@ int64_t SumPhases(const MetricRecorder::GCCycle::Phases& phases) { ...@@ -171,8 +171,7 @@ int64_t SumPhases(const MetricRecorder::GCCycle::Phases& phases) {
} }
MetricRecorder::GCCycle GetCycleEventForMetricRecorder( MetricRecorder::GCCycle GetCycleEventForMetricRecorder(
StatsCollector::CollectionType type, CollectionType type, StatsCollector::MarkingType marking_type,
StatsCollector::MarkingType marking_type,
StatsCollector::SweepingType sweeping_type, int64_t atomic_mark_us, StatsCollector::SweepingType sweeping_type, int64_t atomic_mark_us,
int64_t atomic_weak_us, int64_t atomic_compact_us, int64_t atomic_sweep_us, int64_t atomic_weak_us, int64_t atomic_compact_us, int64_t atomic_sweep_us,
int64_t incremental_mark_us, int64_t incremental_sweep_us, int64_t incremental_mark_us, int64_t incremental_sweep_us,
...@@ -181,7 +180,7 @@ MetricRecorder::GCCycle GetCycleEventForMetricRecorder( ...@@ -181,7 +180,7 @@ MetricRecorder::GCCycle GetCycleEventForMetricRecorder(
int64_t objects_freed_bytes, int64_t memory_before_bytes, int64_t objects_freed_bytes, int64_t memory_before_bytes,
int64_t memory_after_bytes, int64_t memory_freed_bytes) { int64_t memory_after_bytes, int64_t memory_freed_bytes) {
MetricRecorder::GCCycle event; MetricRecorder::GCCycle event;
event.type = (type == StatsCollector::CollectionType::kMajor) event.type = (type == CollectionType::kMajor)
? MetricRecorder::GCCycle::Type::kMajor ? MetricRecorder::GCCycle::Type::kMajor
: MetricRecorder::GCCycle::Type::kMinor; : MetricRecorder::GCCycle::Type::kMinor;
// MainThread.Incremental: // MainThread.Incremental:
......
...@@ -68,12 +68,11 @@ namespace internal { ...@@ -68,12 +68,11 @@ namespace internal {
// Sink for various time and memory statistics. // Sink for various time and memory statistics.
class V8_EXPORT_PRIVATE StatsCollector final { class V8_EXPORT_PRIVATE StatsCollector final {
using IsForcedGC = GarbageCollector::Config::IsForcedGC; using IsForcedGC = GCConfig::IsForcedGC;
public: public:
using CollectionType = GarbageCollector::Config::CollectionType; using MarkingType = GCConfig::MarkingType;
using MarkingType = GarbageCollector::Config::MarkingType; using SweepingType = GCConfig::SweepingType;
using SweepingType = GarbageCollector::Config::SweepingType;
#if defined(CPPGC_DECLARE_ENUM) #if defined(CPPGC_DECLARE_ENUM)
static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined"); static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined");
......
...
@@ -45,8 +45,7 @@ CppHeap::GarbageCollectionFlags ConvertTraceFlags(
 void LocalEmbedderHeapTracer::PrepareForTrace(
     EmbedderHeapTracer::TraceFlags flags) {
   if (cpp_heap_)
-    cpp_heap()->InitializeTracing(
-        cppgc::internal::GarbageCollector::Config::CollectionType::kMajor,
-        ConvertTraceFlags(flags));
+    cpp_heap()->InitializeTracing(cppgc::internal::CollectionType::kMajor,
+                                  ConvertTraceFlags(flags));
 }
......
...
@@ -74,9 +74,8 @@ class CompactorTest : public testing::TestWithPlatform {
   void StartCompaction() {
     compactor().EnableForNextGCForTesting();
-    compactor().InitializeIfShouldCompact(
-        GarbageCollector::Config::MarkingType::kIncremental,
-        GarbageCollector::Config::StackState::kNoHeapPointers);
+    compactor().InitializeIfShouldCompact(GCConfig::MarkingType::kIncremental,
+                                          StackState::kNoHeapPointers);
     EXPECT_TRUE(compactor().IsEnabledForTesting());
   }
...@@ -86,12 +85,11 @@ class CompactorTest : public testing::TestWithPlatform { ...@@ -86,12 +85,11 @@ class CompactorTest : public testing::TestWithPlatform {
CompactableGCed::g_destructor_callcount = 0u; CompactableGCed::g_destructor_callcount = 0u;
StartCompaction(); StartCompaction();
heap()->StartIncrementalGarbageCollection( heap()->StartIncrementalGarbageCollection(
GarbageCollector::Config::PreciseIncrementalConfig()); GCConfig::PreciseIncrementalConfig());
} }
void EndGC() { void EndGC() {
heap()->marker()->FinishMarking( heap()->marker()->FinishMarking(StackState::kNoHeapPointers);
GarbageCollector::Config::StackState::kNoHeapPointers);
heap()->GetMarkerRefForTesting().reset(); heap()->GetMarkerRefForTesting().reset();
FinishCompaction(); FinishCompaction();
// Sweeping also verifies the object start bitmap. // Sweeping also verifies the object start bitmap.
...@@ -125,13 +123,12 @@ namespace internal { ...@@ -125,13 +123,12 @@ namespace internal {
TEST_F(CompactorTest, NothingToCompact) { TEST_F(CompactorTest, NothingToCompact) {
StartCompaction(); StartCompaction();
heap()->stats_collector()->NotifyMarkingStarted( heap()->stats_collector()->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor, CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
heap()->stats_collector()->NotifyMarkingCompleted(0); heap()->stats_collector()->NotifyMarkingCompleted(0);
FinishCompaction(); FinishCompaction();
heap()->stats_collector()->NotifySweepingCompleted( heap()->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic); GCConfig::SweepingType::kAtomic);
} }
TEST_F(CompactorTest, NonEmptySpaceAllLive) { TEST_F(CompactorTest, NonEmptySpaceAllLive) {
......
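The unit tests above fake a full GC cycle for the StatsCollector; spelled with the relocated types, the sequence looks roughly like this (sketch; `stats_collector` is assumed to be the heap's StatsCollector*):
// Marking start/end followed by sweeping end, using the unscoped enums.
stats_collector->NotifyMarkingStarted(CollectionType::kMajor,
                                      GCConfig::MarkingType::kAtomic,
                                      GCConfig::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);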
...@@ -27,20 +27,15 @@ class ConcurrentMarkingTest : public testing::TestWithHeap { ...@@ -27,20 +27,15 @@ class ConcurrentMarkingTest : public testing::TestWithHeap {
static constexpr int kNumStep = 10; static constexpr int kNumStep = 10;
#endif // defined(THREAD_SANITIZER) #endif // defined(THREAD_SANITIZER)
using Config = Heap::Config;
static constexpr Config ConcurrentPreciseConfig = {
Config::CollectionType::kMajor, Config::StackState::kNoHeapPointers,
Config::MarkingType::kIncrementalAndConcurrent,
Config::SweepingType::kIncrementalAndConcurrent};
void StartConcurrentGC() { void StartConcurrentGC() {
Heap* heap = Heap::From(GetHeap()); Heap* heap = Heap::From(GetHeap());
heap->DisableHeapGrowingForTesting(); heap->DisableHeapGrowingForTesting();
heap->StartIncrementalGarbageCollection(ConcurrentPreciseConfig); heap->StartIncrementalGarbageCollection(
GCConfig::PreciseConcurrentConfig());
heap->marker()->SetMainThreadMarkingDisabledForTesting(true); heap->marker()->SetMainThreadMarkingDisabledForTesting(true);
} }
bool SingleStep(Config::StackState stack_state) { bool SingleStep(StackState stack_state) {
MarkerBase* marker = Heap::From(GetHeap())->marker(); MarkerBase* marker = Heap::From(GetHeap())->marker();
DCHECK(marker); DCHECK(marker);
return marker->IncrementalMarkingStepForTesting(stack_state); return marker->IncrementalMarkingStepForTesting(stack_state);
...@@ -50,14 +45,10 @@ class ConcurrentMarkingTest : public testing::TestWithHeap { ...@@ -50,14 +45,10 @@ class ConcurrentMarkingTest : public testing::TestWithHeap {
Heap* heap = Heap::From(GetHeap()); Heap* heap = Heap::From(GetHeap());
heap->marker()->SetMainThreadMarkingDisabledForTesting(false); heap->marker()->SetMainThreadMarkingDisabledForTesting(false);
heap->FinalizeIncrementalGarbageCollectionIfRunning( heap->FinalizeIncrementalGarbageCollectionIfRunning(
ConcurrentPreciseConfig); GCConfig::PreciseConcurrentConfig());
} }
}; };
// static
constexpr ConcurrentMarkingTest::Config
ConcurrentMarkingTest::ConcurrentPreciseConfig;
template <typename T> template <typename T>
struct GCedHolder : public GarbageCollected<GCedHolder<T>> { struct GCedHolder : public GarbageCollected<GCedHolder<T>> {
void Trace(cppgc::Visitor* visitor) const { visitor->Trace(object); } void Trace(cppgc::Visitor* visitor) const { visitor->Trace(object); }
...@@ -110,7 +101,7 @@ TEST_F(ConcurrentMarkingTest, MarkingObjects) { ...@@ -110,7 +101,7 @@ TEST_F(ConcurrentMarkingTest, MarkingObjects) {
last_object = &(*last_object)->child_; last_object = &(*last_object)->child_;
} }
// Use SingleStep to re-post concurrent jobs. // Use SingleStep to re-post concurrent jobs.
SingleStep(Config::StackState::kNoHeapPointers); SingleStep(StackState::kNoHeapPointers);
} }
FinishGC(); FinishGC();
} }
...@@ -129,7 +120,7 @@ TEST_F(ConcurrentMarkingTest, MarkingInConstructionObjects) { ...@@ -129,7 +120,7 @@ TEST_F(ConcurrentMarkingTest, MarkingInConstructionObjects) {
}); });
} }
// Use SingleStep to re-post concurrent jobs. // Use SingleStep to re-post concurrent jobs.
SingleStep(Config::StackState::kNoHeapPointers); SingleStep(StackState::kNoHeapPointers);
} }
FinishGC(); FinishGC();
} }
...@@ -145,7 +136,7 @@ TEST_F(ConcurrentMarkingTest, MarkingMixinObjects) { ...@@ -145,7 +136,7 @@ TEST_F(ConcurrentMarkingTest, MarkingMixinObjects) {
last_object = &(*last_object)->child_; last_object = &(*last_object)->child_;
} }
// Use SingleStep to re-post concurrent jobs. // Use SingleStep to re-post concurrent jobs.
SingleStep(Config::StackState::kNoHeapPointers); SingleStep(StackState::kNoHeapPointers);
} }
FinishGC(); FinishGC();
} }
......
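The hand-rolled concurrent marking config removed above is replaced by a GCConfig preset; roughly (sketch; `heap` is the internal Heap* from the test fixture, as in the tests):
// Start and finalize an incremental/concurrent GC via the preset config.
heap->StartIncrementalGarbageCollection(GCConfig::PreciseConcurrentConfig());
heap->FinalizeIncrementalGarbageCollectionIfRunning(
    GCConfig::PreciseConcurrentConfig());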
...@@ -73,9 +73,8 @@ class ConcurrentSweeperTest : public testing::TestWithHeap { ...@@ -73,9 +73,8 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
// Pretend do finish marking as StatsCollector verifies that Notify* // Pretend do finish marking as StatsCollector verifies that Notify*
// methods are called in the right order. // methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted( heap->stats_collector()->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor, CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
heap->stats_collector()->NotifyMarkingCompleted(0); heap->stats_collector()->NotifyMarkingCompleted(0);
Sweeper& sweeper = heap->sweeper(); Sweeper& sweeper = heap->sweeper();
const SweepingConfig sweeping_config{ const SweepingConfig sweeping_config{
......
...@@ -49,11 +49,8 @@ class EphemeronHolderTraceEphemeron ...@@ -49,11 +49,8 @@ class EphemeronHolderTraceEphemeron
}; };
class EphemeronPairTest : public testing::TestWithHeap { class EphemeronPairTest : public testing::TestWithHeap {
using MarkingConfig = Marker::MarkingConfig; static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
CollectionType::kMajor, StackState::kNoHeapPointers,
static constexpr Marker::MarkingConfig IncrementalPreciseMarkingConfig = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental}; MarkingConfig::MarkingType::kIncremental};
public: public:
...@@ -63,11 +60,11 @@ class EphemeronPairTest : public testing::TestWithHeap { ...@@ -63,11 +60,11 @@ class EphemeronPairTest : public testing::TestWithHeap {
} }
void FinishMarking() { void FinishMarking() {
marker_->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); marker_->FinishMarking(StackState::kNoHeapPointers);
// Pretend do finish sweeping as StatsCollector verifies that Notify* // Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order. // methods are called in the right order.
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental); GCConfig::SweepingType::kIncremental);
} }
void InitializeMarker(HeapBase& heap, cppgc::Platform* platform) { void InitializeMarker(HeapBase& heap, cppgc::Platform* platform) {
...@@ -81,15 +78,14 @@ class EphemeronPairTest : public testing::TestWithHeap { ...@@ -81,15 +78,14 @@ class EphemeronPairTest : public testing::TestWithHeap {
private: private:
bool SingleStep() { bool SingleStep() {
return marker_->IncrementalMarkingStepForTesting( return marker_->IncrementalMarkingStepForTesting(
MarkingConfig::StackState::kNoHeapPointers); StackState::kNoHeapPointers);
} }
std::unique_ptr<Marker> marker_; std::unique_ptr<Marker> marker_;
}; };
// static // static
constexpr Marker::MarkingConfig constexpr MarkingConfig EphemeronPairTest::IncrementalPreciseMarkingConfig;
EphemeronPairTest::IncrementalPreciseMarkingConfig;
} // namespace } // namespace
......
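Since CollectionType and StackState are now top-level in cppgc::internal, a test-local MarkingConfig can be brace-initialized without the old Marker::MarkingConfig scoping; a sketch (the constant name is hypothetical, mirroring the configs defined in the tests above):
// Hypothetical constant illustrating the shortened aggregate initializer.
static constexpr MarkingConfig kIncrementalPreciseMarking = {
    CollectionType::kMajor, StackState::kNoHeapPointers,
    MarkingConfig::MarkingType::kIncremental};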
...@@ -18,9 +18,8 @@ namespace { ...@@ -18,9 +18,8 @@ namespace {
class MockGarbageCollector : public GarbageCollector { class MockGarbageCollector : public GarbageCollector {
public: public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override)); MOCK_METHOD(void, CollectGarbage, (GCConfig), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection, MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override));
(GarbageCollector::Config), (override));
MOCK_METHOD(size_t, epoch, (), (const, override)); MOCK_METHOD(size_t, epoch, (), (const, override));
MOCK_METHOD(const EmbedderStackState*, override_stack_state, (), MOCK_METHOD(const EmbedderStackState*, override_stack_state, (),
(const, override)); (const, override));
...@@ -73,9 +72,8 @@ TEST(GCInvokerTest, PrecideGCIsInvokedSynchronously) { ...@@ -73,9 +72,8 @@ TEST(GCInvokerTest, PrecideGCIsInvokedSynchronously) {
GCInvoker invoker(&gc, &platform, GCInvoker invoker(&gc, &platform,
cppgc::Heap::StackSupport::kNoConservativeStackScan); cppgc::Heap::StackSupport::kNoConservativeStackScan);
EXPECT_CALL(gc, CollectGarbage(::testing::Field( EXPECT_CALL(gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state, &GCConfig::stack_state, StackState::kNoHeapPointers)));
GarbageCollector::Config::StackState::kNoHeapPointers))); invoker.CollectGarbage(GCConfig::PreciseAtomicConfig());
invoker.CollectGarbage(GarbageCollector::Config::PreciseAtomicConfig());
} }
TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) { TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
...@@ -85,9 +83,8 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) { ...@@ -85,9 +83,8 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
cppgc::Heap::StackSupport::kSupportsConservativeStackScan); cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state, &GCConfig::stack_state, StackState::kMayContainHeapPointers)));
GarbageCollector::Config::StackState::kMayContainHeapPointers))); invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
} }
TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) { TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) {
...@@ -100,7 +97,7 @@ TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) { ...@@ -100,7 +97,7 @@ TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) {
EXPECT_CALL(gc, epoch).WillOnce(::testing::Return(0)); EXPECT_CALL(gc, epoch).WillOnce(::testing::Return(0));
EXPECT_CALL(*static_cast<MockTaskRunner*>(runner.get()), EXPECT_CALL(*static_cast<MockTaskRunner*>(runner.get()),
PostNonNestableTask(::testing::_)); PostNonNestableTask(::testing::_));
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig()); invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
} }
TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) { TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
...@@ -110,7 +107,7 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) { ...@@ -110,7 +107,7 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
cppgc::Heap::StackSupport::kNoConservativeStackScan); cppgc::Heap::StackSupport::kNoConservativeStackScan);
EXPECT_CALL(gc, epoch).WillRepeatedly(::testing::Return(0)); EXPECT_CALL(gc, epoch).WillRepeatedly(::testing::Return(0));
EXPECT_CALL(gc, CollectGarbage); EXPECT_CALL(gc, CollectGarbage);
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig()); invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig());
platform.RunAllForegroundTasks(); platform.RunAllForegroundTasks();
} }
...
@@ -125,20 +122,18 @@ TEST(GCInvokerTest, IncrementalGCIsStarted) {
       cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
   EXPECT_CALL(
       gc, StartIncrementalGarbageCollection(::testing::Field(
-              &GarbageCollector::Config::stack_state,
-              GarbageCollector::Config::StackState::kMayContainHeapPointers)));
+              &GCConfig::stack_state, StackState::kMayContainHeapPointers)));
   invoker_with_support.StartIncrementalGarbageCollection(
-      GarbageCollector::Config::ConservativeIncrementalConfig());
+      GCConfig::ConservativeIncrementalConfig());

   // Conservative stack scanning *not* supported.
   GCInvoker invoker_without_support(
       &gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan);
-  EXPECT_CALL(
-      gc, StartIncrementalGarbageCollection(::testing::Field(
-              &GarbageCollector::Config::stack_state,
-              GarbageCollector::Config::StackState::kMayContainHeapPointers)))
+  EXPECT_CALL(gc,
+              StartIncrementalGarbageCollection(::testing::Field(
+                  &GCConfig::stack_state, StackState::kMayContainHeapPointers)))
       .Times(0);
   invoker_without_support.StartIncrementalGarbageCollection(
-      GarbageCollector::Config::ConservativeIncrementalConfig());
+      GCConfig::ConservativeIncrementalConfig());
 }
} // namespace internal } // namespace internal
......
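The gmock expectations shrink accordingly; as a sketch (assuming the usual gmock setup, with `gc` a MockGarbageCollector and `invoker` a GCInvoker as in the tests above):
// Match the stack_state member of the new GCConfig directly.
EXPECT_CALL(gc, CollectGarbage(::testing::Field(
                    &GCConfig::stack_state, StackState::kNoHeapPointers)));
invoker.CollectGarbage(GCConfig::PreciseAtomicConfig());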
...@@ -22,19 +22,16 @@ class FakeGarbageCollector : public GarbageCollector { ...@@ -22,19 +22,16 @@ class FakeGarbageCollector : public GarbageCollector {
void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; } void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; }
void CollectGarbage(GarbageCollector::Config config) override { void CollectGarbage(GCConfig config) override {
stats_collector_->NotifyMarkingStarted( stats_collector_->NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
stats_collector_->NotifyMarkingCompleted(live_bytes_); stats_collector_->NotifyMarkingCompleted(live_bytes_);
stats_collector_->NotifySweepingCompleted( stats_collector_->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
callcount_++; callcount_++;
} }
void StartIncrementalGarbageCollection( void StartIncrementalGarbageCollection(GCConfig config) override {
GarbageCollector::Config config) override {
UNREACHABLE(); UNREACHABLE();
} }
...@@ -51,9 +48,8 @@ class FakeGarbageCollector : public GarbageCollector { ...@@ -51,9 +48,8 @@ class FakeGarbageCollector : public GarbageCollector {
class MockGarbageCollector : public GarbageCollector { class MockGarbageCollector : public GarbageCollector {
public: public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override)); MOCK_METHOD(void, CollectGarbage, (GCConfig), (override));
MOCK_METHOD(void, StartIncrementalGarbageCollection, MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override));
(GarbageCollector::Config), (override));
MOCK_METHOD(size_t, epoch, (), (const, override)); MOCK_METHOD(size_t, epoch, (), (const, override));
MOCK_METHOD(const EmbedderStackState*, override_stack_state, (), MOCK_METHOD(const EmbedderStackState*, override_stack_state, (),
(const, override)); (const, override));
...@@ -79,8 +75,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) { ...@@ -79,8 +75,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
cppgc::Heap::SweepingType::kIncrementalAndConcurrent); cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state, &GCConfig::stack_state, StackState::kMayContainHeapPointers)));
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
FakeAllocate(&stats_collector, 100 * kMB); FakeAllocate(&stats_collector, 100 * kMB);
} }
...@@ -97,8 +92,7 @@ TEST(HeapGrowingTest, InitialHeapSize) { ...@@ -97,8 +92,7 @@ TEST(HeapGrowingTest, InitialHeapSize) {
FakeAllocate(&stats_collector, kObjectSize - 1); FakeAllocate(&stats_collector, kObjectSize - 1);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state, &GCConfig::stack_state, StackState::kMayContainHeapPointers)));
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
FakeAllocate(&stats_collector, kObjectSize); FakeAllocate(&stats_collector, kObjectSize);
} }
...@@ -146,9 +140,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) { ...@@ -146,9 +140,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
cppgc::Heap::MarkingType::kIncrementalAndConcurrent, cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent); cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(&GCConfig::stack_state,
&GarbageCollector::Config::stack_state, StackState::kMayContainHeapPointers)))
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
.Times(0); .Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc. // Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
...@@ -163,9 +156,8 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) { ...@@ -163,9 +156,8 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
cppgc::Heap::MarkingType::kIncrementalAndConcurrent, cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent); cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(&GCConfig::stack_state,
&GarbageCollector::Config::stack_state, StackState::kMayContainHeapPointers)))
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
.Times(0); .Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less the limit for atomic gc to trigger incremental gc. // Allocate 1 byte less the limit for atomic gc to trigger incremental gc.
...@@ -174,8 +166,7 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) { ...@@ -174,8 +166,7 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
::testing::Mock::VerifyAndClearExpectations(&gc); ::testing::Mock::VerifyAndClearExpectations(&gc);
EXPECT_CALL( EXPECT_CALL(
gc, CollectGarbage(::testing::Field( gc, CollectGarbage(::testing::Field(
&GarbageCollector::Config::stack_state, &GCConfig::stack_state, StackState::kMayContainHeapPointers)));
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0);
// Allocate the rest needed to trigger atomic gc (). // Allocate the rest needed to trigger atomic gc ().
FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes); FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes);
......
...@@ -27,11 +27,11 @@ class GCHeapTest : public testing::TestWithHeap { ...@@ -27,11 +27,11 @@ class GCHeapTest : public testing::TestWithHeap {
public: public:
void ConservativeGC() { void ConservativeGC() {
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig()); GCConfig::ConservativeAtomicConfig());
} }
void PreciseGC() { void PreciseGC() {
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig()); GCConfig::PreciseAtomicConfig());
} }
}; };
...@@ -74,7 +74,7 @@ namespace { ...@@ -74,7 +74,7 @@ namespace {
const void* ConservativeGCReturningObject(cppgc::Heap* heap, const void* ConservativeGCReturningObject(cppgc::Heap* heap,
const void* object) { const void* object) {
internal::Heap::From(heap)->CollectGarbage( internal::Heap::From(heap)->CollectGarbage(
Heap::Config::ConservativeAtomicConfig()); GCConfig::ConservativeAtomicConfig());
return object; return object;
} }
...@@ -113,7 +113,7 @@ class LargeObjectGCDuringCtor final ...@@ -113,7 +113,7 @@ class LargeObjectGCDuringCtor final
: child_(MakeGarbageCollected<GCedWithFinalizer>( : child_(MakeGarbageCollected<GCedWithFinalizer>(
heap->GetAllocationHandle())) { heap->GetAllocationHandle())) {
internal::Heap::From(heap)->CollectGarbage( internal::Heap::From(heap)->CollectGarbage(
Heap::Config::ConservativeAtomicConfig()); GCConfig::ConservativeAtomicConfig());
} }
void Trace(Visitor* visitor) const { visitor->Trace(child_); } void Trace(Visitor* visitor) const { visitor->Trace(child_); }
...@@ -235,8 +235,8 @@ TEST_F(GCHeapTest, IsGarbageCollectionAllowed) { ...@@ -235,8 +235,8 @@ TEST_F(GCHeapTest, IsGarbageCollectionAllowed) {
} }
TEST_F(GCHeapTest, IsMarking) { TEST_F(GCHeapTest, IsMarking) {
GarbageCollector::Config config = GarbageCollector::Config:: GCConfig config =
PreciseIncrementalMarkingConcurrentSweepingConfig(); GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap()); auto* heap = Heap::From(GetHeap());
EXPECT_FALSE(subtle::HeapState::IsMarking(*heap)); EXPECT_FALSE(subtle::HeapState::IsMarking(*heap));
heap->StartIncrementalGarbageCollection(config); heap->StartIncrementalGarbageCollection(config);
...@@ -248,8 +248,8 @@ TEST_F(GCHeapTest, IsMarking) { ...@@ -248,8 +248,8 @@ TEST_F(GCHeapTest, IsMarking) {
} }
TEST_F(GCHeapTest, IsSweeping) { TEST_F(GCHeapTest, IsSweeping) {
GarbageCollector::Config config = GarbageCollector::Config:: GCConfig config =
PreciseIncrementalMarkingConcurrentSweepingConfig(); GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap()); auto* heap = Heap::From(GetHeap());
EXPECT_FALSE(subtle::HeapState::IsSweeping(*heap)); EXPECT_FALSE(subtle::HeapState::IsSweeping(*heap));
heap->StartIncrementalGarbageCollection(config); heap->StartIncrementalGarbageCollection(config);
...@@ -280,8 +280,8 @@ class GCedExpectSweepingOnOwningThread final ...@@ -280,8 +280,8 @@ class GCedExpectSweepingOnOwningThread final
} // namespace } // namespace
TEST_F(GCHeapTest, IsSweepingOnOwningThread) { TEST_F(GCHeapTest, IsSweepingOnOwningThread) {
GarbageCollector::Config config = GarbageCollector::Config:: GCConfig config =
PreciseIncrementalMarkingConcurrentSweepingConfig(); GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig();
auto* heap = Heap::From(GetHeap()); auto* heap = Heap::From(GetHeap());
MakeGarbageCollected<GCedExpectSweepingOnOwningThread>( MakeGarbageCollected<GCedExpectSweepingOnOwningThread>(
heap->GetAllocationHandle(), *heap); heap->GetAllocationHandle(), *heap);
...@@ -316,8 +316,7 @@ class ExpectAtomicPause final : public GarbageCollected<ExpectAtomicPause> { ...@@ -316,8 +316,7 @@ class ExpectAtomicPause final : public GarbageCollected<ExpectAtomicPause> {
} // namespace } // namespace
TEST_F(GCHeapTest, IsInAtomicPause) { TEST_F(GCHeapTest, IsInAtomicPause) {
GarbageCollector::Config config = GCConfig config = GCConfig::PreciseIncrementalConfig();
GarbageCollector::Config::PreciseIncrementalConfig();
auto* heap = Heap::From(GetHeap()); auto* heap = Heap::From(GetHeap());
MakeGarbageCollected<ExpectAtomicPause>(heap->object_allocator(), *heap); MakeGarbageCollected<ExpectAtomicPause>(heap->object_allocator(), *heap);
EXPECT_FALSE(subtle::HeapState::IsInAtomicPause(*heap)); EXPECT_FALSE(subtle::HeapState::IsInAtomicPause(*heap));
......
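For reference, the preset factories used throughout these tests now hang off GCConfig directly; a sketch, not a literal test body:
// Atomic precise GC, then an incremental-marking/concurrent-sweeping start.
auto* heap = Heap::From(GetHeap());
heap->CollectGarbage(GCConfig::PreciseAtomicConfig());
heap->StartIncrementalGarbageCollection(
    GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig());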
...
@@ -25,18 +25,15 @@ namespace internal {
 namespace {
 class MarkerTest : public testing::TestWithHeap {
  public:
-  using MarkingConfig = Marker::MarkingConfig;
-
-  void DoMarking(MarkingConfig::StackState stack_state) {
-    const MarkingConfig config = {MarkingConfig::CollectionType::kMajor,
-                                  stack_state};
+  void DoMarking(StackState stack_state) {
+    const MarkingConfig config = {CollectionType::kMajor, stack_state};
     auto* heap = Heap::From(GetHeap());
     InitializeMarker(*heap, GetPlatformHandle().get(), config);
     marker_->FinishMarking(stack_state);
     // Pretend do finish sweeping as StatsCollector verifies that Notify*
     // methods are called in the right order.
     heap->stats_collector()->NotifySweepingCompleted(
-        GarbageCollector::Config::SweepingType::kAtomic);
+        GCConfig::SweepingType::kAtomic);
   }

   void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
...@@ -80,7 +77,7 @@ TEST_F(MarkerTest, PersistentIsMarked) { ...@@ -80,7 +77,7 @@ TEST_F(MarkerTest, PersistentIsMarked) {
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object); HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked()); EXPECT_TRUE(header.IsMarked());
} }
...@@ -89,7 +86,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) { ...@@ -89,7 +86,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) {
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
HeapObjectHeader& header = HeapObjectHeader::FromObject(parent->child()); HeapObjectHeader& header = HeapObjectHeader::FromObject(parent->child());
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked()); EXPECT_TRUE(header.IsMarked());
} }
...@@ -97,14 +94,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) { ...@@ -97,14 +94,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) {
Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object); HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
} }
TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) { TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked()); EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
access(object); access(object);
} }
...@@ -113,7 +110,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) { ...@@ -113,7 +110,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
HeapObjectHeader& header = HeapObjectHeader::FromObject(object); HeapObjectHeader& header = HeapObjectHeader::FromObject(object);
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
access(object); access(object);
} }
...@@ -123,14 +120,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) { ...@@ -123,14 +120,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) {
WeakPersistent<GCed> weak_object = WeakPersistent<GCed> weak_object =
MakeGarbageCollected<GCed>(GetAllocationHandle()); MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(weak_object); EXPECT_FALSE(weak_object);
} }
{ {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_FALSE(parent->weak_child()); EXPECT_FALSE(parent->weak_child());
} }
} }
...@@ -141,7 +138,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -141,7 +138,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle());
WeakPersistent<GCed> weak_object(object); WeakPersistent<GCed> weak_object(object);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
} }
{ {
...@@ -149,7 +146,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -149,7 +146,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(object); parent->SetWeakChild(object);
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
} }
// Reachable from Member // Reachable from Member
...@@ -159,7 +156,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -159,7 +156,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
MakeGarbageCollected<GCed>(GetAllocationHandle())); MakeGarbageCollected<GCed>(GetAllocationHandle()));
parent->SetChild(weak_object); parent->SetChild(weak_object);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
} }
{ {
...@@ -167,7 +164,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -167,7 +164,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
parent->SetWeakChild(parent->child()); parent->SetWeakChild(parent->child());
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
} }
// Reachable from stack // Reachable from stack
...@@ -175,7 +172,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -175,7 +172,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle());
WeakPersistent<GCed> weak_object(object); WeakPersistent<GCed> weak_object(object);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(weak_object); EXPECT_TRUE(weak_object);
access(object); access(object);
} }
...@@ -184,7 +181,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { ...@@ -184,7 +181,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) {
Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle());
parent->SetWeakChild(object); parent->SetWeakChild(object);
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(parent->weak_child()); EXPECT_TRUE(parent->weak_child());
access(object); access(object);
} }
...@@ -199,7 +196,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) { ...@@ -199,7 +196,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) {
parent->SetWeakChild(parent->child()); parent->SetWeakChild(parent->child());
parent = parent->child(); parent = parent->child();
} }
DoMarking(MarkingConfig::StackState::kNoHeapPointers); DoMarking(StackState::kNoHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
parent = root; parent = root;
for (int i = 0; i < kHierarchyDepth; ++i) { for (int i = 0; i < kHierarchyDepth; ++i) {
...@@ -213,7 +210,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) { ...@@ -213,7 +210,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) {
GCed* root = MakeGarbageCollected<GCed>(GetAllocationHandle()); GCed* root = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); DoMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()).IsMarked());
EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()->child()).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()->child()).IsMarked());
...@@ -244,9 +241,8 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> { ...@@ -244,9 +241,8 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
} // namespace } // namespace
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) { TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
static const Marker::MarkingConfig config = { static const MarkingConfig config = {CollectionType::kMajor,
MarkingConfig::CollectionType::kMajor, StackState::kMayContainHeapPointers};
MarkingConfig::StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>( GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) { GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
...@@ -254,22 +250,20 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) { ...@@ -254,22 +250,20 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
marker->Visitor().Trace(member); marker->Visitor().Trace(member);
}); });
EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked()); EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers); marker()->FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
} }
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) { TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
static const Marker::MarkingConfig config = { static const MarkingConfig config = {CollectionType::kMajor,
MarkingConfig::CollectionType::kMajor, StackState::kMayContainHeapPointers};
MarkingConfig::StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
MakeGarbageCollected<GCedWithCallback>( MakeGarbageCollected<GCedWithCallback>(
GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) { GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj); Member<GCedWithCallback> member(obj);
marker->Visitor().Trace(member); marker->Visitor().Trace(member);
EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked()); EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked());
marker->FinishMarking( marker->FinishMarking(StackState::kMayContainHeapPointers);
MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked());
}); });
} }
...@@ -318,36 +312,34 @@ V8_NOINLINE void RegisterInConstructionObject( ...@@ -318,36 +312,34 @@ V8_NOINLINE void RegisterInConstructionObject(
TEST_F(MarkerTest, TEST_F(MarkerTest,
InConstructionObjectIsEventuallyMarkedDifferentNonEmptyStack) { InConstructionObjectIsEventuallyMarkedDifferentNonEmptyStack) {
static const Marker::MarkingConfig config = { static const MarkingConfig config = {CollectionType::kMajor,
MarkingConfig::CollectionType::kMajor, StackState::kMayContainHeapPointers};
MarkingConfig::StackState::kMayContainHeapPointers};
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
GCObliviousObjectStorage storage; GCObliviousObjectStorage storage;
RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(), RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(),
storage); storage);
EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked()); EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers); marker()->FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
} }
TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) { TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
static const Marker::MarkingConfig config = { static const MarkingConfig config = {
MarkingConfig::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental}; MarkingConfig::MarkingType::kIncremental};
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle()); auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
root->SetWeakChild(tmp); root->SetWeakChild(tmp);
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
while (!marker()->IncrementalMarkingStepForTesting( while (!marker()->IncrementalMarkingStepForTesting(
MarkingConfig::StackState::kNoHeapPointers)) { StackState::kNoHeapPointers)) {
} }
// {root} object must be marked at this point because we do not allow // {root} object must be marked at this point because we do not allow
// encountering kSentinelPointer in WeakMember on regular Trace() calls. // encountering kSentinelPointer in WeakMember on regular Trace() calls.
ASSERT_TRUE(HeapObjectHeader::FromObject(root.Get()).IsMarked()); ASSERT_TRUE(HeapObjectHeader::FromObject(root.Get()).IsMarked());
root->SetWeakChild(kSentinelPointer); root->SetWeakChild(kSentinelPointer);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); marker()->FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(kSentinelPointer, root->weak_child()); EXPECT_EQ(kSentinelPointer, root->weak_child());
} }
...@@ -383,15 +375,14 @@ class ObjectWithEphemeronPair final ...@@ -383,15 +375,14 @@ class ObjectWithEphemeronPair final
} // namespace } // namespace
TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) { TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) {
static const Marker::MarkingConfig config = { static const MarkingConfig config = {CollectionType::kMajor,
MarkingConfig::CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kAtomic}; MarkingConfig::MarkingType::kAtomic};
Persistent<ObjectWithEphemeronPair> obj = Persistent<ObjectWithEphemeronPair> obj =
MakeGarbageCollected<ObjectWithEphemeronPair>(GetAllocationHandle(), MakeGarbageCollected<ObjectWithEphemeronPair>(GetAllocationHandle(),
GetAllocationHandle()); GetAllocationHandle());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); marker()->FinishMarking(StackState::kNoHeapPointers);
ResetMarker(); ResetMarker();
} }
...@@ -399,26 +390,22 @@ TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) { ...@@ -399,26 +390,22 @@ TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) {
class IncrementalMarkingTest : public testing::TestWithHeap { class IncrementalMarkingTest : public testing::TestWithHeap {
public: public:
using MarkingConfig = Marker::MarkingConfig;
static constexpr MarkingConfig IncrementalPreciseMarkingConfig = { static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
MarkingConfig::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
MarkingConfig::StackState::kNoHeapPointers,
MarkingConfig::MarkingType::kIncremental}; MarkingConfig::MarkingType::kIncremental};
void FinishSteps(MarkingConfig::StackState stack_state) { void FinishSteps(StackState stack_state) {
while (!SingleStep(stack_state)) { while (!SingleStep(stack_state)) {
} }
} }
void FinishMarking() { void FinishMarking() {
GetMarkerRef()->FinishMarking( GetMarkerRef()->FinishMarking(StackState::kMayContainHeapPointers);
MarkingConfig::StackState::kMayContainHeapPointers);
// Pretend do finish sweeping as StatsCollector verifies that Notify* // Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order. // methods are called in the right order.
GetMarkerRef().reset(); GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental); GCConfig::SweepingType::kIncremental);
} }
void InitializeMarker(HeapBase& heap, cppgc::Platform* platform, void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
...@@ -430,13 +417,12 @@ class IncrementalMarkingTest : public testing::TestWithHeap { ...@@ -430,13 +417,12 @@ class IncrementalMarkingTest : public testing::TestWithHeap {
MarkerBase* marker() const { return Heap::From(GetHeap())->marker(); } MarkerBase* marker() const { return Heap::From(GetHeap())->marker(); }
private: private:
bool SingleStep(MarkingConfig::StackState stack_state) { bool SingleStep(StackState stack_state) {
return GetMarkerRef()->IncrementalMarkingStepForTesting(stack_state); return GetMarkerRef()->IncrementalMarkingStepForTesting(stack_state);
} }
}; };
constexpr IncrementalMarkingTest::MarkingConfig constexpr MarkingConfig IncrementalMarkingTest::IncrementalPreciseMarkingConfig;
IncrementalMarkingTest::IncrementalPreciseMarkingConfig;
TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) { TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) {
Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle()); Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
...@@ -454,7 +440,7 @@ TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) { ...@@ -454,7 +440,7 @@ TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) {
EXPECT_FALSE(header.IsMarked()); EXPECT_FALSE(header.IsMarked());
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig); IncrementalPreciseMarkingConfig);
FinishSteps(MarkingConfig::StackState::kNoHeapPointers); FinishSteps(StackState::kNoHeapPointers);
EXPECT_TRUE(header.IsMarked()); EXPECT_TRUE(header.IsMarked());
FinishMarking(); FinishMarking();
} }
...@@ -465,7 +451,7 @@ TEST_F(IncrementalMarkingTest, ...@@ -465,7 +451,7 @@ TEST_F(IncrementalMarkingTest,
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig); IncrementalPreciseMarkingConfig);
root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
FinishSteps(MarkingConfig::StackState::kNoHeapPointers); FinishSteps(StackState::kNoHeapPointers);
HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child()); HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child());
EXPECT_TRUE(header.IsMarked()); EXPECT_TRUE(header.IsMarked());
FinishMarking(); FinishMarking();
...@@ -491,10 +477,10 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) { ...@@ -491,10 +477,10 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
header = &HeapObjectHeader::FromObject(obj); header = &HeapObjectHeader::FromObject(obj);
holder->member_ = obj; holder->member_ = obj;
EXPECT_FALSE(header->IsMarked()); EXPECT_FALSE(header->IsMarked());
FinishSteps(MarkingConfig::StackState::kMayContainHeapPointers); FinishSteps(StackState::kMayContainHeapPointers);
EXPECT_FALSE(header->IsMarked()); EXPECT_FALSE(header->IsMarked());
}); });
FinishSteps(MarkingConfig::StackState::kNoHeapPointers); FinishSteps(StackState::kNoHeapPointers);
EXPECT_TRUE(header->IsMarked()); EXPECT_TRUE(header->IsMarked());
FinishMarking(); FinishMarking();
} }
...@@ -502,7 +488,7 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) { ...@@ -502,7 +488,7 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
TEST_F(IncrementalMarkingTest, MarkingRunsOutOfWorkEventually) { TEST_F(IncrementalMarkingTest, MarkingRunsOutOfWorkEventually) {
InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
IncrementalPreciseMarkingConfig); IncrementalPreciseMarkingConfig);
FinishSteps(MarkingConfig::StackState::kNoHeapPointers); FinishSteps(StackState::kNoHeapPointers);
FinishMarking(); FinishMarking();
} }
......
...@@ -20,12 +20,10 @@ namespace { ...@@ -20,12 +20,10 @@ namespace {
class MarkingVerifierTest : public testing::TestWithHeap { class MarkingVerifierTest : public testing::TestWithHeap {
public: public:
using StackState = Heap::Config::StackState;
V8_NOINLINE void VerifyMarking(HeapBase& heap, StackState stack_state, V8_NOINLINE void VerifyMarking(HeapBase& heap, StackState stack_state,
size_t expected_marked_bytes) { size_t expected_marked_bytes) {
Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers(); Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
MarkingVerifier verifier(heap, Heap::Config::CollectionType::kMajor); MarkingVerifier verifier(heap, CollectionType::kMajor);
verifier.Run(stack_state, v8::base::Stack::GetCurrentStackPosition(), verifier.Run(stack_state, v8::base::Stack::GetCurrentStackPosition(),
expected_marked_bytes); expected_marked_bytes);
} }
...@@ -140,16 +138,14 @@ TEST_F(MarkingVerifierTest, DoesntDieOnInConstructionObjectWithWriteBarrier) { ...@@ -140,16 +138,14 @@ TEST_F(MarkingVerifierTest, DoesntDieOnInConstructionObjectWithWriteBarrier) {
Persistent<Holder<GCedWithCallbackAndChild>> persistent = Persistent<Holder<GCedWithCallbackAndChild>> persistent =
MakeGarbageCollected<Holder<GCedWithCallbackAndChild>>( MakeGarbageCollected<Holder<GCedWithCallbackAndChild>>(
GetAllocationHandle()); GetAllocationHandle());
GarbageCollector::Config config = GCConfig config = GCConfig::PreciseIncrementalConfig();
GarbageCollector::Config::PreciseIncrementalConfig();
Heap::From(GetHeap())->StartIncrementalGarbageCollection(config); Heap::From(GetHeap())->StartIncrementalGarbageCollection(config);
MakeGarbageCollected<GCedWithCallbackAndChild>( MakeGarbageCollected<GCedWithCallbackAndChild>(
GetAllocationHandle(), MakeGarbageCollected<GCed>(GetAllocationHandle()), GetAllocationHandle(), MakeGarbageCollected<GCed>(GetAllocationHandle()),
[&persistent](GCedWithCallbackAndChild* obj) { [&persistent](GCedWithCallbackAndChild* obj) {
persistent->object = obj; persistent->object = obj;
}); });
GetMarkerRef()->IncrementalMarkingStepForTesting( GetMarkerRef()->IncrementalMarkingStepForTesting(StackState::kNoHeapPointers);
GarbageCollector::Config::StackState::kNoHeapPointers);
Heap::From(GetHeap())->FinalizeIncrementalGarbageCollectionIfRunning(config); Heap::From(GetHeap())->FinalizeIncrementalGarbageCollectionIfRunning(config);
} }
......
...@@ -51,15 +51,13 @@ class MetricRecorderTest : public testing::TestWithHeap { ...@@ -51,15 +51,13 @@ class MetricRecorderTest : public testing::TestWithHeap {
} }
void StartGC() { void StartGC() {
stats->NotifyMarkingStarted( stats->NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::CollectionType::kMajor, GCConfig::MarkingType::kIncremental,
GarbageCollector::Config::MarkingType::kIncremental, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
} }
void EndGC(size_t marked_bytes) { void EndGC(size_t marked_bytes) {
stats->NotifyMarkingCompleted(marked_bytes); stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted( stats->NotifySweepingCompleted(GCConfig::SweepingType::kIncremental);
GarbageCollector::Config::SweepingType::kIncremental);
} }
StatsCollector* stats; StatsCollector* stats;
...@@ -308,8 +306,7 @@ TEST_F(MetricRecorderTest, ObjectSizeMetricsWithAllocations) { ...@@ -308,8 +306,7 @@ TEST_F(MetricRecorderTest, ObjectSizeMetricsWithAllocations) {
stats->NotifyAllocation(150); stats->NotifyAllocation(150);
stats->NotifyAllocatedMemory(1000); stats->NotifyAllocatedMemory(1000);
stats->NotifyFreedMemory(400); stats->NotifyFreedMemory(400);
stats->NotifySweepingCompleted( stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
EXPECT_EQ(1300u, MetricRecorderImpl::GCCycle_event.objects.before_bytes); EXPECT_EQ(1300u, MetricRecorderImpl::GCCycle_event.objects.before_bytes);
EXPECT_EQ(800, MetricRecorderImpl::GCCycle_event.objects.after_bytes); EXPECT_EQ(800, MetricRecorderImpl::GCCycle_event.objects.after_bytes);
EXPECT_EQ(500u, MetricRecorderImpl::GCCycle_event.objects.freed_bytes); EXPECT_EQ(500u, MetricRecorderImpl::GCCycle_event.objects.freed_bytes);
......
...@@ -107,12 +107,11 @@ class MinorGCTest : public testing::TestWithHeap { ...@@ -107,12 +107,11 @@ class MinorGCTest : public testing::TestWithHeap {
} }
void CollectMinor() { void CollectMinor() {
Heap::From(GetHeap())->CollectGarbage( Heap::From(GetHeap())->CollectGarbage(GCConfig::MinorPreciseAtomicConfig());
Heap::Config::MinorPreciseAtomicConfig());
} }
void CollectMajor() { void CollectMajor() {
Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig()); Heap::From(GetHeap())->CollectGarbage(GCConfig::PreciseAtomicConfig());
} }
const auto& RememberedSlots() const { const auto& RememberedSlots() const {
......
...@@ -79,7 +79,7 @@ class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap { ...@@ -79,7 +79,7 @@ class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap {
GetMarkerRef()->FinishMarking(Config::StackState::kNoHeapPointers); GetMarkerRef()->FinishMarking(Config::StackState::kNoHeapPointers);
GetMarkerRef().reset(); GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic); GCConfig::SweepingType::kAtomic);
} }
void ResetDelegatingTracingController(const char* expected_name = nullptr) { void ResetDelegatingTracingController(const char* expected_name = nullptr) {
...@@ -228,13 +228,11 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) { ...@@ -228,13 +228,11 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) { TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted( stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GarbageCollector::Config::CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
stats_collector->NotifyMarkingCompleted(0); stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted( stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
const StatsCollector::Event& event = const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting(); stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
...@@ -249,10 +247,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) { ...@@ -249,10 +247,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds; for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds;
++scope_id) { ++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted( stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GarbageCollector::Config::CollectionType::kMajor, GCConfig::MarkingType::kIncremental,
GarbageCollector::Config::MarkingType::kIncremental, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
DelegatingTracingControllerImpl::check_expectations = false; DelegatingTracingControllerImpl::check_expectations = false;
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
...@@ -265,7 +262,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) { ...@@ -265,7 +262,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
} }
stats_collector->NotifyMarkingCompleted(0); stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted( stats_collector->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kIncremental); GCConfig::SweepingType::kIncremental);
const StatsCollector::Event& event = const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting(); stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
...@@ -284,10 +281,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) { ...@@ -284,10 +281,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
for (int scope_id = 0; for (int scope_id = 0;
scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) { scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted( stats_collector->NotifyMarkingStarted(GCConfig::CollectionType::kMajor,
GarbageCollector::Config::CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
DelegatingTracingControllerImpl::check_expectations = false; DelegatingTracingControllerImpl::check_expectations = false;
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
...@@ -299,8 +295,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) { ...@@ -299,8 +295,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
} }
} }
stats_collector->NotifyMarkingCompleted(0); stats_collector->NotifyMarkingCompleted(0);
stats_collector->NotifySweepingCompleted( stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
const StatsCollector::Event& event = const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting(); stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
......
...@@ -38,23 +38,21 @@ class StatsCollectorTest : public ::testing::Test { ...@@ -38,23 +38,21 @@ class StatsCollectorTest : public ::testing::Test {
} // namespace } // namespace
TEST_F(StatsCollectorTest, NoMarkedBytes) { TEST_F(StatsCollectorTest, NoMarkedBytes) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
stats.NotifyMarkingCompleted(kNoMarkedBytes); stats.NotifyMarkingCompleted(kNoMarkedBytes);
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
auto event = stats.GetPreviousEventForTesting(); auto event = stats.GetPreviousEventForTesting();
EXPECT_EQ(0u, event.marked_bytes); EXPECT_EQ(0u, event.marked_bytes);
} }
TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) { TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
stats.NotifyMarkingCompleted(1024); stats.NotifyMarkingCompleted(1024);
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
auto event = stats.GetPreviousEventForTesting(); auto event = stats.GetPreviousEventForTesting();
EXPECT_EQ(1024u, event.marked_bytes); EXPECT_EQ(1024u, event.marked_bytes);
} }
...@@ -74,54 +72,50 @@ TEST_F(StatsCollectorTest, AlllocationReportAboveAllocationThresholdBytes) { ...@@ -74,54 +72,50 @@ TEST_F(StatsCollectorTest, AlllocationReportAboveAllocationThresholdBytes) {
} }
TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) { TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
EXPECT_EQ(0u, stats.allocated_object_size()); EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes); stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size()); EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
EXPECT_EQ(0u, stats.allocated_object_size()); EXPECT_EQ(0u, stats.allocated_object_size());
} }
TEST_F(StatsCollectorTest, AllocatedObjectSize) { TEST_F(StatsCollectorTest, AllocatedObjectSize) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize); FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize); stats.NotifyMarkingCompleted(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
} }
TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) { TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize); FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kNoMarkedBytes); stats.NotifyMarkingCompleted(kNoMarkedBytes);
EXPECT_EQ(0u, stats.allocated_object_size()); EXPECT_EQ(0u, stats.allocated_object_size());
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
EXPECT_EQ(0u, stats.allocated_object_size()); EXPECT_EQ(0u, stats.allocated_object_size());
} }
TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) { TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) {
stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats.NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
FakeAllocate(kMinReportedSize); FakeAllocate(kMinReportedSize);
EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size());
stats.NotifyMarkingCompleted(kMinReportedSize); stats.NotifyMarkingCompleted(kMinReportedSize);
FakeAllocate(kMinReportedSize); FakeAllocate(kMinReportedSize);
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
stats.NotifySweepingCompleted( stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size()); EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size());
} }
...@@ -153,12 +147,11 @@ TEST_F(StatsCollectorTest, ObserveAllocatedObjectSizeIncreaseAndDecrease) { ...@@ -153,12 +147,11 @@ TEST_F(StatsCollectorTest, ObserveAllocatedObjectSizeIncreaseAndDecrease) {
namespace { namespace {
void FakeGC(StatsCollector* stats, size_t marked_bytes) { void FakeGC(StatsCollector* stats, size_t marked_bytes) {
stats->NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, stats->NotifyMarkingStarted(CollectionType::kMajor,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::IsForcedGC::kNotForced); GCConfig::IsForcedGC::kNotForced);
stats->NotifyMarkingCompleted(marked_bytes); stats->NotifyMarkingCompleted(marked_bytes);
stats->NotifySweepingCompleted( stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic);
GarbageCollector::Config::SweepingType::kAtomic);
} }
} // namespace } // namespace
......
...@@ -48,9 +48,8 @@ class SweeperTest : public testing::TestWithHeap { ...@@ -48,9 +48,8 @@ class SweeperTest : public testing::TestWithHeap {
// Pretend do finish marking as StatsCollector verifies that Notify* // Pretend do finish marking as StatsCollector verifies that Notify*
// methods are called in the right order. // methods are called in the right order.
heap->stats_collector()->NotifyMarkingStarted( heap->stats_collector()->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor, CollectionType::kMajor, GCConfig::MarkingType::kAtomic,
GarbageCollector::Config::MarkingType::kAtomic, GCConfig::IsForcedGC::kNotForced);
GarbageCollector::Config::IsForcedGC::kNotForced);
heap->stats_collector()->NotifyMarkingCompleted(0); heap->stats_collector()->NotifyMarkingCompleted(0);
const SweepingConfig sweeping_config{ const SweepingConfig sweeping_config{
SweepingConfig::SweepingType::kAtomic, SweepingConfig::SweepingType::kAtomic,
...@@ -226,8 +225,7 @@ class GCInDestructor final : public GarbageCollected<GCInDestructor> { ...@@ -226,8 +225,7 @@ class GCInDestructor final : public GarbageCollected<GCInDestructor> {
~GCInDestructor() { ~GCInDestructor() {
// Instead of directly calling GC, allocations should be supported here as // Instead of directly calling GC, allocations should be supported here as
// well. // well.
heap_->CollectGarbage( heap_->CollectGarbage(internal::GCConfig::ConservativeAtomicConfig());
internal::GarbageCollector::Config::ConservativeAtomicConfig());
} }
void Trace(Visitor*) const {} void Trace(Visitor*) const {}
...@@ -299,11 +297,10 @@ TEST_F(SweeperTest, LazySweepingDuringAllocation) { ...@@ -299,11 +297,10 @@ TEST_F(SweeperTest, LazySweepingDuringAllocation) {
testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get()); GetPlatformHandle().get());
g_destructor_callcount = 0; g_destructor_callcount = 0;
static constexpr Heap::Config config = { static constexpr GCConfig config = {
Heap::Config::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
Heap::Config::StackState::kNoHeapPointers, GCConfig::MarkingType::kAtomic,
Heap::Config::MarkingType::kAtomic, GCConfig::SweepingType::kIncrementalAndConcurrent};
Heap::Config::SweepingType::kIncrementalAndConcurrent};
Heap::From(GetHeap())->CollectGarbage(config); Heap::From(GetHeap())->CollectGarbage(config);
// Incremental sweeping is active and the space should have two pages with // Incremental sweeping is active and the space should have two pages with
// no room for an additional GCedObject. Allocating a new GCedObject should // no room for an additional GCedObject. Allocating a new GCedObject should
...@@ -334,14 +331,13 @@ TEST_F(SweeperTest, LazySweepingNormalPages) { ...@@ -334,14 +331,13 @@ TEST_F(SweeperTest, LazySweepingNormalPages) {
PreciseGC(); PreciseGC();
EXPECT_EQ(0u, g_destructor_callcount); EXPECT_EQ(0u, g_destructor_callcount);
MakeGarbageCollected<GCedObject>(GetAllocationHandle()); MakeGarbageCollected<GCedObject>(GetAllocationHandle());
static constexpr Heap::Config config = { static constexpr GCConfig config = {
Heap::Config::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
Heap::Config::StackState::kNoHeapPointers, GCConfig::MarkingType::kAtomic,
Heap::Config::MarkingType::kAtomic,
// Sweeping type must not include concurrent as that could lead to the // Sweeping type must not include concurrent as that could lead to the
// concurrent sweeper holding onto pages in rare cases which delays // concurrent sweeper holding onto pages in rare cases which delays
// reclamation of objects. // reclamation of objects.
Heap::Config::SweepingType::kIncremental}; GCConfig::SweepingType::kIncremental};
Heap::From(GetHeap())->CollectGarbage(config); Heap::From(GetHeap())->CollectGarbage(config);
EXPECT_EQ(0u, g_destructor_callcount); EXPECT_EQ(0u, g_destructor_callcount);
MakeGarbageCollected<GCedObject>(GetAllocationHandle()); MakeGarbageCollected<GCedObject>(GetAllocationHandle());
...@@ -442,10 +438,9 @@ TEST_F(SweeperTest, CrossThreadPersistentCanBeClearedFromOtherThread) { ...@@ -442,10 +438,9 @@ TEST_F(SweeperTest, CrossThreadPersistentCanBeClearedFromOtherThread) {
testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get()); GetPlatformHandle().get());
Heap::From(GetHeap())->CollectGarbage( Heap::From(GetHeap())->CollectGarbage(
{Heap::Config::CollectionType::kMajor, {CollectionType::kMajor, StackState::kNoHeapPointers,
Heap::Config::StackState::kNoHeapPointers, GCConfig::MarkingType::kAtomic,
Heap::Config::MarkingType::kAtomic, GCConfig::SweepingType::kIncrementalAndConcurrent});
Heap::Config::SweepingType::kIncrementalAndConcurrent});
// `holder` is unreachable (as the stack is not scanned) and will be // `holder` is unreachable (as the stack is not scanned) and will be
// reclaimed. Its payload memory is generally poisoned at this point. The // reclaimed. Its payload memory is generally poisoned at this point. The
// CrossThreadPersistent slot should be unpoisoned. // CrossThreadPersistent slot should be unpoisoned.
...@@ -470,11 +465,10 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) { ...@@ -470,11 +465,10 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) {
testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope(
GetPlatformHandle().get()); GetPlatformHandle().get());
static constexpr Heap::Config config = { static constexpr GCConfig config = {
Heap::Config::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
Heap::Config::StackState::kNoHeapPointers, GCConfig::MarkingType::kAtomic,
Heap::Config::MarkingType::kAtomic, GCConfig::SweepingType::kIncrementalAndConcurrent};
Heap::Config::SweepingType::kIncrementalAndConcurrent};
Heap::From(GetHeap())->CollectGarbage(config); Heap::From(GetHeap())->CollectGarbage(config);
// `holder` is unreachable (as the stack is not scanned) and will be // `holder` is unreachable (as the stack is not scanned) and will be
// reclaimed. Its payload memory is generally poisoned at this point. The // reclaimed. Its payload memory is generally poisoned at this point. The
...@@ -483,10 +477,9 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) { ...@@ -483,10 +477,9 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) {
// GC in the remote heap should also clear `holder->weak_ref`. The slot for // GC in the remote heap should also clear `holder->weak_ref`. The slot for
// `weak_ref` should be unpoisoned by the GC. // `weak_ref` should be unpoisoned by the GC.
Heap::From(remote_heap.get()) Heap::From(remote_heap.get())
->CollectGarbage({Heap::Config::CollectionType::kMajor, ->CollectGarbage({CollectionType::kMajor, StackState::kNoHeapPointers,
Heap::Config::StackState::kNoHeapPointers, GCConfig::MarkingType::kAtomic,
Heap::Config::MarkingType::kAtomic, GCConfig::SweepingType::kAtomic});
Heap::Config::SweepingType::kAtomic});
// Finish the sweeper which will find the CrossThreadPersistent in cleared // Finish the sweeper which will find the CrossThreadPersistent in cleared
// state. // state.
......
...@@ -28,7 +28,7 @@ TEST_F(TestingTest, ...@@ -28,7 +28,7 @@ TEST_F(TestingTest,
auto* gced = MakeGarbageCollected<GCed>(GetHeap()->GetAllocationHandle()); auto* gced = MakeGarbageCollected<GCed>(GetHeap()->GetAllocationHandle());
WeakPersistent<GCed> weak{gced}; WeakPersistent<GCed> weak{gced};
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig()); GCConfig::PreciseAtomicConfig());
EXPECT_FALSE(weak); EXPECT_FALSE(weak);
} }
{ {
...@@ -38,7 +38,7 @@ TEST_F(TestingTest, ...@@ -38,7 +38,7 @@ TEST_F(TestingTest,
GetHeap()->GetHeapHandle(), GetHeap()->GetHeapHandle(),
EmbedderStackState::kMayContainHeapPointers); EmbedderStackState::kMayContainHeapPointers);
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig()); GCConfig::PreciseAtomicConfig());
EXPECT_FALSE(weak); EXPECT_FALSE(weak);
} }
{ {
...@@ -47,7 +47,7 @@ TEST_F(TestingTest, ...@@ -47,7 +47,7 @@ TEST_F(TestingTest,
cppgc::testing::OverrideEmbedderStackStateScope override_stack( cppgc::testing::OverrideEmbedderStackStateScope override_stack(
GetHeap()->GetHeapHandle(), EmbedderStackState::kNoHeapPointers); GetHeap()->GetHeapHandle(), EmbedderStackState::kNoHeapPointers);
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig()); GCConfig::ConservativeAtomicConfig());
EXPECT_TRUE(weak); EXPECT_TRUE(weak);
} }
} }
......
...@@ -87,10 +87,9 @@ class TestWithHeap : public TestWithPlatform { ...@@ -87,10 +87,9 @@ class TestWithHeap : public TestWithPlatform {
// size of the heap and corresponding pages. // size of the heap and corresponding pages.
void ConservativeMemoryDiscardingGC() { void ConservativeMemoryDiscardingGC() {
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
{GarbageCollector::Config::CollectionType::kMajor, {CollectionType::kMajor, Heap::StackState::kMayContainHeapPointers,
Heap::StackState::kMayContainHeapPointers,
cppgc::Heap::MarkingType::kAtomic, cppgc::Heap::SweepingType::kAtomic, cppgc::Heap::MarkingType::kAtomic, cppgc::Heap::SweepingType::kAtomic,
GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible}); GCConfig::FreeMemoryHandling::kDiscardWherePossible});
} }
cppgc::Heap* GetHeap() const { return heap_.get(); } cppgc::Heap* GetHeap() const { return heap_.get(); }
......
...@@ -18,26 +18,23 @@ namespace internal { ...@@ -18,26 +18,23 @@ namespace internal {
namespace { namespace {
class WeakContainerTest : public testing::TestWithHeap { class WeakContainerTest : public testing::TestWithHeap {
public: public:
using Config = Marker::MarkingConfig;
void StartMarking() { void StartMarking() {
CHECK_EQ(0u, CHECK_EQ(0u,
Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes()); Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes());
Config config = {Config::CollectionType::kMajor, MarkingConfig config = {CollectionType::kMajor, StackState::kNoHeapPointers,
Config::StackState::kNoHeapPointers, MarkingConfig::MarkingType::kIncremental};
Config::MarkingType::kIncremental};
GetMarkerRef() = std::make_unique<Marker>( GetMarkerRef() = std::make_unique<Marker>(
Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config); Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config);
GetMarkerRef()->StartMarking(); GetMarkerRef()->StartMarking();
} }
void FinishMarking(Config::StackState stack_state) { void FinishMarking(StackState stack_state) {
GetMarkerRef()->FinishMarking(stack_state); GetMarkerRef()->FinishMarking(stack_state);
marked_bytes_ = marked_bytes_ =
Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes(); Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes();
GetMarkerRef().reset(); GetMarkerRef().reset();
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted(
GarbageCollector::Config::SweepingType::kAtomic); GCConfig::SweepingType::kAtomic);
} }
size_t GetMarkedBytes() const { return marked_bytes_; } size_t GetMarkedBytes() const { return marked_bytes_; }
...@@ -96,7 +93,7 @@ TEST_F(WeakContainerTest, TraceableGCedTraced) { ...@@ -96,7 +93,7 @@ TEST_F(WeakContainerTest, TraceableGCedTraced) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers); FinishMarking(StackState::kNoHeapPointers);
EXPECT_NE(0u, obj->n_trace_calls); EXPECT_NE(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
} }
...@@ -107,7 +104,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTraced) { ...@@ -107,7 +104,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTraced) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers); FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(0u, obj->n_trace_calls); EXPECT_EQ(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
} }
...@@ -118,7 +115,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTracedConservatively) { ...@@ -118,7 +115,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTracedConservatively) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers); FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_NE(0u, obj->n_trace_calls); EXPECT_NE(0u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
} }
...@@ -129,7 +126,7 @@ TEST_F(WeakContainerTest, PreciseGCTracesWeakContainerWhenTraced) { ...@@ -129,7 +126,7 @@ TEST_F(WeakContainerTest, PreciseGCTracesWeakContainerWhenTraced) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kNoHeapPointers); FinishMarking(StackState::kNoHeapPointers);
EXPECT_EQ(1u, obj->n_trace_calls); EXPECT_EQ(1u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
} }
...@@ -140,7 +137,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainer) { ...@@ -140,7 +137,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainer) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers); FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_EQ(2u, obj->n_trace_calls); EXPECT_EQ(2u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
} }
...@@ -155,7 +152,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainerOnce) { ...@@ -155,7 +152,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainerOnce) {
obj->n_trace_calls = 0u; obj->n_trace_calls = 0u;
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr);
FinishMarking(Config::StackState::kMayContainHeapPointers); FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_EQ(1u, obj->n_trace_calls); EXPECT_EQ(1u, obj->n_trace_calls);
EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes());
} }
...@@ -183,7 +180,7 @@ TEST_F(WeakContainerTest, WeakContainerWeakCallbackCalled) { ...@@ -183,7 +180,7 @@ TEST_F(WeakContainerTest, WeakContainerWeakCallbackCalled) {
StartMarking(); StartMarking();
GetMarkerRef()->Visitor().TraceWeakContainer(obj, WeakCallback::callback, GetMarkerRef()->Visitor().TraceWeakContainer(obj, WeakCallback::callback,
obj); obj);
FinishMarking(Config::StackState::kMayContainHeapPointers); FinishMarking(StackState::kMayContainHeapPointers);
EXPECT_NE(0u, WeakCallback::n_callback_called); EXPECT_NE(0u, WeakCallback::n_callback_called);
EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes());
EXPECT_EQ(obj, WeakCallback::obj); EXPECT_EQ(obj, WeakCallback::obj);
......
...@@ -26,11 +26,11 @@ class WorkloadsTest : public testing::TestWithHeap { ...@@ -26,11 +26,11 @@ class WorkloadsTest : public testing::TestWithHeap {
public: public:
void ConservativeGC() { void ConservativeGC() {
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::ConservativeAtomicConfig()); GCConfig::ConservativeAtomicConfig());
} }
void PreciseGC() { void PreciseGC() {
internal::Heap::From(GetHeap())->CollectGarbage( internal::Heap::From(GetHeap())->CollectGarbage(
Heap::Config::PreciseAtomicConfig()); GCConfig::PreciseAtomicConfig());
} }
}; };
......
...@@ -29,16 +29,15 @@ class V8_NODISCARD IncrementalMarkingScope { ...@@ -29,16 +29,15 @@ class V8_NODISCARD IncrementalMarkingScope {
marker_->FinishMarking(kIncrementalConfig.stack_state); marker_->FinishMarking(kIncrementalConfig.stack_state);
} }
static constexpr Marker::MarkingConfig kIncrementalConfig{ static constexpr MarkingConfig kIncrementalConfig{
Marker::MarkingConfig::CollectionType::kMajor, CollectionType::kMajor, StackState::kNoHeapPointers,
Marker::MarkingConfig::StackState::kNoHeapPointers, MarkingConfig::MarkingType::kIncremental};
Marker::MarkingConfig::MarkingType::kIncremental};
private: private:
MarkerBase* marker_; MarkerBase* marker_;
}; };
constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig; constexpr MarkingConfig IncrementalMarkingScope::kIncrementalConfig;
class V8_NODISCARD ExpectWriteBarrierFires final class V8_NODISCARD ExpectWriteBarrierFires final
: private IncrementalMarkingScope { : private IncrementalMarkingScope {
......
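Every hunk above applies the same mechanical rename: config enums that were previously nested under GarbageCollector::Config, Heap::Config, or Marker::MarkingConfig are now spelled via the namespace-scope types introduced in heap-config.h (GCConfig, MarkingConfig, CollectionType, StackState). The following self-contained toy sketch is not V8 code; all names, the namespace, and the preset helper are illustrative stand-ins modeled on the hunks above, shown only to make the before/after shape of the flattening concrete.

// Toy sketch (not V8 code): mirrors the rename pattern applied in the hunks
// above. Enums that used to be nested, e.g.
// GarbageCollector::Config::CollectionType::kMajor, are hoisted to namespace
// scope in a shared header, so call sites use the flat spelling
// (CollectionType::kMajor, StackState::kNoHeapPointers, GCConfig::...).
// All declarations below are illustrative stand-ins, not the actual V8 API.
#include <cstdio>

namespace heap_config {  // hypothetical stand-in for cppgc's heap-config.h

enum class CollectionType { kMinor, kMajor };
enum class StackState { kNoHeapPointers, kMayContainHeapPointers };

struct GCConfig {
  enum class MarkingType { kAtomic, kIncremental };
  enum class SweepingType { kAtomic, kIncremental };

  // Named preset, analogous to GCConfig::PreciseAtomicConfig() in the tests.
  static constexpr GCConfig PreciseAtomicConfig() {
    return {CollectionType::kMajor, StackState::kNoHeapPointers,
            MarkingType::kAtomic, SweepingType::kAtomic};
  }

  CollectionType collection_type;
  StackState stack_state;
  MarkingType marking_type;
  SweepingType sweeping_type;
};

}  // namespace heap_config

int main() {
  using namespace heap_config;
  // Flat spelling at the call site, as in the updated unittests above.
  constexpr GCConfig explicit_config = {
      CollectionType::kMajor, StackState::kNoHeapPointers,
      GCConfig::MarkingType::kAtomic, GCConfig::SweepingType::kIncremental};
  constexpr GCConfig preset = GCConfig::PreciseAtomicConfig();
  std::printf("major=%d precise=%d\n",
              explicit_config.collection_type == CollectionType::kMajor,
              preset.stack_state == StackState::kNoHeapPointers);
}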