Commit d46c94db authored by Omer Katz, committed by Commit Bot

cppgc: Allow to disable incremental marking/sweeping

Add fields to HeapOptions that specify, at heap creation time, that the
heap does not support incremental/concurrent marking and/or sweeping.
This only applies to standalone heaps.
When a GC is triggered (either explicitly or by the heap-growing
heuristics), the given config is restricted so that unsupported
marking/sweeping types are never requested.

Bug: chromium:1156170
Change-Id: Id7b5cf82962e7c40920f942df9415d798e2b6686
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2581961
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71698}
parent 7608e28a
@@ -56,6 +56,41 @@ class V8_EXPORT Heap {
kNoConservativeStackScan,
};
/**
* Specifies supported marking types.
*/
enum class MarkingType : uint8_t {
/**
* Atomic stop-the-world marking. This option does not require any write
* barriers but is the most intrusive in terms of jank.
*/
kAtomic,
/**
* Incremental marking, i.e., marking is interleaved with the rest of the
* application on the same thread.
*/
kIncremental,
/**
* Incremental and concurrent marking.
*/
kIncrementalAndConcurrent
};
/**
* Specifies supported sweeping types.
*/
enum class SweepingType : uint8_t {
/**
* Atomic stop-the-world sweeping. All sweeping is performed at once.
*/
kAtomic,
/**
* Incremental and concurrent sweeping. Sweeping is split and interleaved
* with the rest of the application.
*/
kIncrementalAndConcurrent
};
/**
* Constraints for a Heap setup.
*/
@@ -98,6 +133,16 @@ class V8_EXPORT Heap {
*/
StackSupport stack_support = StackSupport::kSupportsConservativeStackScan;
/**
* Specifies which types of marking are supported by the heap.
*/
MarkingType marking_support = MarkingType::kIncrementalAndConcurrent;
/**
* Specifies which types of sweeping are supported by the heap.
*/
SweepingType sweeping_support = SweepingType::kIncrementalAndConcurrent;
/**
* Resource constraints specifying various properties that the internal
* GC scheduler follows.
......
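Note: the fields above give embedders of standalone heaps a creation-time switch. A minimal usage sketch (not part of this CL; assumes the standard cppgc setup with DefaultPlatform and a prior cppgc::InitializeProcess call):

#include <memory>
#include <utility>

#include "include/cppgc/default-platform.h"
#include "include/cppgc/heap.h"

// Creates a standalone heap on which every GC is a stop-the-world pause:
// incremental/concurrent marking and sweeping are disabled up front.
std::unique_ptr<cppgc::Heap> CreateAtomicOnlyHeap(
    std::shared_ptr<cppgc::DefaultPlatform> platform) {
  cppgc::Heap::HeapOptions options = cppgc::Heap::HeapOptions::Default();
  options.marking_support = cppgc::Heap::MarkingType::kAtomic;
  options.sweeping_support = cppgc::Heap::SweepingType::kAtomic;
  return cppgc::Heap::Create(std::move(platform), std::move(options));
}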
@@ -199,7 +199,7 @@ void CppHeap::TracePrologue(TraceFlags flags) {
const UnifiedHeapMarker::MarkingConfig marking_config{
UnifiedHeapMarker::MarkingConfig::CollectionType::kMajor,
cppgc::Heap::StackState::kNoHeapPointers,
UnifiedHeapMarker::MarkingConfig::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
flags == TraceFlags::kForced
? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
: UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
@@ -234,9 +234,8 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
AsBase(), cppgc::internal::StatsCollector::kAtomicMark);
is_in_final_pause_ = true;
marker_->EnterAtomicPause(stack_state);
if (compactor_.CancelIfShouldNotCompact(
UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic,
stack_state)) {
if (compactor_.CancelIfShouldNotCompact(cppgc::Heap::MarkingType::kAtomic,
stack_state)) {
marker_->NotifyCompactionCancelled();
}
}
......
@@ -206,16 +206,14 @@ ConcurrentMarkerBase::~ConcurrentMarkerBase() {
!concurrent_marking_handle_->IsValid());
}
bool ConcurrentMarkerBase::NotifyIncrementalMutatorStepCompleted() {
void ConcurrentMarkerBase::NotifyIncrementalMutatorStepCompleted() {
DCHECK(concurrent_marking_handle_);
if (HasWorkForConcurrentMarking(marking_worklists_)) {
// Notifies the scheduler that max concurrency might have increased.
// This will adjust the number of markers if necessary.
IncreaseMarkingPriorityIfNeeded();
concurrent_marking_handle_->NotifyConcurrencyIncrease();
return false;
}
return !concurrent_marking_handle_->IsActive();
}
void ConcurrentMarkerBase::IncreaseMarkingPriorityIfNeeded() {
......
@@ -28,7 +28,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarkerBase {
void JoinForTesting();
bool NotifyIncrementalMutatorStepCompleted();
void NotifyIncrementalMutatorStepCompleted();
bool IsActive() const;
......
@@ -30,15 +30,19 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
public:
using Handle = SingleThreadedHandle;
static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner) {
auto task = std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector);
static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner,
GarbageCollector::Config config) {
auto task =
std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector, config);
auto handle = task->GetHandle();
runner->PostNonNestableTask(std::move(task));
return handle;
}
explicit GCTask(GarbageCollector* collector)
explicit GCTask(GarbageCollector* collector,
GarbageCollector::Config config)
: collector_(collector),
config_(config),
handle_(Handle::NonEmptyTag{}),
saved_epoch_(collector->epoch()) {}
@@ -46,14 +50,14 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector {
void Run() final {
if (handle_.IsCanceled() || (collector_->epoch() != saved_epoch_)) return;
collector_->CollectGarbage(
GarbageCollector::Config::PreciseAtomicConfig());
collector_->CollectGarbage(config_);
handle_.Cancel();
}
Handle GetHandle() { return handle_; }
GarbageCollector* collector_;
GarbageCollector::Config config_;
Handle handle_;
size_t saved_epoch_;
};
@@ -78,21 +82,27 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() {
}
void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
DCHECK_EQ(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
if ((config.stack_state ==
GarbageCollector::Config::StackState::kNoHeapPointers) ||
(stack_support_ ==
cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) {
collector_->CollectGarbage(config);
} else if (platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
} else if (platform_->GetForegroundTaskRunner() &&
platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
if (!gc_task_handle_) {
gc_task_handle_ =
GCTask::Post(collector_, platform_->GetForegroundTaskRunner().get());
// Force a precise GC since it will run in a non-nestable task.
config.stack_state =
GarbageCollector::Config::StackState::kNoHeapPointers;
gc_task_handle_ = GCTask::Post(
collector_, platform_->GetForegroundTaskRunner().get(), config);
}
}
}
void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
GarbageCollector::Config config) {
DCHECK_NE(config.marking_type, cppgc::Heap::MarkingType::kAtomic);
if ((stack_support_ !=
cppgc::Heap::StackSupport::kSupportsConservativeStackScan) &&
(!platform_->GetForegroundTaskRunner() ||
......
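The invoker change above has two parts: the posted GC task now carries the caller's config (so the requested marking/sweeping types survive the deferral), and since a non-nestable task runs with no embedder frames on the stack, the deferred collection is forced to be precise. A condensed sketch of the dispatch, assuming cppgc::internal context, with the hypothetical PostGCTask standing in for the CL's GCTask::Post:

// Forward declaration of the hypothetical task-posting helper.
void PostGCTask(GarbageCollector* collector, cppgc::TaskRunner* runner,
                GarbageCollector::Config config);

void DispatchGC(GarbageCollector* collector, cppgc::Platform* platform,
                cppgc::Heap::StackSupport stack_support,
                GarbageCollector::Config config) {
  const bool precise =
      config.stack_state ==
      GarbageCollector::Config::StackState::kNoHeapPointers;
  const bool can_scan_stack =
      stack_support ==
      cppgc::Heap::StackSupport::kSupportsConservativeStackScan;
  if (precise || can_scan_stack) {
    // Safe to collect right away.
    collector->CollectGarbage(config);
  } else if (platform->GetForegroundTaskRunner() &&
             platform->GetForegroundTaskRunner()->NonNestableTasksEnabled()) {
    // A non-nestable task runs with no embedder frames on the stack, so the
    // deferred collection can (and must) be precise.
    config.stack_state =
        GarbageCollector::Config::StackState::kNoHeapPointers;
    PostGCTask(collector, platform->GetForegroundTaskRunner().get(), config);
  }
}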
@@ -31,7 +31,8 @@ class HeapGrowing::HeapGrowingImpl final
: public StatsCollector::AllocationObserver {
public:
HeapGrowingImpl(GarbageCollector*, StatsCollector*,
cppgc::Heap::ResourceConstraints);
cppgc::Heap::ResourceConstraints, cppgc::Heap::MarkingType,
cppgc::Heap::SweepingType);
~HeapGrowingImpl();
HeapGrowingImpl(const HeapGrowingImpl&) = delete;
@@ -60,14 +61,21 @@ class HeapGrowing::HeapGrowingImpl final
SingleThreadedHandle gc_task_handle_;
bool disabled_for_testing_ = false;
const cppgc::Heap::MarkingType marking_support_;
const cppgc::Heap::SweepingType sweeping_support_;
};
HeapGrowing::HeapGrowingImpl::HeapGrowingImpl(
GarbageCollector* collector, StatsCollector* stats_collector,
cppgc::Heap::ResourceConstraints constraints)
cppgc::Heap::ResourceConstraints constraints,
cppgc::Heap::MarkingType marking_support,
cppgc::Heap::SweepingType sweeping_support)
: collector_(collector),
stats_collector_(stats_collector),
gc_task_handle_(SingleThreadedHandle::NonEmptyTag{}) {
gc_task_handle_(SingleThreadedHandle::NonEmptyTag{}),
marking_support_(marking_support),
sweeping_support_(sweeping_support) {
if (constraints.initial_heap_size_bytes > 0) {
initial_heap_size_ = constraints.initial_heap_size_bytes;
}
@@ -85,10 +93,15 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) {
size_t allocated_object_size = stats_collector_->allocated_object_size();
if (allocated_object_size > limit_for_atomic_gc_) {
collector_->CollectGarbage(
GarbageCollector::Config::ConservativeAtomicConfig());
{GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::StackState::kMayContainHeapPointers,
GarbageCollector::Config::MarkingType::kAtomic, sweeping_support_});
} else if (allocated_object_size > limit_for_incremental_gc_) {
if (marking_support_ == cppgc::Heap::MarkingType::kAtomic) return;
collector_->StartIncrementalGarbageCollection(
GarbageCollector::Config::ConservativeIncrementalConfig());
{GarbageCollector::Config::CollectionType::kMajor,
GarbageCollector::Config::StackState::kMayContainHeapPointers,
marking_support_, sweeping_support_});
}
}
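Worth noting in the configs built above: the atomic-limit path pins the marking type to kAtomic (always supported) but forwards sweeping_support_, so a heap that allows concurrent sweeping still sweeps lazily/concurrently after a stop-the-world mark; the incremental path forwards both support levels and bails out entirely on atomic-only heaps, leaving the atomic limit as their only trigger. A sketch with hypothetical helper names (not in the CL):

// Hypothetical helpers spelling out the two configs the heuristic can
// produce. Only the marking type is pinned on the atomic path; sweeping
// keeps whatever the heap supports.
GarbageCollector::Config AtomicMarkConfig(
    cppgc::Heap::SweepingType sweeping_support) {
  return {GarbageCollector::Config::CollectionType::kMajor,
          GarbageCollector::Config::StackState::kMayContainHeapPointers,
          GarbageCollector::Config::MarkingType::kAtomic,
          sweeping_support};  // May still be kIncrementalAndConcurrent.
}

GarbageCollector::Config IncrementalMarkConfig(
    cppgc::Heap::MarkingType marking_support,
    cppgc::Heap::SweepingType sweeping_support) {
  // Callers bail out first if marking_support is kAtomic; starting an
  // incremental GC with atomic marking would trip the DCHECK_NE in
  // Heap::StartIncrementalGarbageCollection.
  return {GarbageCollector::Config::CollectionType::kMajor,
          GarbageCollector::Config::StackState::kMayContainHeapPointers,
          marking_support, sweeping_support};
}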
@@ -133,9 +146,12 @@ void HeapGrowing::HeapGrowingImpl::DisableForTesting() {
HeapGrowing::HeapGrowing(GarbageCollector* collector,
StatsCollector* stats_collector,
cppgc::Heap::ResourceConstraints constraints)
cppgc::Heap::ResourceConstraints constraints,
cppgc::Heap::MarkingType marking_support,
cppgc::Heap::SweepingType sweeping_support)
: impl_(std::make_unique<HeapGrowing::HeapGrowingImpl>(
collector, stats_collector, constraints)) {}
collector, stats_collector, constraints, marking_support,
sweeping_support)) {}
HeapGrowing::~HeapGrowing() = default;
......
@@ -34,7 +34,8 @@ class V8_EXPORT_PRIVATE HeapGrowing final {
kPageSize * RawHeap::kNumberOfRegularSpaces;
HeapGrowing(GarbageCollector*, StatsCollector*,
cppgc::Heap::ResourceConstraints);
cppgc::Heap::ResourceConstraints, cppgc::Heap::MarkingType,
cppgc::Heap::SweepingType);
~HeapGrowing();
HeapGrowing(const HeapGrowing&) = delete;
......
@@ -43,8 +43,7 @@ void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason,
Heap::StackState stack_state) {
internal::Heap::From(this)->CollectGarbage(
{internal::GarbageCollector::Config::CollectionType::kMajor, stack_state,
internal::GarbageCollector::Config::MarkingType::kAtomic,
internal::GarbageCollector::Config::SweepingType::kAtomic,
MarkingType::kAtomic, SweepingType::kAtomic,
internal::GarbageCollector::Config::IsForcedGC::kForced});
}
@@ -71,11 +70,16 @@ class Unmarker final : private HeapVisitor<Unmarker> {
}
};
void CheckConfig(Heap::Config config) {
void CheckConfig(Heap::Config config, Heap::MarkingType marking_support,
Heap::SweepingType sweeping_support) {
CHECK_WITH_MSG(
(config.collection_type != Heap::Config::CollectionType::kMinor) ||
(config.stack_state == Heap::Config::StackState::kNoHeapPointers),
"Minor GCs with stack is currently not supported");
CHECK_LE(static_cast<int>(config.marking_type),
static_cast<int>(marking_support));
CHECK_LE(static_cast<int>(config.sweeping_type),
static_cast<int>(sweeping_support));
}
} // namespace
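The CHECK_LE comparisons above work because both enums are declared from most restrictive to most permissive, so a numerically larger value strictly adds capabilities and "requested <= supported" encodes "the heap supports at least this type". A self-contained sketch of the invariant:

#include <cstdint>

enum class MarkingType : uint8_t {
  kAtomic,                    // 0: always supported.
  kIncremental,               // 1: additionally needs a foreground task runner.
  kIncrementalAndConcurrent,  // 2: additionally needs background threads.
};

// A requested marking type is valid iff it does not exceed what the heap
// declared at creation time.
constexpr bool IsSupported(MarkingType requested, MarkingType supported) {
  return static_cast<int>(requested) <= static_cast<int>(supported);
}

static_assert(IsSupported(MarkingType::kAtomic,
                          MarkingType::kIncrementalAndConcurrent),
              "atomic GCs are always allowed");
static_assert(!IsSupported(MarkingType::kIncrementalAndConcurrent,
                           MarkingType::kAtomic),
              "an atomic-only heap must reject concurrent marking");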
@@ -85,7 +89,15 @@ Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
: HeapBase(platform, options.custom_spaces, options.stack_support),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints) {}
options.resource_constraints, options.marking_support,
options.sweeping_support),
marking_support_(options.marking_support),
sweeping_support_(options.sweeping_support) {
CHECK_IMPLIES(options.marking_support != MarkingType::kAtomic,
platform_->GetForegroundTaskRunner());
CHECK_IMPLIES(options.sweeping_support != SweepingType::kAtomic,
platform_->GetForegroundTaskRunner());
}
Heap::~Heap() {
NoGCScope no_gc(*this);
@@ -95,7 +107,7 @@ Heap::~Heap() {
void Heap::CollectGarbage(Config config) {
DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config);
CheckConfig(config, marking_support_, sweeping_support_);
if (in_no_gc_scope()) return;
@@ -110,7 +122,8 @@ void Heap::CollectGarbage(Config config) {
void Heap::StartIncrementalGarbageCollection(Config config) {
DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);
CheckConfig(config);
DCHECK_NE(marking_support_, MarkingType::kAtomic);
CheckConfig(config, marking_support_, sweeping_support_);
if (gc_in_progress_ || in_no_gc_scope()) return;
@@ -120,6 +133,9 @@ void Heap::StartIncrementalGarbageCollection(Config config) {
}
void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) {
DCHECK_NE(marking_support_, MarkingType::kAtomic);
CheckConfig(config, marking_support_, sweeping_support_);
if (!gc_in_progress_) return;
DCHECK(!in_no_gc_scope());
......
@@ -52,6 +52,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
GCInvoker gc_invoker_;
HeapGrowing growing_;
const MarkingType marking_support_;
const SweepingType sweeping_support_;
bool gc_in_progress_ = false;
size_t epoch_ = 0;
};
......
@@ -44,11 +44,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
kMajor,
};
using StackState = cppgc::Heap::StackState;
enum class MarkingType : uint8_t {
kAtomic,
kIncremental,
kIncrementalAndConcurrent
};
using MarkingType = cppgc::Heap::MarkingType;
enum class IsForcedGC : uint8_t {
kNotForced,
kForced,
......
@@ -7,6 +7,7 @@
#include <memory>
#include "include/cppgc/heap.h"
#include "src/base/macros.h"
namespace cppgc {
@@ -22,7 +23,7 @@ class ConcurrentSweeperTest;
class V8_EXPORT_PRIVATE Sweeper final {
public:
struct SweepingConfig {
enum class SweepingType : uint8_t { kAtomic, kIncrementalAndConcurrent };
using SweepingType = cppgc::Heap::SweepingType;
enum class CompactableSpaceHandling { kSweep, kIgnore };
SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent;
......
@@ -65,7 +65,9 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
constraints.initial_heap_size_bytes = 1;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_));
FakeAllocate(&stats_collector, 100 * kMB);
}
@@ -77,7 +79,9 @@ TEST(HeapGrowingTest, InitialHeapSize) {
// Use a larger size to avoid running into small-heap optimizations.
constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
constraints.initial_heap_size_bytes = kObjectSize;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
FakeAllocate(&stats_collector, kObjectSize - 1);
EXPECT_CALL(gc, CollectGarbage(::testing::_));
FakeAllocate(&stats_collector, kObjectSize);
@@ -91,7 +95,9 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
constraints.initial_heap_size_bytes = HeapGrowing::kMinLimitIncrease;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_EQ(0u, gc.epoch());
gc.SetLiveBytes(kObjectSize);
FakeAllocate(&stats_collector, kObjectSize + 1);
@@ -107,7 +113,9 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
constraints.initial_heap_size_bytes = 1;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_EQ(0u, gc.epoch());
gc.SetLiveBytes(1);
FakeAllocate(&stats_collector, kLargeAllocation);
@@ -119,7 +127,9 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
StatsCollector stats_collector;
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less than the limit for atomic GC to trigger an incremental GC.
@@ -130,7 +140,9 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) {
StatsCollector stats_collector;
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints);
HeapGrowing growing(&gc, &stats_collector, constraints,
cppgc::Heap::MarkingType::kIncrementalAndConcurrent,
cppgc::Heap::SweepingType::kIncrementalAndConcurrent);
EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
// Allocate 1 byte less than the limit for atomic GC to trigger an incremental GC.
......