Commit 58ca454f authored by Omer Katz, committed by Commit Bot

cppgc: Support incremental marking without non-nested tasks

For the standalone library, some platform implementations might not
support non-nested tasks. We can still offer incremental marking in
such cases using regular tasks and without assuming an empty stack.
(cppgc's default platform e.g. doesn't support non-nested tasks.)

This CL also updates GCInvoker to not trigger an incremental GC if we
won't be able to finalize it. That makes finalizing through a
non-nested incremental task safe.

Bug: chromium:1056170
Change-Id: I85f0c9f2efe643cb87dd65d80417eea0d6ee5d52
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2414217
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69971}
parent 2eb6b4bb
......@@ -26,8 +26,11 @@ class V8_EXPORT DefaultTaskRunner final : public cppgc::TaskRunner {
DefaultTaskRunner& operator=(const DefaultTaskRunner&) = delete;
void PostTask(std::unique_ptr<cppgc::Task> task) override;
void PostNonNestableTask(std::unique_ptr<cppgc::Task> task) override;
void PostDelayedTask(std::unique_ptr<cppgc::Task> task, double) override;
bool NonNestableTasksEnabled() const final { return false; }
bool NonNestableDelayedTasksEnabled() const final { return false; }
void PostNonNestableTask(std::unique_ptr<cppgc::Task> task) override;
void PostNonNestableDelayedTask(std::unique_ptr<cppgc::Task> task,
double) override;
......
......@@ -107,7 +107,9 @@ void UnifiedHeapMarker::AddObject(void* object) {
// Constructs the V8-embedded cppgc heap on top of HeapBase, adapting the
// isolate's platform via CppgcPlatformAdapter.
// NOTE(review): this span is diff residue from a review page — the old
// initializer line `custom_spaces),` and the new lines passing
// kSupportsConservativeStackScan are both present; only the new form is
// valid code. V8 always supports conservative stack scanning here.
CppHeap::CppHeap(v8::Isolate* isolate, size_t custom_spaces)
: cppgc::internal::HeapBase(std::make_shared<CppgcPlatformAdapter>(isolate),
custom_spaces),
custom_spaces,
cppgc::internal::HeapBase::StackSupport::
kSupportsConservativeStackScan),
isolate_(*reinterpret_cast<Isolate*>(isolate)) {
// Unified-heap marking replaces the wrapper-based incremental marking path.
CHECK(!FLAG_incremental_marking_wrappers);
}
......
......@@ -7,6 +7,7 @@
#include <chrono> // NOLINT(build/c++11)
#include <thread> // NOLINT(build/c++11)
#include "src/base/logging.h"
#include "src/base/page-allocator.h"
namespace cppgc {
......@@ -37,18 +38,18 @@ void DefaultTaskRunner::PostTask(std::unique_ptr<cppgc::Task> task) {
tasks_.push_back(std::move(task));
}
// Pre-change version (removed by this CL): silently degraded non-nestable
// tasks to regular nestable tasks by forwarding to PostTask().
void DefaultTaskRunner::PostNonNestableTask(std::unique_ptr<cppgc::Task> task) {
PostTask(std::move(task));
}
// Posts a delayed task. The delay (seconds, second parameter) is ignored by
// this default implementation — the task is enqueued immediately via
// PostTask().
void DefaultTaskRunner::PostDelayedTask(std::unique_ptr<cppgc::Task> task,
double) {
PostTask(std::move(task));
}
void DefaultTaskRunner::PostNonNestableDelayedTask(
std::unique_ptr<cppgc::Task> task, double) {
PostTask(std::move(task));
// Non-nestable tasks are not supported by the default task runner
// (NonNestableTasksEnabled() returns false), so reaching this method is a
// programming error on the caller's side.
void DefaultTaskRunner::PostNonNestableTask(std::unique_ptr<cppgc::Task>) {
UNREACHABLE();
}
// Non-nestable delayed tasks are likewise unsupported
// (NonNestableDelayedTasksEnabled() returns false); callers must check the
// capability bit before posting.
void DefaultTaskRunner::PostNonNestableDelayedTask(std::unique_ptr<cppgc::Task>,
double) {
UNREACHABLE();
}
void DefaultTaskRunner::PostIdleTask(std::unique_ptr<cppgc::IdleTask> task) {
......
......@@ -93,6 +93,17 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) {
void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection(
GarbageCollector::Config config) {
if ((stack_support_ !=
cppgc::Heap::StackSupport::kSupportsConservativeStackScan) &&
(!platform_->GetForegroundTaskRunner() ||
!platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled())) {
// In this configuration the GC finalization can only be triggered through
// ForceGarbageCollectionSlow. If incremental GC is started, there is no
// way to know how long it will remain enabled (and the write barrier with
// it). For that reason, we do not support running incremental GCs in this
// configuration.
return;
}
// No need to postpone starting incremental GC since the stack is not scanned
// until GC finalization.
collector_->StartIncrementalGarbageCollection(config);
......
......@@ -54,7 +54,7 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
} // namespace
HeapBase::HeapBase(std::shared_ptr<cppgc::Platform> platform,
size_t custom_spaces)
size_t custom_spaces, StackSupport stack_support)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
......@@ -70,7 +70,8 @@ HeapBase::HeapBase(std::shared_ptr<cppgc::Platform> platform,
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()),
object_allocator_(&raw_heap_, page_backend_.get(),
stats_collector_.get()),
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()) {
sweeper_(&raw_heap_, platform_.get(), stats_collector_.get()),
stack_support_(stack_support) {
}
HeapBase::~HeapBase() = default;
......
......@@ -44,6 +44,8 @@ class StatsCollector;
// Base class for heap implementations.
class V8_EXPORT_PRIVATE HeapBase {
public:
using StackSupport = cppgc::Heap::StackSupport;
// NoGCScope allows going over limits and avoids triggering garbage
// collection triggered through allocations or even explicitly.
class V8_EXPORT_PRIVATE NoGCScope final {
......@@ -60,7 +62,8 @@ class V8_EXPORT_PRIVATE HeapBase {
HeapBase& heap_;
};
HeapBase(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces);
HeapBase(std::shared_ptr<cppgc::Platform> platform, size_t custom_spaces,
StackSupport stack_support);
virtual ~HeapBase();
HeapBase(const HeapBase&) = delete;
......@@ -116,6 +119,8 @@ class V8_EXPORT_PRIVATE HeapBase {
size_t ObjectPayloadSize() const;
StackSupport stack_support() const { return stack_support_; }
void AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
protected:
......@@ -150,6 +155,8 @@ class V8_EXPORT_PRIVATE HeapBase {
size_t no_gc_scope_ = 0;
const StackSupport stack_support_;
friend class MarkerBase::IncrementalMarkingTask;
friend class testing::TestWithHeap;
};
......
......@@ -77,7 +77,7 @@ void CheckConfig(Heap::Config config) {
// Standalone cppgc heap constructor: forwards the embedder-provided
// stack-support capability from HeapOptions into HeapBase (new) and into the
// GCInvoker, which uses it to decide whether incremental GC may be started.
// NOTE(review): diff residue — both the old HeapBase(...) initializer line
// and its replacement (adding options.stack_support) are present below; only
// the second is valid code.
Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options)
: HeapBase(platform, options.custom_spaces.size()),
: HeapBase(platform, options.custom_spaces.size(), options.stack_support),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints) {}
......
......@@ -138,27 +138,44 @@ size_t GetNextIncrementalStepDuration(IncrementalMarkingSchedule& schedule,
constexpr v8::base::TimeDelta MarkerBase::kMaximumIncrementalStepDuration;
// Pre-change constructor (removed by this CL): no per-task stack state; the
// task body hard-coded StackState::kNoHeapPointers.
MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask(MarkerBase* marker)
: marker_(marker), handle_(Handle::NonEmptyTag{}) {}
// Constructs an incremental-marking task that carries the stack state chosen
// at posting time (kNoHeapPointers for non-nestable tasks, otherwise
// kMayContainHeapPointers); Run() uses it for the step and finalization.
MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask(
MarkerBase* marker, MarkingConfig::StackState stack_state)
: marker_(marker),
stack_state_(stack_state),
handle_(Handle::NonEmptyTag{}) {}
// static
// Posts an incremental-marking task on |runner|, preferring non-nestable
// posting (which guarantees an empty native stack, so kNoHeapPointers) and
// falling back to a regular task with kMayContainHeapPointers.
// Returns a cancellation handle for the posted task.
MarkerBase::IncrementalMarkingTask::Handle
MarkerBase::IncrementalMarkingTask::Post(v8::TaskRunner* runner,
MarkerBase* marker) {
// NOTE(review): the next line is the pre-change construction retained by the
// diff rendering; the valid replacement is the two-argument construction
// below.
auto task = std::make_unique<IncrementalMarkingTask>(marker);
// Incremental GC is possible only via the GCInvoker, so getting here
// guarantees that either non-nestable tasks or conservative stack
// scanning are supported. This is required so that the incremental
// task can safely finalize GC if needed.
DCHECK_IMPLIES(marker->heap().stack_support() !=
HeapBase::StackSupport::kSupportsConservativeStackScan,
runner->NonNestableTasksEnabled());
MarkingConfig::StackState stack_state_for_task =
runner->NonNestableTasksEnabled()
? MarkingConfig::StackState::kNoHeapPointers
: MarkingConfig::StackState::kMayContainHeapPointers;
auto task =
std::make_unique<IncrementalMarkingTask>(marker, stack_state_for_task);
// Copy the handle before std::move(task) consumes the task.
auto handle = task->handle_;
if (runner->NonNestableTasksEnabled()) {
runner->PostNonNestableTask(std::move(task));
} else {
runner->PostTask(std::move(task));
}
return handle;
}
// Runs one incremental marking step with the stack state captured at posting
// time; if the step reports completion, triggers GC finalization.
// NOTE(review): diff residue — old lines (hard-coded kNoHeapPointers) and
// their replacements (using stack_state_) are interleaved below; only the
// stack_state_ variants are the valid post-change code.
void MarkerBase::IncrementalMarkingTask::Run() {
// Task may have been canceled (e.g. GC already finalized elsewhere).
if (handle_.IsCanceled()) return;
if (marker_->IncrementalMarkingStep(
MarkingConfig::StackState::kNoHeapPointers)) {
if (marker_->IncrementalMarkingStep(stack_state_)) {
// Incremental marking is done so should finalize GC.
marker_->heap().FinalizeIncrementalGarbageCollectionIfNeeded(
MarkingConfig::StackState::kNoHeapPointers);
marker_->heap().FinalizeIncrementalGarbageCollectionIfNeeded(stack_state_);
}
}
......
......@@ -103,7 +103,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
public:
using Handle = SingleThreadedHandle;
explicit IncrementalMarkingTask(MarkerBase*);
IncrementalMarkingTask(MarkerBase*, MarkingConfig::StackState);
static Handle Post(v8::TaskRunner*, MarkerBase*);
......@@ -111,6 +111,7 @@ class V8_EXPORT_PRIVATE MarkerBase {
void Run() final;
MarkerBase* const marker_;
MarkingConfig::StackState stack_state_;
// TODO(chromium:1056170): Change to CancelableTask.
Handle handle_;
};
......
......@@ -129,7 +129,8 @@ TEST(GCInvokerTest, IncrementalGCIsStarted) {
EXPECT_CALL(
gc, StartIncrementalGarbageCollection(::testing::Field(
&GarbageCollector::Config::stack_state,
GarbageCollector::Config::StackState::kMayContainHeapPointers)));
GarbageCollector::Config::StackState::kMayContainHeapPointers)))
.Times(0);
invoker_without_support.StartIncrementalGarbageCollection(
GarbageCollector::Config::ConservativeIncrementalConfig());
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment