Commit d75462ec authored by Michael Lippautz, committed by V8 LUCI CQ

Reland "[cppgc-js] Allow overriding marking support"

This is a reland of commit 2115ba50.

Adds flags that allow overriding marking support. This mirrors
EmbedderHeapTracer, which allows disabling incremental marking
support with `--no-incremental-marking-wrappers`.

The corresponding CppHeap flags are
* `--cppheap-incremental-marking`
* `--cppheap-concurrent-marking`

This allows embedders that use types that do not support incremental
and concurrent marking to switch from EmbedderHeapTracer to CppHeap.
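A minimal usage sketch of the new CppHeapCreateParams fields (the wrapper
slot indices and embedder id are placeholders, and atomic-only marking is
chosen purely for illustration):

  #include <memory>

  #include "cppgc/heap.h"
  #include "v8-cppgc.h"
  #include "v8-platform.h"

  std::unique_ptr<v8::CppHeap> CreateAtomicMarkingCppHeap(
      v8::Platform* platform) {
    // Placeholder wrapper layout; embedders pass their own slot indices.
    v8::WrapperDescriptor wrapper_descriptor(
        /*wrappable_type_index=*/0, /*wrappable_instance_index=*/1,
        /*embedder_id_for_garbage_collected=*/1);
    v8::CppHeapCreateParams params{/*custom_spaces=*/{}, wrapper_descriptor};
    // Restrict marking to stop-the-world pauses; runtime flags can only
    // reduce this further once the heap is attached to an Isolate.
    params.marking_support = cppgc::Heap::MarkingType::kAtomic;
    return v8::CppHeap::Create(platform, params);
  }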

Bug: v8:13207
Change-Id: I43a47d7d035bff5d4b437c5bf01336a895b61217
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3851543
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82693}
parent 8c8f6598
@@ -79,6 +79,18 @@ struct WrapperDescriptor final {
 struct V8_EXPORT CppHeapCreateParams {
   std::vector<std::unique_ptr<cppgc::CustomSpaceBase>> custom_spaces;
   WrapperDescriptor wrapper_descriptor;
+  /**
+   * Specifies which kind of marking is supported by the heap. The type may be
+   * further reduced via runtime flags when attaching the heap to an Isolate.
+   */
+  cppgc::Heap::MarkingType marking_support =
+      cppgc::Heap::MarkingType::kIncrementalAndConcurrent;
+  /**
+   * Specifies which kind of sweeping is supported by the heap. The type may be
+   * further reduced via runtime flags when attaching the heap to an Isolate.
+   */
+  cppgc::Heap::SweepingType sweeping_support =
+      cppgc::Heap::SweepingType::kIncrementalAndConcurrent;
 };
 
 /**
...
@@ -388,6 +388,9 @@ DEFINE_BOOL_READONLY(disable_write_barriers, V8_DISABLE_WRITE_BARRIERS_BOOL,
 // Disable incremental marking barriers
 DEFINE_NEG_IMPLICATION(disable_write_barriers, incremental_marking)
+DEFINE_NEG_IMPLICATION(disable_write_barriers, concurrent_marking)
+DEFINE_NEG_IMPLICATION(disable_write_barriers, cppheap_incremental_marking)
+DEFINE_NEG_IMPLICATION(disable_write_barriers, cppheap_concurrent_marking)

 #ifdef V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
 #define V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS_BOOL true
@@ -1420,6 +1423,18 @@ DEFINE_BOOL(clear_free_memory, false, "initialize free memory with 0")
 DEFINE_BOOL(crash_on_aborted_evacuation, false,
             "crash when evacuation of page fails")

+// v8::CppHeap flags that allow fine-grained control of how C++ memory is
+// reclaimed in the garbage collector.
+DEFINE_BOOL(cppheap_incremental_marking, false,
+            "use incremental marking for CppHeap")
+DEFINE_NEG_NEG_IMPLICATION(incremental_marking, cppheap_incremental_marking)
+DEFINE_WEAK_IMPLICATION(incremental_marking, cppheap_incremental_marking)
+DEFINE_BOOL(cppheap_concurrent_marking, false,
+            "use concurrent marking for CppHeap")
+DEFINE_NEG_NEG_IMPLICATION(cppheap_incremental_marking,
+                           cppheap_concurrent_marking)
+DEFINE_WEAK_IMPLICATION(concurrent_marking, cppheap_concurrent_marking)

 // assembler-ia32.cc / assembler-arm.cc / assembler-arm64.cc / assembler-x64.cc
 #ifdef V8_ENABLE_DEBUG_CODE
 DEFINE_BOOL(debug_code, DEBUG_BOOL,
@@ -2240,6 +2255,7 @@ DEFINE_NEG_IMPLICATION(single_threaded_gc, parallel_pointer_update)
 DEFINE_NEG_IMPLICATION(single_threaded_gc, parallel_scavenge)
 DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_sweeping)
 DEFINE_NEG_IMPLICATION(single_threaded_gc, stress_concurrent_allocation)
+DEFINE_NEG_IMPLICATION(single_threaded_gc, cppheap_concurrent_marking)

 // Web snapshots: 1) expose WebSnapshot.* API 2) interpret scripts as web
 // snapshots if they start with a magic number.
...
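The intended reading of the implications above, sketched as plain C++. This
is an approximation of the flag semantics only, not V8's actual flag-resolution
code: the CppHeap flag is assumed to default to following the corresponding V8
flag (weak implication), to be overridable by an explicit setting, and to be
forced off whenever the parent flag is off (neg-neg implication).

  #include <cassert>

  // Approximation only: resolves --cppheap-incremental-marking from
  // --incremental-marking and an optional explicit user setting.
  bool ResolveCppHeapIncrementalMarking(bool incremental_marking,
                                        bool user_set_cppheap_flag,
                                        bool cppheap_flag_value) {
    if (!incremental_marking) return false;  // --no-incremental-marking wins.
    if (user_set_cppheap_flag) return cppheap_flag_value;  // explicit override.
    return true;  // default follows --incremental-marking.
  }

  int main() {
    // --no-incremental-marking disables the CppHeap flag regardless.
    assert(!ResolveCppHeapIncrementalMarking(false, true, true));
    // By default the CppHeap flag follows --incremental-marking.
    assert(ResolveCppHeapIncrementalMarking(true, false, false));
    // An explicit --no-cppheap-incremental-marking still turns it off.
    assert(!ResolveCppHeapIncrementalMarking(true, true, false));
  }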
@@ -118,8 +118,9 @@ constexpr uint16_t WrapperDescriptor::kUnknownEmbedderId;
 // static
 std::unique_ptr<CppHeap> CppHeap::Create(v8::Platform* platform,
                                          const CppHeapCreateParams& params) {
-  return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
-                                             params.wrapper_descriptor);
+  return std::make_unique<internal::CppHeap>(
+      platform, params.custom_spaces, params.wrapper_descriptor,
+      params.marking_support, params.sweeping_support);
 }

 cppgc::AllocationHandle& CppHeap::GetAllocationHandle() {
@@ -477,15 +478,14 @@ void CppHeap::InitializeOncePerProcess() {
 CppHeap::CppHeap(
     v8::Platform* platform,
     const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
-    const v8::WrapperDescriptor& wrapper_descriptor)
+    const v8::WrapperDescriptor& wrapper_descriptor,
+    cppgc::Heap::MarkingType marking_support,
+    cppgc::Heap::SweepingType sweeping_support)
     : cppgc::internal::HeapBase(
           std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
           cppgc::internal::HeapBase::StackSupport::
               kSupportsConservativeStackScan,
-          // Default marking and sweeping types are only incremental. The types
-          // are updated respecting flags only on GC as the flags are not set
-          // properly during heap setup.
-          MarkingType::kIncremental, SweepingType::kIncremental),
+          marking_support, sweeping_support),
       wrapper_descriptor_(wrapper_descriptor) {
   CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
            wrapper_descriptor_.embedder_id_for_garbage_collected);
@@ -521,6 +521,7 @@ void CppHeap::AttachIsolate(Isolate* isolate) {
   SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
   isolate_->heap()->SetStackStart(base::Stack::GetStackStart());
   oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
+  ReduceGCCapabilititesFromFlags();
   no_gc_scope_--;
 }
@@ -582,13 +583,17 @@ CppHeap::SweepingType CppHeap::SelectSweepingType() const {
   return sweeping_support();
 }

-void CppHeap::UpdateSupportedGCTypesFromFlags() {
-  // Keep the selection simple for now as production configurations do not turn
-  // off parallel and/or concurrent marking independently.
-  if (!FLAG_parallel_marking || !FLAG_concurrent_marking) {
-    marking_support_ = MarkingType::kIncremental;
+void CppHeap::ReduceGCCapabilititesFromFlags() {
+  CHECK_IMPLIES(FLAG_cppheap_concurrent_marking,
+                FLAG_cppheap_incremental_marking);
+  if (FLAG_cppheap_concurrent_marking) {
+    marking_support_ = static_cast<MarkingType>(
+        std::min(marking_support_, MarkingType::kIncrementalAndConcurrent));
+  } else if (FLAG_cppheap_incremental_marking) {
+    marking_support_ = static_cast<MarkingType>(
+        std::min(marking_support_, MarkingType::kIncremental));
   } else {
-    marking_support_ = MarkingType::kIncrementalAndConcurrent;
+    marking_support_ = MarkingType::kAtomic;
   }

   sweeping_support_ = FLAG_single_threaded_gc
@@ -600,8 +605,6 @@ void CppHeap::InitializeTracing(CollectionType collection_type,
                                 GarbageCollectionFlags gc_flags) {
   CHECK(!sweeper_.IsSweepingInProgress());

-  UpdateSupportedGCTypesFromFlags();
-
   // Check that previous cycle metrics for the same collection type have been
   // reported.
   if (GetMetricRecorder()) {
...
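The reduction above relies on cppgc::Heap::MarkingType being ordered from
least to most capable (kAtomic < kIncremental < kIncrementalAndConcurrent),
so std::min can only lower the support configured at heap-creation time,
never raise it. A standalone sketch of that rule, with the flags passed in
as plain booleans for illustration:

  #include <algorithm>

  #include "cppgc/heap.h"

  using MarkingType = cppgc::Heap::MarkingType;

  // Clamp the configured marking support to what the runtime flags allow.
  MarkingType ReduceMarkingSupport(MarkingType configured,
                                   bool cppheap_incremental_marking,
                                   bool cppheap_concurrent_marking) {
    const MarkingType allowed_by_flags =
        cppheap_concurrent_marking    ? MarkingType::kIncrementalAndConcurrent
        : cppheap_incremental_marking ? MarkingType::kIncremental
                                      : MarkingType::kAtomic;
    return std::min(configured, allowed_by_flags);
  }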
@@ -110,10 +110,10 @@ class V8_EXPORT_PRIVATE CppHeap final
     return static_cast<const CppHeap*>(heap);
   }

-  CppHeap(
-      v8::Platform* platform,
-      const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
-      const v8::WrapperDescriptor& wrapper_descriptor);
+  CppHeap(v8::Platform*,
+          const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>&,
+          const v8::WrapperDescriptor&, cppgc::Heap::MarkingType,
+          cppgc::Heap::SweepingType);
   ~CppHeap() final;

   CppHeap(const CppHeap&) = delete;
@@ -167,7 +167,7 @@ class V8_EXPORT_PRIVATE CppHeap final
   std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread();

  private:
-  void UpdateSupportedGCTypesFromFlags();
+  void ReduceGCCapabilititesFromFlags();

   void FinalizeIncrementalGarbageCollectionIfNeeded(
       cppgc::Heap::StackState) final {
...
@@ -102,10 +102,8 @@ void LocalEmbedderHeapTracer::EnterFinalPause() {
 bool LocalEmbedderHeapTracer::Trace(double max_duration) {
   if (!InUse()) return true;

-  if (cpp_heap_)
-    return cpp_heap()->AdvanceTracing(max_duration);
-  else
-    return remote_tracer_->AdvanceTracing(max_duration);
+  return cpp_heap_ ? cpp_heap_->AdvanceTracing(max_duration)
+                   : remote_tracer_->AdvanceTracing(max_duration);
 }

 bool LocalEmbedderHeapTracer::IsRemoteTracingDone() {
...
@@ -110,8 +110,18 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
   bool IsRemoteTracingDone();

   bool ShouldFinalizeIncrementalMarking() {
-    return !FLAG_incremental_marking_wrappers || !InUse() ||
-           (IsRemoteTracingDone() && embedder_worklist_empty_);
+    // Covers cases where no remote tracer is in use or the flags for
+    // incremental marking have been disabled.
+    if (!SupportsIncrementalEmbedderSteps()) return true;
+    return IsRemoteTracingDone() && embedder_worklist_empty_;
+  }
+
+  bool SupportsIncrementalEmbedderSteps() const {
+    if (!InUse()) return false;
+    return cpp_heap_ ? FLAG_cppheap_incremental_marking
+                     : FLAG_incremental_marking_wrappers;
   }

   void SetEmbedderWorklistEmpty(bool is_empty) {
...
@@ -455,7 +455,9 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(

 void IncrementalMarking::EmbedderStep(double expected_duration_ms,
                                       double* duration_ms) {
-  if (!ShouldDoEmbedderStep()) {
+  DCHECK(IsMarking());
+  if (!heap_->local_embedder_heap_tracer()
+           ->SupportsIncrementalEmbedderSteps()) {
     *duration_ms = 0.0;
     return;
   }
@@ -609,11 +611,6 @@ bool IncrementalMarking::TryInitializeTaskTimeout() {
   }
 }

-bool IncrementalMarking::ShouldDoEmbedderStep() {
-  return IsMarking() && FLAG_incremental_marking_wrappers &&
-         heap_->local_embedder_heap_tracer()->InUse();
-}
-
 void IncrementalMarking::FastForwardSchedule() {
   if (scheduled_bytes_to_mark_ < bytes_marked_) {
     scheduled_bytes_to_mark_ = bytes_marked_;
...
@@ -174,7 +174,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void StartMarking();

-  bool ShouldDoEmbedderStep();
   void EmbedderStep(double expected_duration_ms, double* duration_ms);

   void StartBlackAllocation();
...