Commit 8d55cd6c authored by Michael Lippautz's avatar Michael Lippautz Committed by V8 LUCI CQ

cppgc-js: Respect --single-threaded-gc

Before this CL, Oilpan would schedule concurrent marking and sweeping
even in the presence of --single-threaded-gc. For example, this flakily
breaks Blink tests that pass --single-threaded (which implies
--single-threaded-gc) but do not set up a thread pool in certain
configurations.

Bug: chromium:1300492
Change-Id: I64f0c6a20f9c29d689a62e63cc5a8d024962ff2c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3497760
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#79321}
parent d7d01a9c
...@@ -68,8 +68,8 @@ class V8_EXPORT Heap { ...@@ -68,8 +68,8 @@ class V8_EXPORT Heap {
*/ */
kAtomic, kAtomic,
/** /**
* Incremental marking, i.e. interleave marking is the rest of the * Incremental marking interleaves marking with the rest of the application
* application on the same thread. * workload on the same thread.
*/ */
kIncremental, kIncremental,
/** /**
...@@ -86,6 +86,11 @@ class V8_EXPORT Heap { ...@@ -86,6 +86,11 @@ class V8_EXPORT Heap {
* Atomic stop-the-world sweeping. All of sweeping is performed at once. * Atomic stop-the-world sweeping. All of sweeping is performed at once.
*/ */
kAtomic, kAtomic,
/**
* Incremental sweeping interleaves sweeping with the rest of the
* application workload on the same thread.
*/
kIncremental,
/** /**
* Incremental and concurrent sweeping. Sweeping is split and interleaved * Incremental and concurrent sweeping. Sweeping is split and interleaved
* with the rest of the application. * with the rest of the application.
......
...@@ -411,8 +411,10 @@ CppHeap::CppHeap( ...@@ -411,8 +411,10 @@ CppHeap::CppHeap(
std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces, std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
cppgc::internal::HeapBase::StackSupport:: cppgc::internal::HeapBase::StackSupport::
kSupportsConservativeStackScan, kSupportsConservativeStackScan,
cppgc::internal::HeapBase::MarkingType::kIncrementalAndConcurrent, FLAG_single_threaded_gc ? MarkingType::kIncremental
cppgc::internal::HeapBase::SweepingType::kIncrementalAndConcurrent), : MarkingType::kIncrementalAndConcurrent,
FLAG_single_threaded_gc ? SweepingType::kIncremental
: SweepingType::kIncrementalAndConcurrent),
wrapper_descriptor_(wrapper_descriptor) { wrapper_descriptor_(wrapper_descriptor) {
CHECK_NE(WrapperDescriptor::kUnknownEmbedderId, CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
wrapper_descriptor_.embedder_id_for_garbage_collected); wrapper_descriptor_.embedder_id_for_garbage_collected);
...@@ -493,6 +495,19 @@ bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) { ...@@ -493,6 +495,19 @@ bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
} // namespace } // namespace
// Picks the marking type for the current GC cycle: a forced GC finalizes
// marking atomically (unless a test explicitly requested incremental
// marking), otherwise the heap-wide capability configured at construction
// time (kIncremental under --single-threaded-gc, kIncrementalAndConcurrent
// otherwise) is used.
CppHeap::MarkingType CppHeap::SelectMarkingType() const {
// Forced GCs complete marking in a single atomic pause; the testing flag
// overrides this so incremental marking can still be exercised.
if (IsForceGC(current_gc_flags_) && !force_incremental_marking_for_testing_)
return MarkingType::kAtomic;
return marking_support();
}
// Picks the sweeping type for the current GC cycle: a forced GC also
// finalizes sweeping right away (atomic), otherwise the heap-wide
// capability configured at construction time is used.
CppHeap::SweepingType CppHeap::SelectSweepingType() const {
if (IsForceGC(current_gc_flags_)) return SweepingType::kAtomic;
return sweeping_support();
}
void CppHeap::InitializeTracing( void CppHeap::InitializeTracing(
cppgc::internal::GarbageCollector::Config::CollectionType collection_type, cppgc::internal::GarbageCollector::Config::CollectionType collection_type,
GarbageCollectionFlags gc_flags) { GarbageCollectionFlags gc_flags) {
...@@ -515,16 +530,13 @@ void CppHeap::InitializeTracing( ...@@ -515,16 +530,13 @@ void CppHeap::InitializeTracing(
const UnifiedHeapMarker::MarkingConfig marking_config{ const UnifiedHeapMarker::MarkingConfig marking_config{
*collection_type_, cppgc::Heap::StackState::kNoHeapPointers, *collection_type_, cppgc::Heap::StackState::kNoHeapPointers,
(IsForceGC(current_gc_flags_) && !force_incremental_marking_for_testing_) SelectMarkingType(),
? UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic
: UnifiedHeapMarker::MarkingConfig::MarkingType::
kIncrementalAndConcurrent,
IsForceGC(current_gc_flags_) IsForceGC(current_gc_flags_)
? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
: UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced}; : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
DCHECK_IMPLIES(!isolate_, (cppgc::Heap::MarkingType::kAtomic == DCHECK_IMPLIES(!isolate_,
marking_config.marking_type) || (MarkingType::kAtomic == marking_config.marking_type) ||
force_incremental_marking_for_testing_); force_incremental_marking_for_testing_);
if (ShouldReduceMemory(current_gc_flags_)) { if (ShouldReduceMemory(current_gc_flags_)) {
// Only enable compaction when in a memory reduction garbage collection as // Only enable compaction when in a memory reduction garbage collection as
// it may significantly increase the final garbage collection pause. // it may significantly increase the final garbage collection pause.
...@@ -579,8 +591,7 @@ void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) { ...@@ -579,8 +591,7 @@ void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
->GetMutatorMarkingState(), ->GetMutatorMarkingState(),
wrapper_descriptor_); wrapper_descriptor_);
} }
compactor_.CancelIfShouldNotCompact(cppgc::Heap::MarkingType::kAtomic, compactor_.CancelIfShouldNotCompact(MarkingType::kAtomic, stack_state);
stack_state);
} }
void CppHeap::TraceEpilogue() { void CppHeap::TraceEpilogue() {
...@@ -619,21 +630,14 @@ void CppHeap::TraceEpilogue() { ...@@ -619,21 +630,14 @@ void CppHeap::TraceEpilogue() {
cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
compactable_space_handling = compactor_.CompactSpacesIfEnabled(); compactable_space_handling = compactor_.CompactSpacesIfEnabled();
const cppgc::internal::Sweeper::SweepingConfig sweeping_config{ const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
// In case the GC was forced, also finalize sweeping right away. SelectSweepingType(), compactable_space_handling,
IsForceGC(current_gc_flags_)
? cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic
: cppgc::internal::Sweeper::SweepingConfig::SweepingType::
kIncrementalAndConcurrent,
compactable_space_handling,
ShouldReduceMemory(current_gc_flags_) ShouldReduceMemory(current_gc_flags_)
? cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling:: ? cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
kDiscardWherePossible kDiscardWherePossible
: cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling:: : cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
kDoNotDiscard}; kDoNotDiscard};
DCHECK_IMPLIES( DCHECK_IMPLIES(!isolate_,
!isolate_, SweepingType::kAtomic == sweeping_config.sweeping_type);
cppgc::internal::Sweeper::SweepingConfig::SweepingType::kAtomic ==
sweeping_config.sweeping_type);
sweeper().Start(sweeping_config); sweeper().Start(sweeping_config);
} }
in_atomic_pause_ = false; in_atomic_pause_ = false;
......
...@@ -157,6 +157,9 @@ class V8_EXPORT_PRIVATE CppHeap final ...@@ -157,6 +157,9 @@ class V8_EXPORT_PRIVATE CppHeap final
void FinalizeIncrementalGarbageCollectionForTesting( void FinalizeIncrementalGarbageCollectionForTesting(
cppgc::EmbedderStackState) final; cppgc::EmbedderStackState) final;
MarkingType SelectMarkingType() const;
SweepingType SelectSweepingType() const;
Isolate* isolate_ = nullptr; Isolate* isolate_ = nullptr;
bool marking_done_ = false; bool marking_done_ = false;
// |collection_type_| is initialized when marking is in progress. // |collection_type_| is initialized when marking is in progress.
......
...@@ -212,6 +212,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle { ...@@ -212,6 +212,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
int GetCreationThreadId() const { return creation_thread_id_; } int GetCreationThreadId() const { return creation_thread_id_; }
MarkingType marking_support() const { return marking_support_; } MarkingType marking_support() const { return marking_support_; }
SweepingType sweeping_support() const { return sweeping_support_; }
protected: protected:
// Used by the incremental scheduler to finalize a GC if supported. // Used by the incremental scheduler to finalize a GC if supported.
......
...@@ -737,8 +737,6 @@ class Sweeper::SweeperImpl final { ...@@ -737,8 +737,6 @@ class Sweeper::SweeperImpl final {
if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) { if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) {
Finish(); Finish();
} else { } else {
DCHECK_EQ(SweepingConfig::SweepingType::kIncrementalAndConcurrent,
config.sweeping_type);
ScheduleIncrementalSweeping(); ScheduleIncrementalSweeping();
ScheduleConcurrentSweeping(); ScheduleConcurrentSweeping();
} }
...@@ -953,6 +951,10 @@ class Sweeper::SweeperImpl final { ...@@ -953,6 +951,10 @@ class Sweeper::SweeperImpl final {
void ScheduleConcurrentSweeping() { void ScheduleConcurrentSweeping() {
DCHECK(platform_); DCHECK(platform_);
if (config_.sweeping_type !=
SweepingConfig::SweepingType::kIncrementalAndConcurrent)
return;
concurrent_sweeper_handle_ = concurrent_sweeper_handle_ =
platform_->PostJob(cppgc::TaskPriority::kUserVisible, platform_->PostJob(cppgc::TaskPriority::kUserVisible,
std::make_unique<ConcurrentSweepTask>( std::make_unique<ConcurrentSweepTask>(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment