Commit 64bf4c53 authored by Clemens Backes, committed by Commit Bot

Revert "cppgc: Use tracing scopes"

This reverts commit 548fe208.

Reason for revert: Issues on Mac64: https://ci.chromium.org/p/v8/builders/ci/V8%20Mac64%20-%20debug/31710

Original change's description:
> cppgc: Use tracing scopes
>
> The scopes themselves mostly have the same coverage as current scopes in
> blink. A few exception due to encapsulation exist and are highlighted as
> comments on the CL.
>
> Bug: chromium:1056170
> Change-Id: I48af2cfdfd53a8caa1ab5d805d377f6f13a825bc
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2540552
> Commit-Queue: Omer Katz <omerkatz@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#71285}

TBR=ulan@chromium.org,mlippautz@chromium.org,omerkatz@chromium.org

Change-Id: I20dce9309dcaeff6ea61bdc51df3a2f62c2a103f
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:1056170
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2550782
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71289}
parent af7f20fb
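
For context, the reverted change wrapped the GC phases in RAII tracing scopes; those are the StatsCollector::EnabledScope/DisabledScope lines removed throughout the diff below. A minimal sketch of the pattern, using a scope id and constructor arguments exactly as they appear in the removed lines (the surrounding block is illustrative only and not part of the change):

    {
      // Constructing the scope opens a trace event for the named phase and
      // starts a timer; the destructor closes the event and reports the
      // elapsed time to the heap's StatsCollector when the block is left.
      StatsCollector::EnabledScope stats_scope(
          heap(), StatsCollector::kIncrementalMarkingStep);
      // ... work of the phase goes here ...
    }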
@@ -206,26 +206,16 @@ void CppHeap::TracePrologue(TraceFlags flags) {
 }
 
 bool CppHeap::AdvanceTracing(double deadline_in_ms) {
-  v8::base::TimeDelta deadline =
-      is_in_final_pause_
-          ? v8::base::TimeDelta::Max()
-          : v8::base::TimeDelta::FromMillisecondsD(deadline_in_ms);
-  cppgc::internal::StatsCollector::EnabledScope stats_scope(
-      AsBase(),
-      is_in_final_pause_
-          ? cppgc::internal::StatsCollector::kAtomicPauseMarkTransitiveClosure
-          : cppgc::internal::StatsCollector::kUnifiedMarkingStep);
-  // TODO(chromium:1056170): Replace when unified heap transitions to
-  // bytes-based deadline.
-  marking_done_ = marker_->AdvanceMarkingWithMaxDuration(deadline);
-  DCHECK_IMPLIES(is_in_final_pause_, marking_done_);
+  // TODO(chromium:1056170): Replace std::numeric_limits<size_t>::max() with a
+  // proper deadline when unified heap transitions to bytes-based deadline.
+  marking_done_ = marker_->AdvanceMarkingWithMaxDuration(
+      v8::base::TimeDelta::FromMillisecondsD(deadline_in_ms));
   return marking_done_;
 }
 
 bool CppHeap::IsTracingDone() { return marking_done_; }
 
 void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
-  is_in_final_pause_ = true;
   marker_->EnterAtomicPause(stack_state);
   if (compactor_.CancelIfShouldNotCompact(
           UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic,
@@ -235,21 +225,13 @@ void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
 }
 
 void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
-  CHECK(is_in_final_pause_);
   CHECK(marking_done_);
   {
+    // Weakness callbacks and pre-finalizers are forbidden from allocating
+    // objects.
     cppgc::internal::ObjectAllocator::NoAllocationScope no_allocation_scope_(
         object_allocator_);
     marker_->LeaveAtomicPause();
-    is_in_final_pause_ = false;
-  }
-  {
-    cppgc::internal::StatsCollector::EnabledScope stats(
-        AsBase(), cppgc::internal::StatsCollector::kAtomicPauseSweepAndCompact);
-    {
-      cppgc::internal::ObjectAllocator::NoAllocationScope no_allocation_scope_(
-          object_allocator_);
     prefinalizer_handler()->InvokePreFinalizers();
   }
   marker_.reset();
@@ -258,18 +240,16 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
   UnifiedHeapMarkingVerifier verifier(*this);
   verifier.Run(cppgc::Heap::StackState::kNoHeapPointers);
 #endif
-  {
-    NoGCScope no_gc(*this);
-    cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
-        compactable_space_handling = compactor_.CompactSpacesIfEnabled();
+  cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
+      compactable_space_handling = compactor_.CompactSpacesIfEnabled();
+  {
+    NoGCScope no_gc(*this);
     const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
         cppgc::internal::Sweeper::SweepingConfig::SweepingType::
             kIncrementalAndConcurrent,
        compactable_space_handling};
     sweeper().Start(sweeping_config);
   }
-  }
   sweeper().NotifyDoneIfNeeded();
 }
...
@@ -58,7 +58,6 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
   Isolate& isolate_;
   bool marking_done_ = false;
-  bool is_in_final_pause_ = false;
 };
 
 }  // namespace internal
...
@@ -16,7 +16,6 @@
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-space.h"
 #include "src/heap/cppgc/raw-heap.h"
-#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -484,9 +483,6 @@ bool Compactor::CancelIfShouldNotCompact(
 Compactor::CompactableSpaceHandling Compactor::CompactSpacesIfEnabled() {
   if (!is_enabled_) return CompactableSpaceHandling::kSweep;
 
-  StatsCollector::DisabledScope stats_scope(
-      *heap_.heap(), StatsCollector::kAtomicPauseCompaction);
-
   MovableReferences movable_references(*heap_.heap());
 
   CompactionWorklists::MovableReferencesWorklist::Local local(
...
@@ -10,7 +10,6 @@
 #include "src/heap/cppgc/liveness-broker.h"
 #include "src/heap/cppgc/marking-state.h"
 #include "src/heap/cppgc/marking-visitor.h"
-#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -72,9 +71,6 @@ ConcurrentMarkingTask::ConcurrentMarkingTask(
     : concurrent_marker_(concurrent_marker) {}
 
 void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) {
-  StatsCollector::EnabledConcurrentScope stats_scope(
-      concurrent_marker_.heap(), StatsCollector::kConcurrentMarkingStep);
-
   if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists()))
     return;
   ConcurrentMarkingState concurrent_marking_state(
@@ -148,15 +144,10 @@ void ConcurrentMarkingTask::ProcessWorklists(
       return;
     }
 
-    {
-      StatsCollector::DisabledConcurrentScope stats_scope(
-          concurrent_marker_.heap(),
-          StatsCollector::kConcurrentMarkInvokeEphemeronCallbacks);
     if (!DrainWorklistWithYielding(
             job_delegate, concurrent_marking_state,
             concurrent_marker_.incremental_marking_schedule(),
-            concurrent_marking_state
-                .ephemeron_pairs_for_processing_worklist(),
+            concurrent_marking_state.ephemeron_pairs_for_processing_worklist(),
            [&concurrent_marking_state](
                const MarkingWorklists::EphemeronPairItem& item) {
              concurrent_marking_state.ProcessEphemeron(item.key,
@@ -164,7 +155,6 @@ void ConcurrentMarkingTask::ProcessWorklists(
            })) {
      return;
    }
-    }
  } while (
      !concurrent_marking_state.marking_worklist().IsLocalAndGlobalEmpty());
 }
...
@@ -69,7 +69,7 @@ HeapBase::HeapBase(
       stats_collector_(std::make_unique<StatsCollector>()),
       stack_(std::make_unique<heap::base::Stack>(
           v8::base::Stack::GetStackStart())),
-      prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
+      prefinalizer_handler_(std::make_unique<PreFinalizerHandler>()),
       compactor_(raw_heap_),
       object_allocator_(&raw_heap_, page_backend_.get(),
                         stats_collector_.get()),
...
@@ -12,7 +12,6 @@
 #include "src/heap/cppgc/marker.h"
 #include "src/heap/cppgc/marking-verifier.h"
 #include "src/heap/cppgc/prefinalizer-handler.h"
-#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
@@ -156,19 +155,11 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
   config_.stack_state = stack_state;
   DCHECK(marker_);
   {
-    // This guards atomic pause marking, meaning that no internal method or
+    // Pre finalizers are forbidden from allocating objects. Note that this also
+    // guard atomic pause marking below, meaning that no internal method or
     // external callbacks are allowed to allocate new objects.
     ObjectAllocator::NoAllocationScope no_allocation_scope_(object_allocator_);
     marker_->FinishMarking(stack_state);
-  }
-  {
-    StatsCollector::EnabledScope stats(
-        *this, StatsCollector::kAtomicPauseSweepAndCompact);
-    {
-      // Pre finalizers are forbidden from allocating objects.
-      ObjectAllocator::NoAllocationScope no_allocation_scope_(
-          object_allocator_);
     prefinalizer_handler_->InvokePreFinalizers();
   }
   marker_.reset();
@@ -177,7 +168,7 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
   MarkingVerifier verifier(*this);
   verifier.Run(stack_state);
 #endif
   {
     NoGCScope no_gc(*this);
     const Sweeper::SweepingConfig sweeping_config{
         config_.sweeping_type,
@@ -191,13 +182,5 @@ void Heap::PostGarbageCollection() { gc_in_progress_ = false; }
 void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
 
-void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
-    Config::StackState stack_state) {
-  StatsCollector::EnabledScope stats_scope(
-      *this, StatsCollector::kIncrementalMarkingFinalize);
-  FinalizeGarbageCollection(stack_state);
-}
-
 }  // namespace internal
 }  // namespace cppgc
@@ -44,7 +44,10 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
   void StartGarbageCollection(Config);
   void FinalizeGarbageCollection(Config::StackState);
 
-  void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
+  void FinalizeIncrementalGarbageCollectionIfNeeded(
+      Config::StackState stack_state) final {
+    FinalizeGarbageCollection(stack_state);
+  }
 
   void PostGarbageCollection() final;
...
@@ -59,8 +59,6 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
 void VisitRememberedSlots(HeapBase& heap,
                           MutatorMarkingState& mutator_marking_state) {
 #if defined(CPPGC_YOUNG_GENERATION)
-  StatsCollector::EnabledScope stats_scope(
-      heap(), StatsCollector::kVisitRememberedSets);
   for (void* slot : heap.remembered_slots()) {
     auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
                             ->ObjectHeaderFromInnerAddress(slot);
@@ -202,9 +200,6 @@ void MarkerBase::StartMarking() {
   is_marking_started_ = true;
   if (EnterIncrementalMarkingIfNeeded(config_, heap())) {
-    StatsCollector::EnabledScope stats_scope(
-        heap(), StatsCollector::kIncrementalMarkingStartMarking);
     // Performing incremental or concurrent marking.
     schedule_.NotifyIncrementalMarkingStart();
     // Scanning the stack is expensive so we only do it at the atomic pause.
@@ -219,9 +214,6 @@ void MarkerBase::StartMarking() {
 }
 
 void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
-  StatsCollector::EnabledScope stats_scope(
-      heap(), StatsCollector::kAtomicPauseMarkPrologue);
   if (ExitIncrementalMarkingIfNeeded(config_, heap())) {
     // Cancel remaining concurrent/incremental tasks.
     concurrent_marker_->Cancel();
@@ -236,9 +228,6 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
   // is either cleared or the object is retained.
   g_process_mutex.Pointer()->Lock();
 
-  {
-    StatsCollector::EnabledScope inner_stats_scope(
-        heap(), StatsCollector::kAtomicPauseMarkRoots);
   // VisitRoots also resets the LABs.
   VisitRoots(config_.stack_state);
   if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
@@ -247,51 +236,35 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
   } else {
     MarkNotFullyConstructedObjects();
   }
-  }
 }
 
 void MarkerBase::LeaveAtomicPause() {
-  StatsCollector::EnabledScope stats_scope(
-      heap(), StatsCollector::kAtomicPauseMarkEpilogue);
   DCHECK(!incremental_marking_handle_);
   ResetRememberedSet(heap());
   heap().stats_collector()->NotifyMarkingCompleted(
       // GetOverallMarkedBytes also includes concurrently marked bytes.
       schedule_.GetOverallMarkedBytes());
   is_marking_started_ = false;
-  {
-    // Weakness callbacks are forbidden from allocating objects.
-    ObjectAllocator::NoAllocationScope no_allocation_scope_(
-        heap_.object_allocator());
   ProcessWeakness();
-  }
   g_process_mutex.Pointer()->Unlock();
 }
 
 void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
   DCHECK(is_marking_started_);
   EnterAtomicPause(stack_state);
-  {
-    StatsCollector::EnabledScope advance_tracing_scope(
-        heap(), StatsCollector::kAtomicPauseMarkTransitiveClosure);
   CHECK(ProcessWorklistsWithDeadline(std::numeric_limits<size_t>::max(),
                                      v8::base::TimeTicks::Max()));
-  }
   mutator_marking_state_.Publish();
   LeaveAtomicPause();
 }
 
 void MarkerBase::ProcessWeakness() {
   DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
   heap().GetWeakPersistentRegion().Trace(&visitor());
   // Processing cross-thread handles requires taking the process lock.
   g_process_mutex.Get().AssertHeld();
   heap().GetWeakCrossThreadPersistentRegion().Trace(&visitor());
-  {
-    StatsCollector::DisabledScope stats_scope(
-        heap(), StatsCollector::kMarkWeakProcessing);
   // Call weak callbacks on objects that may now be pointing to dead objects.
   MarkingWorklists::WeakCallbackItem item;
   LivenessBroker broker = LivenessBrokerFactory::Create();
@@ -300,38 +273,21 @@ void MarkerBase::ProcessWeakness() {
   while (local.Pop(&item)) {
     item.callback(broker, item.parameter);
   }
-  }
   // Weak callbacks should not add any new objects for marking.
   DCHECK(marking_worklists_.marking_worklist()->IsEmpty());
 }
 
 void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
-  StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::kVisitRoots);
   // Reset LABs before scanning roots. LABs are cleared to allow
   // ObjectStartBitmap handling without considering LABs.
   heap().object_allocator().ResetLinearAllocationBuffers();
-  {
-    StatsCollector::DisabledScope stats_scope(
-        heap(), StatsCollector::kVisitPersistentRoots);
-    {
-      StatsCollector::DisabledScope inner_stats_scope(
-          heap(), StatsCollector::kVisitPersistents);
   heap().GetStrongPersistentRegion().Trace(&visitor());
-    }
   if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) {
-      StatsCollector::DisabledScope inner_stats_scope(
-          heap(), StatsCollector::kVisitCrossThreadPersistents);
     g_process_mutex.Get().AssertHeld();
     heap().GetStrongCrossThreadPersistentRegion().Trace(&visitor());
   }
-  }
   if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
-    StatsCollector::DisabledScope stack_stats_scope(
-        heap(), StatsCollector::kVisitStackRoots);
     heap().stack()->IteratePointers(&stack_visitor());
   }
   if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
@@ -352,9 +308,6 @@ bool MarkerBase::IncrementalMarkingStepForTesting(
 }
 
 bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) {
-  StatsCollector::EnabledScope stats_scope(
-      heap(), StatsCollector::kIncrementalMarkingStep);
   if (stack_state == MarkingConfig::StackState::kNoHeapPointers) {
     mutator_marking_state_.FlushNotFullyConstructedObjects();
   }
@@ -380,9 +333,6 @@ bool MarkerBase::AdvanceMarkingWithDeadline(v8::base::TimeDelta max_duration) {
   if (!incremental_marking_disabled_for_testing_) {
     size_t step_size_in_bytes =
         GetNextIncrementalStepDuration(schedule_, heap_);
-    StatsCollector::EnabledScope deadline_scope(
-        heap(), StatsCollector::kIncrementalMarkingWithDeadline, "deadline_ms",
-        max_duration.InMillisecondsF());
     is_done = ProcessWorklistsWithDeadline(
         mutator_marking_state_.marked_bytes() + step_size_in_bytes,
         v8::base::TimeTicks::Now() + max_duration);
@@ -410,35 +360,23 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
       mutator_marking_state_.FlushDiscoveredEphemeronPairs();
     }
 
-    StatsCollector::EnabledScope stats_scope(
-        heap(), StatsCollector::kMarkProcessWorklists);
     // Bailout objects may be complicated to trace and thus might take longer
     // than other objects. Therefore we reduce the interval between deadline
    // checks to guarantee the deadline is not exceeded.
-    {
-      StatsCollector::EnabledScope inner_scope(
-          heap(), StatsCollector::kMarkBailOutObjects);
     if (!DrainWorklistWithBytesAndTimeDeadline<kDefaultDeadlineCheckInterval /
                                                5>(
            mutator_marking_state_, marked_bytes_deadline, time_deadline,
            mutator_marking_state_.concurrent_marking_bailout_worklist(),
-            [this](
-                const MarkingWorklists::ConcurrentMarkingBailoutItem& item) {
+            [this](const MarkingWorklists::ConcurrentMarkingBailoutItem& item) {
              mutator_marking_state_.AccountMarkedBytes(item.bailedout_size);
              item.callback(&visitor(), item.parameter);
            })) {
      return false;
    }
-    }
-    {
-      StatsCollector::EnabledScope inner_scope(
-          heap(), StatsCollector::kMarkProcessNotFullyconstructeddWorklist);
     if (!DrainWorklistWithBytesAndTimeDeadline(
            mutator_marking_state_, marked_bytes_deadline, time_deadline,
-            mutator_marking_state_
-                .previously_not_fully_constructed_worklist(),
+            mutator_marking_state_.previously_not_fully_constructed_worklist(),
            [this](HeapObjectHeader* header) {
              mutator_marking_state_.AccountMarkedBytes(*header);
              DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
@@ -446,11 +384,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
            })) {
      return false;
    }
-    }
-    {
-      StatsCollector::EnabledScope inner_scope(
-          heap(), StatsCollector::kMarkProcessMarkingWorklist);
     if (!DrainWorklistWithBytesAndTimeDeadline(
            mutator_marking_state_, marked_bytes_deadline, time_deadline,
            mutator_marking_state_.marking_worklist(),
@@ -464,11 +398,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
            })) {
      return false;
    }
-    }
-    {
-      StatsCollector::EnabledScope inner_scope(
-          heap(), StatsCollector::kMarkProcessWriteBarrierWorklist);
     if (!DrainWorklistWithBytesAndTimeDeadline(
            mutator_marking_state_, marked_bytes_deadline, time_deadline,
            mutator_marking_state_.write_barrier_worklist(),
@@ -479,11 +409,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
            })) {
      return false;
    }
-    }
-    {
-      StatsCollector::EnabledScope stats_scope(
-          heap(), StatsCollector::kMarkInvokeEphemeronCallbacks);
     if (!DrainWorklistWithBytesAndTimeDeadline(
            mutator_marking_state_, marked_bytes_deadline, time_deadline,
            mutator_marking_state_.ephemeron_pairs_for_processing_worklist(),
@@ -493,14 +419,11 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
            })) {
      return false;
    }
-    }
  } while (!mutator_marking_state_.marking_worklist().IsLocalAndGlobalEmpty());
  return true;
 }
 
 void MarkerBase::MarkNotFullyConstructedObjects() {
-  StatsCollector::DisabledScope stats_scope(
-      heap(), StatsCollector::kMarkNotFullyConstructedObjects);
   std::unordered_set<HeapObjectHeader*> objects =
       mutator_marking_state_.not_fully_constructed_worklist().Extract();
   for (HeapObjectHeader* object : objects) {
...
@@ -6,8 +6,6 @@
 #include <unordered_set>
 
-#include "src/heap/cppgc/stats-collector.h"
-
 namespace cppgc {
 namespace internal {
@@ -21,8 +19,6 @@ void MutatorMarkingState::FlushNotFullyConstructedObjects() {
 }
 
 void MutatorMarkingState::FlushDiscoveredEphemeronPairs() {
-  StatsCollector::EnabledScope stats_scope(
-      heap_, StatsCollector::kMarkFlushEphemeronPairs);
   discovered_ephemeron_pairs_worklist_.Publish();
   if (!discovered_ephemeron_pairs_worklist_.IsGlobalEmpty()) {
     ephemeron_pairs_for_processing_worklist_.Merge(
...
@@ -120,7 +120,9 @@ class MarkingStateBase {
     return movable_slots_worklist_.get();
   }
 
+#ifdef DEBUG
   HeapBase& heap_;
+#endif  // DEBUG
 
   MarkingWorklists::MarkingWorklist::Local marking_worklist_;
   MarkingWorklists::NotFullyConstructedWorklist&
@@ -148,7 +150,9 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                    MarkingWorklists& marking_worklists,
                                    CompactionWorklists* compaction_worklists)
     :
+#ifdef DEBUG
       heap_(heap),
+#endif  // DEBUG
       marking_worklist_(marking_worklists.marking_worklist()),
       not_fully_constructed_worklist_(
           *marking_worklists.not_fully_constructed_worklist()),
...
@@ -134,11 +134,7 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace* space,
   // 3. Lazily sweep pages of this heap until we find a freed area for
   //    this allocation or we finish sweeping all pages of this heap.
-  // {
-  //   StatsCollector::EnabledScope stats_scope(
-  //       *space->raw_heap()->heap(), StatsCollector::kLazySweepOnAllocation);
-  //   // TODO(chromium:1056170): Add lazy sweep.
-  // }
+  // TODO(chromium:1056170): Add lazy sweep.
 
   // 4. Complete sweeping.
   raw_heap_->heap()->sweeper().FinishIfRunning();
...
@@ -11,7 +11,6 @@
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap.h"
 #include "src/heap/cppgc/liveness-broker.h"
-#include "src/heap/cppgc/stats-collector.h"
 
 namespace cppgc {
 namespace internal {
@@ -30,11 +29,9 @@ bool PreFinalizerRegistrationDispatcher::PreFinalizer::operator==(
   return (object == other.object) && (callback == other.callback);
 }
 
-PreFinalizerHandler::PreFinalizerHandler(HeapBase& heap)
-    : heap_(heap)
+PreFinalizerHandler::PreFinalizerHandler()
 #ifdef DEBUG
-      ,
-      creation_thread_id_(v8::base::OS::GetCurrentThreadId())
+    : creation_thread_id_(v8::base::OS::GetCurrentThreadId())
 #endif
 {
 }
@@ -48,9 +45,6 @@ void PreFinalizerHandler::RegisterPrefinalizer(PreFinalizer pre_finalizer) {
 }
 
 void PreFinalizerHandler::InvokePreFinalizers() {
-  StatsCollector::DisabledScope stats_scope(
-      heap_, StatsCollector::kInvokePreFinalizers);
   DCHECK(CurrentThreadIsCreationThread());
   LivenessBroker liveness_broker = LivenessBrokerFactory::Create();
   ordered_pre_finalizers_.erase(
...
@@ -12,14 +12,12 @@
 namespace cppgc {
 namespace internal {
 
-class HeapBase;
-
 class PreFinalizerHandler final {
  public:
   using PreFinalizer =
       cppgc::internal::PreFinalizerRegistrationDispatcher::PreFinalizer;
 
-  explicit PreFinalizerHandler(HeapBase& heap);
+  PreFinalizerHandler();
 
   void RegisterPrefinalizer(PreFinalizer pre_finalizer);
@@ -35,7 +33,6 @@ class PreFinalizerHandler final {
   // back-to-front.
   std::vector<PreFinalizer> ordered_pre_finalizers_;
 
-  HeapBase& heap_;
 #ifdef DEBUG
   int creation_thread_id_;
 #endif
...
@@ -20,41 +20,10 @@ namespace cppgc {
 namespace internal {
 
 #define CPPGC_FOR_ALL_SCOPES(V)              \
-  V(AtomicPauseCompaction)                   \
-  V(AtomicPauseMarkEpilogue)                 \
-  V(AtomicPauseMarkPrologue)                 \
-  V(AtomicPauseMarkRoots)                    \
-  V(AtomicPauseMarkTransitiveClosure)        \
-  V(AtomicPauseSweepAndCompact)              \
-  V(CompleteSweep)                           \
-  V(IncrementalMarkingFinalize)              \
-  V(IncrementalMarkingStartMarking)          \
-  V(IncrementalMarkingStep)                  \
-  V(IncrementalMarkingWithDeadline)          \
-  V(InvokePreFinalizers)                     \
-  V(LazySweepInIdle)                         \
-  V(LazySweepOnAllocation)                   \
-  V(MarkBailOutObjects)                      \
-  V(MarkInvokeEphemeronCallbacks)            \
-  V(MarkFlushEphemeronPairs)                 \
-  V(MarkProcessWorklists)                    \
-  V(MarkProcessMarkingWorklist)              \
-  V(MarkProcessWriteBarrierWorklist)         \
-  V(MarkProcessNotFullyconstructeddWorklist) \
-  V(MarkNotFullyConstructedObjects)          \
-  V(MarkWeakProcessing)                      \
-  V(UnifiedMarkingStep)                      \
-  V(VisitCrossThreadPersistents)             \
-  V(VisitPersistentRoots)                    \
-  V(VisitPersistents)                        \
-  V(VisitRoots)                              \
-  V(VisitStackRoots)                         \
-  V(VisitRememberedSets)
-
-#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V)  \
-  V(ConcurrentMarkInvokeEphemeronCallbacks) \
-  V(ConcurrentMarkingStep)                  \
-  V(ConcurrentSweepingStep)
+  V(MainThreadScopeForTests1)                \
+  V(MainThreadScopeForTests2)
+
+#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) V(ConcurrentThreadScopeForTests)
 
 // Sink for various time and memory statistics.
 class V8_EXPORT_PRIVATE StatsCollector final {
...
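
The hunk above swaps the full list of phase names back to the two test-only names; both feed the same X-macro. As a rough sketch (assumed shape; the actual enum declaration in stats-collector.h is outside this diff), such a list typically expands into an enum like:

    #define CPPGC_DECLARE_SCOPE_ID(name) k##name,
    enum ScopeId {
      CPPGC_FOR_ALL_SCOPES(CPPGC_DECLARE_SCOPE_ID)
      kNumScopeIds  // bound referenced by the unit tests further below
    };
    #undef CPPGC_DECLARE_SCOPE_ID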
@@ -392,13 +392,9 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
   friend class HeapVisitor<ConcurrentSweepTask>;
 
  public:
-  explicit ConcurrentSweepTask(HeapBase& heap, SpaceStates* states)
-      : heap_(heap), states_(states) {}
+  explicit ConcurrentSweepTask(SpaceStates* states) : states_(states) {}
 
   void Run(cppgc::JobDelegate* delegate) final {
-    StatsCollector::EnabledConcurrentScope stats_scope(
-        heap_, StatsCollector::kConcurrentSweepingStep);
-
     for (SpaceState& state : *states_) {
       while (auto page = state.unswept_pages.Pop()) {
         Traverse(*page);
@@ -442,7 +438,6 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
     return true;
   }
 
-  HeapBase& heap_;
   SpaceStates* states_;
   std::atomic_bool is_completed_{false};
 };
@@ -523,16 +518,12 @@ class Sweeper::SweeperImpl final {
   void FinishIfRunning() {
     if (!is_in_progress_) return;
 
-    {
-      StatsCollector::EnabledScope stats_scope(*heap_->heap(),
-                                               StatsCollector::kCompleteSweep);
     if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
         concurrent_sweeper_handle_->UpdatePriorityEnabled()) {
       concurrent_sweeper_handle_->UpdatePriority(
           cppgc::TaskPriority::kUserBlocking);
     }
     Finish();
-    }
     NotifyDone();
   }
@@ -596,16 +587,9 @@ class Sweeper::SweeperImpl final {
     MutatorThreadSweeper sweeper(&sweeper_->space_states_,
                                  sweeper_->platform_);
-    bool sweep_complete;
-    {
-      StatsCollector::EnabledScope stats_scope(
-          *sweeper_->heap_->heap(), StatsCollector::kLazySweepInIdle,
-          "idleDeltaInSeconds",
-          (deadline_in_seconds -
-           sweeper_->platform_->MonotonicallyIncreasingTime()));
-      sweep_complete = sweeper.SweepWithDeadline(deadline_in_seconds);
-    }
+    const bool sweep_complete =
+        sweeper.SweepWithDeadline(deadline_in_seconds);
     if (sweep_complete) {
       sweeper_->FinalizeSweep();
       sweeper_->NotifyDone();
@@ -636,7 +620,7 @@ class Sweeper::SweeperImpl final {
     concurrent_sweeper_handle_ = platform_->PostJob(
         cppgc::TaskPriority::kUserVisible,
-        std::make_unique<ConcurrentSweepTask>(*heap_->heap(), &space_states_));
+        std::make_unique<ConcurrentSweepTask>(&space_states_));
   }
 
   void CancelSweepers() {
...
@@ -7,7 +7,6 @@
 #include "include/cppgc/allocation.h"
 #include "include/cppgc/custom-space.h"
 #include "include/cppgc/persistent.h"
-#include "src/heap/cppgc/garbage-collector.h"
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/marker.h"
@@ -126,12 +125,7 @@ namespace internal {
 
 TEST_F(CompactorTest, NothingToCompact) {
   StartCompaction();
-  heap()->stats_collector()->NotifyMarkingStarted(
-      GarbageCollector::Config::CollectionType::kMajor,
-      GarbageCollector::Config::IsForcedGC::kNotForced);
-  heap()->stats_collector()->NotifyMarkingCompleted(0);
   FinishCompaction();
-  heap()->stats_collector()->NotifySweepingCompleted();
 }
 
 TEST_F(CompactorTest, CancelledNothingToCompact) {
...
@@ -76,7 +76,7 @@ class CppgcTracingScopesTest : public testing::TestWithHeap {
     Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
   }
 
-  void ResetDelegatingTracingController(const char* expected_name = nullptr) {
+  void ResetTestTracingController(const char* expected_name = nullptr) {
     DelegatingTracingControllerImpl::AddTraceEvent_callcount = 0u;
     DelegatingTracingControllerImpl::stored_num_args = 0;
     DelegatingTracingControllerImpl::stored_arg_names.clear();
@@ -102,10 +102,10 @@ class CppgcTracingScopesTest : public testing::TestWithHeap {
 
 TEST_F(CppgcTracingScopesTest, DisabledScope) {
   StartGC();
-  ResetDelegatingTracingController();
+  ResetTestTracingController();
   {
     StatsCollector::DisabledScope scope(
-        *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
+        *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
   }
   EXPECT_EQ(0u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
   EndGC();
@@ -114,21 +114,20 @@ TEST_F(CppgcTracingScopesTest, DisabledScope) {
 TEST_F(CppgcTracingScopesTest, EnabledScope) {
   {
     StartGC();
-    ResetDelegatingTracingController("CppGC.MarkProcessMarkingWorklist");
+    ResetTestTracingController("CppGC.MainThreadScopeForTests1");
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
     }
     EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
     EndGC();
   }
   {
     StartGC();
-    ResetDelegatingTracingController("CppGC.MarkProcessWriteBarrierWorklist");
+    ResetTestTracingController("CppGC.MainThreadScopeForTests2");
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()),
-          StatsCollector::kMarkProcessWriteBarrierWorklist);
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests2);
     }
     EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
     EndGC();
@@ -139,20 +138,20 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
   // Scopes always add 2 arguments: epoch and is_forced_gc.
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist);
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1);
     }
     EXPECT_EQ(2, DelegatingTracingControllerImpl::stored_num_args);
     EndGC();
   }
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
           "arg1", 1);
     }
     EXPECT_EQ(3, DelegatingTracingControllerImpl::stored_num_args);
@@ -160,10 +159,10 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
   }
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "arg1", 1, "arg2", 2);
     }
     EXPECT_EQ(4, DelegatingTracingControllerImpl::stored_num_args);
@@ -174,10 +173,10 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
 TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "uint_arg", 13u, "bool_arg", false);
     }
     FindArgument("uint_arg", TRACE_VALUE_TYPE_UINT, 13);
@@ -186,10 +185,10 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   }
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "neg_int_arg", -5, "pos_int_arg", 7);
     }
     FindArgument("neg_int_arg", TRACE_VALUE_TYPE_INT, -5);
@@ -198,12 +197,12 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
   }
   {
     StartGC();
-    ResetDelegatingTracingController();
+    ResetTestTracingController();
     double double_value = 1.2;
     const char* string_value = "test";
     {
       StatsCollector::EnabledScope scope(
-          *Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist,
+          *Heap::From(GetHeap()), StatsCollector::kMainThreadScopeForTests1,
          "string_arg", string_value, "double_arg", double_value);
     }
     FindArgument("string_arg", TRACE_VALUE_TYPE_STRING,
@@ -215,14 +214,10 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
 }
 
 TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
-  StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
-  stats_collector->NotifyMarkingStarted(
-      GarbageCollector::Config::CollectionType::kMajor,
-      GarbageCollector::Config::IsForcedGC::kNotForced);
-  stats_collector->NotifyMarkingCompleted(0);
-  stats_collector->NotifySweepingCompleted();
+  StartGC();
+  EndGC();
   const StatsCollector::Event& event =
-      stats_collector->GetPreviousEventForTesting();
+      Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
   for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
     EXPECT_TRUE(event.scope_data[i].IsZero());
   }
@@ -233,10 +228,7 @@ TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
 
 TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
   for (int scope_id = 0; scope_id < StatsCollector::kNumScopeIds; ++scope_id) {
-    StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
-    stats_collector->NotifyMarkingStarted(
-        GarbageCollector::Config::CollectionType::kMajor,
-        GarbageCollector::Config::IsForcedGC::kNotForced);
+    StartGC();
    DelegatingTracingControllerImpl::check_expectations = false;
    {
      StatsCollector::EnabledScope scope(
@@ -247,10 +239,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
        // Force time to progress before destroying scope.
      }
    }
-    stats_collector->NotifyMarkingCompleted(0);
-    stats_collector->NotifySweepingCompleted();
+    EndGC();
    const StatsCollector::Event& event =
-        stats_collector->GetPreviousEventForTesting();
+        Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
    for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
      if (i == scope_id)
        EXPECT_LT(v8::base::TimeDelta(), event.scope_data[i]);
@@ -266,10 +257,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
 
 TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
   for (int scope_id = 0; scope_id < StatsCollector::kNumConcurrentScopeIds;
        ++scope_id) {
-    StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
-    stats_collector->NotifyMarkingStarted(
-        GarbageCollector::Config::CollectionType::kMajor,
-        GarbageCollector::Config::IsForcedGC::kNotForced);
+    StartGC();
    DelegatingTracingControllerImpl::check_expectations = false;
    {
      StatsCollector::EnabledConcurrentScope scope(
@@ -280,10 +268,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
        // Force time to progress before destroying scope.
      }
    }
-    stats_collector->NotifyMarkingCompleted(0);
-    stats_collector->NotifySweepingCompleted();
+    EndGC();
    const StatsCollector::Event& event =
-        stats_collector->GetPreviousEventForTesting();
+        Heap::From(GetHeap())->stats_collector()->GetPreviousEventForTesting();
    for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
      EXPECT_TRUE(event.scope_data[i].IsZero());
    }
...