Commit 760e6797 authored by Omer Katz, committed by Commit Bot

cppgc: Remove StatsCollector dependency on HeapBase

Bug: chromium:1056170
Change-Id: I561166a7f1be658c5c35aa1caf8dbbbd2d720ab3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2692815
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Auto-Submit: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72754}
parent 5ec7ca95
...@@ -296,8 +296,8 @@ bool CppHeap::AdvanceTracing(double deadline_in_ms) { ...@@ -296,8 +296,8 @@ bool CppHeap::AdvanceTracing(double deadline_in_ms) {
// accounting since this scope is also accounted under an outer v8 scope. // accounting since this scope is also accounted under an outer v8 scope.
// Make sure to only account this scope once. // Make sure to only account this scope once.
cppgc::internal::StatsCollector::EnabledScope stats_scope( cppgc::internal::StatsCollector::EnabledScope stats_scope(
AsBase(), in_atomic_pause_ stats_collector(),
? cppgc::internal::StatsCollector::kAtomicMark in_atomic_pause_ ? cppgc::internal::StatsCollector::kAtomicMark
: cppgc::internal::StatsCollector::kIncrementalMark); : cppgc::internal::StatsCollector::kIncrementalMark);
const v8::base::TimeDelta deadline = const v8::base::TimeDelta deadline =
in_atomic_pause_ ? v8::base::TimeDelta::Max() in_atomic_pause_ ? v8::base::TimeDelta::Max()
...@@ -316,7 +316,7 @@ bool CppHeap::IsTracingDone() { return marking_done_; } ...@@ -316,7 +316,7 @@ bool CppHeap::IsTracingDone() { return marking_done_; }
void CppHeap::EnterFinalPause(EmbedderStackState stack_state) { void CppHeap::EnterFinalPause(EmbedderStackState stack_state) {
CHECK(!in_disallow_gc_scope()); CHECK(!in_disallow_gc_scope());
cppgc::internal::StatsCollector::EnabledScope stats_scope( cppgc::internal::StatsCollector::EnabledScope stats_scope(
AsBase(), cppgc::internal::StatsCollector::kAtomicMark); stats_collector(), cppgc::internal::StatsCollector::kAtomicMark);
in_atomic_pause_ = true; in_atomic_pause_ = true;
if (override_stack_state_) { if (override_stack_state_) {
stack_state = *override_stack_state_; stack_state = *override_stack_state_;
...@@ -333,7 +333,7 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) { ...@@ -333,7 +333,7 @@ void CppHeap::TraceEpilogue(TraceSummary* trace_summary) {
CHECK(marking_done_); CHECK(marking_done_);
{ {
cppgc::internal::StatsCollector::EnabledScope stats_scope( cppgc::internal::StatsCollector::EnabledScope stats_scope(
AsBase(), cppgc::internal::StatsCollector::kAtomicMark); stats_collector(), cppgc::internal::StatsCollector::kAtomicMark);
cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(*this); cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(*this);
marker_->LeaveAtomicPause(); marker_->LeaveAtomicPause();
} }
......
...@@ -484,7 +484,7 @@ bool Compactor::CancelIfShouldNotCompact( ...@@ -484,7 +484,7 @@ bool Compactor::CancelIfShouldNotCompact(
Compactor::CompactableSpaceHandling Compactor::CompactSpacesIfEnabled() { Compactor::CompactableSpaceHandling Compactor::CompactSpacesIfEnabled() {
if (!is_enabled_) return CompactableSpaceHandling::kSweep; if (!is_enabled_) return CompactableSpaceHandling::kSweep;
StatsCollector::DisabledScope stats_scope(*heap_.heap(), StatsCollector::DisabledScope stats_scope(heap_.heap()->stats_collector(),
StatsCollector::kAtomicCompact); StatsCollector::kAtomicCompact);
MovableReferences movable_references(*heap_.heap()); MovableReferences movable_references(*heap_.heap());
......
...@@ -73,7 +73,8 @@ ConcurrentMarkingTask::ConcurrentMarkingTask( ...@@ -73,7 +73,8 @@ ConcurrentMarkingTask::ConcurrentMarkingTask(
void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) { void ConcurrentMarkingTask::Run(JobDelegate* job_delegate) {
StatsCollector::EnabledConcurrentScope stats_scope( StatsCollector::EnabledConcurrentScope stats_scope(
concurrent_marker_.heap(), StatsCollector::kConcurrentMark); concurrent_marker_.heap().stats_collector(),
StatsCollector::kConcurrentMark);
if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists())) if (!HasWorkForConcurrentMarking(concurrent_marker_.marking_worklists()))
return; return;
...@@ -150,7 +151,7 @@ void ConcurrentMarkingTask::ProcessWorklists( ...@@ -150,7 +151,7 @@ void ConcurrentMarkingTask::ProcessWorklists(
{ {
StatsCollector::DisabledConcurrentScope stats_scope( StatsCollector::DisabledConcurrentScope stats_scope(
concurrent_marker_.heap(), concurrent_marker_.heap().stats_collector(),
StatsCollector::kConcurrentMarkProcessEphemerons); StatsCollector::kConcurrentMarkProcessEphemerons);
if (!DrainWorklistWithYielding( if (!DrainWorklistWithYielding(
job_delegate, concurrent_marking_state, job_delegate, concurrent_marking_state,
......
...@@ -68,8 +68,8 @@ HeapBase::HeapBase( ...@@ -68,8 +68,8 @@ HeapBase::HeapBase(
page_backend_( page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())), std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif #endif
stats_collector_( stats_collector_(std::make_unique<StatsCollector>(
std::make_unique<StatsCollector>(std::move(histogram_recorder))), std::move(histogram_recorder), platform_.get())),
stack_(std::make_unique<heap::base::Stack>( stack_(std::make_unique<heap::base::Stack>(
v8::base::Stack::GetStackStart())), v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)), prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
......
...@@ -209,7 +209,7 @@ void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); } ...@@ -209,7 +209,7 @@ void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
void Heap::FinalizeIncrementalGarbageCollectionIfNeeded( void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
Config::StackState stack_state) { Config::StackState stack_state) {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
*this, StatsCollector::kMarkIncrementalFinalize); stats_collector(), StatsCollector::kMarkIncrementalFinalize);
FinalizeGarbageCollection(stack_state); FinalizeGarbageCollection(stack_state);
} }
......
...@@ -63,7 +63,7 @@ void VisitRememberedSlots(HeapBase& heap, ...@@ -63,7 +63,7 @@ void VisitRememberedSlots(HeapBase& heap,
MutatorMarkingState& mutator_marking_state) { MutatorMarkingState& mutator_marking_state) {
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap, StatsCollector::kMarkVisitRememberedSets); heap.stats_collector(), StatsCollector::kMarkVisitRememberedSets);
for (void* slot : heap.remembered_slots()) { for (void* slot : heap.remembered_slots()) {
auto& slot_header = BasePage::FromInnerAddress(&heap, slot) auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
->ObjectHeaderFromInnerAddress(slot); ->ObjectHeaderFromInnerAddress(slot);
...@@ -151,7 +151,7 @@ MarkerBase::IncrementalMarkingTask::Post(cppgc::TaskRunner* runner, ...@@ -151,7 +151,7 @@ MarkerBase::IncrementalMarkingTask::Post(cppgc::TaskRunner* runner,
void MarkerBase::IncrementalMarkingTask::Run() { void MarkerBase::IncrementalMarkingTask::Run() {
if (handle_.IsCanceled()) return; if (handle_.IsCanceled()) return;
StatsCollector::EnabledScope stats_scope(marker_->heap(), StatsCollector::EnabledScope stats_scope(marker_->heap().stats_collector(),
StatsCollector::kIncrementalMark); StatsCollector::kIncrementalMark);
if (marker_->IncrementalMarkingStep(stack_state_)) { if (marker_->IncrementalMarkingStep(stack_state_)) {
...@@ -204,7 +204,8 @@ MarkerBase::~MarkerBase() { ...@@ -204,7 +204,8 @@ MarkerBase::~MarkerBase() {
void MarkerBase::StartMarking() { void MarkerBase::StartMarking() {
DCHECK(!is_marking_started_); DCHECK(!is_marking_started_);
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap(), config_.marking_type == MarkingConfig::MarkingType::kAtomic heap().stats_collector(),
config_.marking_type == MarkingConfig::MarkingType::kAtomic
? StatsCollector::kAtomicMark ? StatsCollector::kAtomicMark
: StatsCollector::kIncrementalMark); : StatsCollector::kIncrementalMark);
...@@ -214,7 +215,7 @@ void MarkerBase::StartMarking() { ...@@ -214,7 +215,7 @@ void MarkerBase::StartMarking() {
is_marking_started_ = true; is_marking_started_ = true;
if (EnterIncrementalMarkingIfNeeded(config_, heap())) { if (EnterIncrementalMarkingIfNeeded(config_, heap())) {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap(), StatsCollector::kMarkIncrementalStart); heap().stats_collector(), StatsCollector::kMarkIncrementalStart);
// Performing incremental or concurrent marking. // Performing incremental or concurrent marking.
schedule_.NotifyIncrementalMarkingStart(); schedule_.NotifyIncrementalMarkingStart();
...@@ -230,7 +231,7 @@ void MarkerBase::StartMarking() { ...@@ -230,7 +231,7 @@ void MarkerBase::StartMarking() {
} }
void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) { void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
StatsCollector::kMarkAtomicPrologue); StatsCollector::kMarkAtomicPrologue);
if (ExitIncrementalMarkingIfNeeded(config_, heap())) { if (ExitIncrementalMarkingIfNeeded(config_, heap())) {
...@@ -260,7 +261,7 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) { ...@@ -260,7 +261,7 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
} }
void MarkerBase::LeaveAtomicPause() { void MarkerBase::LeaveAtomicPause() {
StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
StatsCollector::kMarkAtomicEpilogue); StatsCollector::kMarkAtomicEpilogue);
DCHECK(!incremental_marking_handle_); DCHECK(!incremental_marking_handle_);
ResetRememberedSet(heap()); ResetRememberedSet(heap());
...@@ -278,7 +279,8 @@ void MarkerBase::LeaveAtomicPause() { ...@@ -278,7 +279,8 @@ void MarkerBase::LeaveAtomicPause() {
void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) { void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
DCHECK(is_marking_started_); DCHECK(is_marking_started_);
StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::kAtomicMark); StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
StatsCollector::kAtomicMark);
EnterAtomicPause(stack_state); EnterAtomicPause(stack_state);
CHECK(AdvanceMarkingWithLimits(v8::base::TimeDelta::Max(), SIZE_MAX)); CHECK(AdvanceMarkingWithLimits(v8::base::TimeDelta::Max(), SIZE_MAX));
mutator_marking_state_.Publish(); mutator_marking_state_.Publish();
...@@ -288,7 +290,7 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) { ...@@ -288,7 +290,7 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
void MarkerBase::ProcessWeakness() { void MarkerBase::ProcessWeakness() {
DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type); DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
StatsCollector::DisabledScope stats_scope(heap(), StatsCollector::DisabledScope stats_scope(heap().stats_collector(),
StatsCollector::kAtomicWeak); StatsCollector::kAtomicWeak);
heap().GetWeakPersistentRegion().Trace(&visitor()); heap().GetWeakPersistentRegion().Trace(&visitor());
...@@ -310,7 +312,7 @@ void MarkerBase::ProcessWeakness() { ...@@ -310,7 +312,7 @@ void MarkerBase::ProcessWeakness() {
} }
void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) { void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
StatsCollector::EnabledScope stats_scope(heap(), StatsCollector::EnabledScope stats_scope(heap().stats_collector(),
StatsCollector::kMarkVisitRoots); StatsCollector::kMarkVisitRoots);
// Reset LABs before scanning roots. LABs are cleared to allow // Reset LABs before scanning roots. LABs are cleared to allow
...@@ -320,12 +322,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) { ...@@ -320,12 +322,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
{ {
{ {
StatsCollector::DisabledScope inner_stats_scope( StatsCollector::DisabledScope inner_stats_scope(
heap(), StatsCollector::kMarkVisitPersistents); heap().stats_collector(), StatsCollector::kMarkVisitPersistents);
heap().GetStrongPersistentRegion().Trace(&visitor()); heap().GetStrongPersistentRegion().Trace(&visitor());
} }
if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) { if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) {
StatsCollector::DisabledScope inner_stats_scope( StatsCollector::DisabledScope inner_stats_scope(
heap(), StatsCollector::kMarkVisitCrossThreadPersistents); heap().stats_collector(),
StatsCollector::kMarkVisitCrossThreadPersistents);
g_process_mutex.Get().AssertHeld(); g_process_mutex.Get().AssertHeld();
heap().GetStrongCrossThreadPersistentRegion().Trace(&visitor()); heap().GetStrongCrossThreadPersistentRegion().Trace(&visitor());
} }
...@@ -333,7 +336,7 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) { ...@@ -333,7 +336,7 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
if (stack_state != MarkingConfig::StackState::kNoHeapPointers) { if (stack_state != MarkingConfig::StackState::kNoHeapPointers) {
StatsCollector::DisabledScope stack_stats_scope( StatsCollector::DisabledScope stack_stats_scope(
heap(), StatsCollector::kMarkVisitStack); heap().stats_collector(), StatsCollector::kMarkVisitStack);
heap().stack()->IteratePointers(&stack_visitor()); heap().stack()->IteratePointers(&stack_visitor());
} }
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) { if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
...@@ -379,8 +382,9 @@ bool MarkerBase::AdvanceMarkingWithLimits(v8::base::TimeDelta max_duration, ...@@ -379,8 +382,9 @@ bool MarkerBase::AdvanceMarkingWithLimits(v8::base::TimeDelta max_duration,
GetNextIncrementalStepDuration(schedule_, heap_); GetNextIncrementalStepDuration(schedule_, heap_);
} }
StatsCollector::EnabledScope deadline_scope( StatsCollector::EnabledScope deadline_scope(
heap(), StatsCollector::kMarkTransitiveClosureWithDeadline, heap().stats_collector(),
"deadline_ms", max_duration.InMillisecondsF()); StatsCollector::kMarkTransitiveClosureWithDeadline, "deadline_ms",
max_duration.InMillisecondsF());
is_done = ProcessWorklistsWithDeadline( is_done = ProcessWorklistsWithDeadline(
marked_bytes_limit, v8::base::TimeTicks::Now() + max_duration); marked_bytes_limit, v8::base::TimeTicks::Now() + max_duration);
if (with_schedule) { if (with_schedule) {
...@@ -404,7 +408,7 @@ bool MarkerBase::AdvanceMarkingWithLimits(v8::base::TimeDelta max_duration, ...@@ -404,7 +408,7 @@ bool MarkerBase::AdvanceMarkingWithLimits(v8::base::TimeDelta max_duration,
bool MarkerBase::ProcessWorklistsWithDeadline( bool MarkerBase::ProcessWorklistsWithDeadline(
size_t marked_bytes_deadline, v8::base::TimeTicks time_deadline) { size_t marked_bytes_deadline, v8::base::TimeTicks time_deadline) {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap(), StatsCollector::kMarkTransitiveClosure); heap().stats_collector(), StatsCollector::kMarkTransitiveClosure);
do { do {
if ((config_.marking_type == MarkingConfig::MarkingType::kAtomic) || if ((config_.marking_type == MarkingConfig::MarkingType::kAtomic) ||
schedule_.ShouldFlushEphemeronPairs()) { schedule_.ShouldFlushEphemeronPairs()) {
...@@ -416,7 +420,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -416,7 +420,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
// checks to guarantee the deadline is not exceeded. // checks to guarantee the deadline is not exceeded.
{ {
StatsCollector::EnabledScope inner_scope( StatsCollector::EnabledScope inner_scope(
heap(), StatsCollector::kMarkProcessBailOutObjects); heap().stats_collector(), StatsCollector::kMarkProcessBailOutObjects);
if (!DrainWorklistWithBytesAndTimeDeadline<kDefaultDeadlineCheckInterval / if (!DrainWorklistWithBytesAndTimeDeadline<kDefaultDeadlineCheckInterval /
5>( 5>(
mutator_marking_state_, marked_bytes_deadline, time_deadline, mutator_marking_state_, marked_bytes_deadline, time_deadline,
...@@ -432,7 +436,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -432,7 +436,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
{ {
StatsCollector::EnabledScope inner_scope( StatsCollector::EnabledScope inner_scope(
heap(), StatsCollector::kMarkProcessNotFullyconstructedWorklist); heap().stats_collector(),
StatsCollector::kMarkProcessNotFullyconstructedWorklist);
if (!DrainWorklistWithBytesAndTimeDeadline( if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline, mutator_marking_state_, marked_bytes_deadline, time_deadline,
mutator_marking_state_ mutator_marking_state_
...@@ -448,7 +453,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -448,7 +453,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
{ {
StatsCollector::EnabledScope inner_scope( StatsCollector::EnabledScope inner_scope(
heap(), StatsCollector::kMarkProcessMarkingWorklist); heap().stats_collector(),
StatsCollector::kMarkProcessMarkingWorklist);
if (!DrainWorklistWithBytesAndTimeDeadline( if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline, mutator_marking_state_, marked_bytes_deadline, time_deadline,
mutator_marking_state_.marking_worklist(), mutator_marking_state_.marking_worklist(),
...@@ -466,7 +472,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -466,7 +472,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
{ {
StatsCollector::EnabledScope inner_scope( StatsCollector::EnabledScope inner_scope(
heap(), StatsCollector::kMarkProcessWriteBarrierWorklist); heap().stats_collector(),
StatsCollector::kMarkProcessWriteBarrierWorklist);
if (!DrainWorklistWithBytesAndTimeDeadline( if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline, mutator_marking_state_, marked_bytes_deadline, time_deadline,
mutator_marking_state_.write_barrier_worklist(), mutator_marking_state_.write_barrier_worklist(),
...@@ -481,7 +488,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -481,7 +488,7 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
{ {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap(), StatsCollector::kMarkProcessEphemerons); heap().stats_collector(), StatsCollector::kMarkProcessEphemerons);
if (!DrainWorklistWithBytesAndTimeDeadline( if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline, mutator_marking_state_, marked_bytes_deadline, time_deadline,
mutator_marking_state_.ephemeron_pairs_for_processing_worklist(), mutator_marking_state_.ephemeron_pairs_for_processing_worklist(),
...@@ -498,7 +505,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline( ...@@ -498,7 +505,8 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
void MarkerBase::MarkNotFullyConstructedObjects() { void MarkerBase::MarkNotFullyConstructedObjects() {
StatsCollector::DisabledScope stats_scope( StatsCollector::DisabledScope stats_scope(
heap(), StatsCollector::kMarkVisitNotFullyConstructedObjects); heap().stats_collector(),
StatsCollector::kMarkVisitNotFullyConstructedObjects);
std::unordered_set<HeapObjectHeader*> objects = std::unordered_set<HeapObjectHeader*> objects =
mutator_marking_state_.not_fully_constructed_worklist().Extract(); mutator_marking_state_.not_fully_constructed_worklist().Extract();
for (HeapObjectHeader* object : objects) { for (HeapObjectHeader* object : objects) {
......
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
#include <unordered_set> #include <unordered_set>
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/stats-collector.h" #include "src/heap/cppgc/stats-collector.h"
namespace cppgc { namespace cppgc {
...@@ -22,7 +23,7 @@ void MutatorMarkingState::FlushNotFullyConstructedObjects() { ...@@ -22,7 +23,7 @@ void MutatorMarkingState::FlushNotFullyConstructedObjects() {
void MutatorMarkingState::FlushDiscoveredEphemeronPairs() { void MutatorMarkingState::FlushDiscoveredEphemeronPairs() {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
heap_, StatsCollector::kMarkFlushEphemerons); heap_.stats_collector(), StatsCollector::kMarkFlushEphemerons);
discovered_ephemeron_pairs_worklist_.Publish(); discovered_ephemeron_pairs_worklist_.Publish();
if (!discovered_ephemeron_pairs_worklist_.IsGlobalEmpty()) { if (!discovered_ephemeron_pairs_worklist_.IsGlobalEmpty()) {
ephemeron_pairs_for_processing_worklist_.Merge( ephemeron_pairs_for_processing_worklist_.Merge(
......
...@@ -49,7 +49,7 @@ void PreFinalizerHandler::RegisterPrefinalizer(PreFinalizer pre_finalizer) { ...@@ -49,7 +49,7 @@ void PreFinalizerHandler::RegisterPrefinalizer(PreFinalizer pre_finalizer) {
void PreFinalizerHandler::InvokePreFinalizers() { void PreFinalizerHandler::InvokePreFinalizers() {
StatsCollector::DisabledScope stats_scope( StatsCollector::DisabledScope stats_scope(
heap_, StatsCollector::kSweepInvokePreFinalizers); heap_.stats_collector(), StatsCollector::kSweepInvokePreFinalizers);
DCHECK(CurrentThreadIsCreationThread()); DCHECK(CurrentThreadIsCreationThread());
LivenessBroker liveness_broker = LivenessBrokerFactory::Create(); LivenessBroker liveness_broker = LivenessBrokerFactory::Create();
......
...@@ -17,8 +17,10 @@ namespace internal { ...@@ -17,8 +17,10 @@ namespace internal {
constexpr size_t StatsCollector::kAllocationThresholdBytes; constexpr size_t StatsCollector::kAllocationThresholdBytes;
StatsCollector::StatsCollector( StatsCollector::StatsCollector(
std::unique_ptr<MetricRecorder> histogram_recorder) std::unique_ptr<MetricRecorder> histogram_recorder, Platform* platform)
: metric_recorder_(std::move(histogram_recorder)) {} : metric_recorder_(std::move(histogram_recorder)), platform_(platform) {
USE(platform_);
}
void StatsCollector::RegisterObserver(AllocationObserver* observer) { void StatsCollector::RegisterObserver(AllocationObserver* observer) {
DCHECK_EQ(allocation_observers_.end(), DCHECK_EQ(allocation_observers_.end(),
......
...@@ -10,10 +10,10 @@ ...@@ -10,10 +10,10 @@
#include <vector> #include <vector>
#include "include/cppgc/platform.h"
#include "src/base/macros.h" #include "src/base/macros.h"
#include "src/base/platform/time.h" #include "src/base/platform/time.h"
#include "src/heap/cppgc/garbage-collector.h" #include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/metric-recorder.h" #include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/trace-event.h" #include "src/heap/cppgc/trace-event.h"
...@@ -154,9 +154,9 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -154,9 +154,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
public: public:
template <typename... Args> template <typename... Args>
InternalScope(HeapBase& heap, ScopeIdType scope_id, Args... args) InternalScope(StatsCollector* stats_collector, ScopeIdType scope_id,
: heap_(heap), Args... args)
stats_collector_(heap_.stats_collector()), : stats_collector_(stats_collector),
start_time_(v8::base::TimeTicks::Now()), start_time_(v8::base::TimeTicks::Now()),
scope_id_(scope_id) { scope_id_(scope_id) {
DCHECK_LE(0, scope_id_); DCHECK_LE(0, scope_id_);
...@@ -203,7 +203,6 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -203,7 +203,6 @@ class V8_EXPORT_PRIVATE StatsCollector final {
inline void IncreaseScopeTime(); inline void IncreaseScopeTime();
HeapBase& heap_;
StatsCollector* const stats_collector_; StatsCollector* const stats_collector_;
v8::base::TimeTicks start_time_; v8::base::TimeTicks start_time_;
const ScopeIdType scope_id_; const ScopeIdType scope_id_;
...@@ -240,7 +239,7 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -240,7 +239,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
// reasonably interesting sizes. // reasonably interesting sizes.
static constexpr size_t kAllocationThresholdBytes = 1024; static constexpr size_t kAllocationThresholdBytes = 1024;
explicit StatsCollector(std::unique_ptr<MetricRecorder>); StatsCollector(std::unique_ptr<MetricRecorder>, Platform*);
StatsCollector(const StatsCollector&) = delete; StatsCollector(const StatsCollector&) = delete;
StatsCollector& operator=(const StatsCollector&) = delete; StatsCollector& operator=(const StatsCollector&) = delete;
...@@ -325,6 +324,8 @@ class V8_EXPORT_PRIVATE StatsCollector final { ...@@ -325,6 +324,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
Event previous_; Event previous_;
std::unique_ptr<MetricRecorder> metric_recorder_; std::unique_ptr<MetricRecorder> metric_recorder_;
Platform* platform_;
}; };
template <typename Callback> template <typename Callback>
......
...@@ -427,7 +427,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask, ...@@ -427,7 +427,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask,
void Run(cppgc::JobDelegate* delegate) final { void Run(cppgc::JobDelegate* delegate) final {
StatsCollector::EnabledConcurrentScope stats_scope( StatsCollector::EnabledConcurrentScope stats_scope(
heap_, StatsCollector::kConcurrentSweep); heap_.stats_collector(), StatsCollector::kConcurrentSweep);
for (SpaceState& state : *states_) { for (SpaceState& state : *states_) {
while (auto page = state.unswept_pages.Pop()) { while (auto page = state.unswept_pages.Pop()) {
...@@ -531,7 +531,7 @@ class Sweeper::SweeperImpl final { ...@@ -531,7 +531,7 @@ class Sweeper::SweeperImpl final {
~SweeperImpl() { CancelSweepers(); } ~SweeperImpl() { CancelSweepers(); }
void Start(SweepingConfig config) { void Start(SweepingConfig config) {
StatsCollector::EnabledScope stats_scope(*heap_->heap(), StatsCollector::EnabledScope stats_scope(heap_->heap()->stats_collector(),
StatsCollector::kAtomicSweep); StatsCollector::kAtomicSweep);
is_in_progress_ = true; is_in_progress_ = true;
#if DEBUG #if DEBUG
...@@ -558,10 +558,10 @@ class Sweeper::SweeperImpl final { ...@@ -558,10 +558,10 @@ class Sweeper::SweeperImpl final {
// allocate new memory. // allocate new memory.
if (is_sweeping_on_mutator_thread_) return false; if (is_sweeping_on_mutator_thread_) return false;
StatsCollector::EnabledScope stats_scope(*heap_->heap(), StatsCollector::EnabledScope stats_scope(heap_->heap()->stats_collector(),
StatsCollector::kIncrementalSweep); StatsCollector::kIncrementalSweep);
StatsCollector::EnabledScope inner_scope( StatsCollector::EnabledScope inner_scope(
*heap_->heap(), StatsCollector::kSweepOnAllocation); heap_->heap()->stats_collector(), StatsCollector::kSweepOnAllocation);
MutatorThreadSweepingScope sweeping_in_progresss(*this); MutatorThreadSweepingScope sweeping_in_progresss(*this);
SpaceState& space_state = space_states_[space->index()]; SpaceState& space_state = space_states_[space->index()];
...@@ -597,8 +597,8 @@ class Sweeper::SweeperImpl final { ...@@ -597,8 +597,8 @@ class Sweeper::SweeperImpl final {
{ {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
*heap_->heap(), StatsCollector::kIncrementalSweep); heap_->heap()->stats_collector(), StatsCollector::kIncrementalSweep);
StatsCollector::EnabledScope inner_scope(*heap_->heap(), StatsCollector::EnabledScope inner_scope(heap_->heap()->stats_collector(),
StatsCollector::kSweepFinalize); StatsCollector::kSweepFinalize);
if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() && if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
concurrent_sweeper_handle_->UpdatePriorityEnabled()) { concurrent_sweeper_handle_->UpdatePriorityEnabled()) {
...@@ -698,14 +698,15 @@ class Sweeper::SweeperImpl final { ...@@ -698,14 +698,15 @@ class Sweeper::SweeperImpl final {
bool sweep_complete; bool sweep_complete;
{ {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
*sweeper_->heap_->heap(), StatsCollector::kIncrementalSweep); sweeper_->heap_->heap()->stats_collector(),
StatsCollector::kIncrementalSweep);
MutatorThreadSweeper sweeper(&sweeper_->space_states_, MutatorThreadSweeper sweeper(&sweeper_->space_states_,
sweeper_->platform_); sweeper_->platform_);
{ {
StatsCollector::EnabledScope stats_scope( StatsCollector::EnabledScope stats_scope(
*sweeper_->heap_->heap(), StatsCollector::kSweepIdleStep, sweeper_->heap_->heap()->stats_collector(),
"idleDeltaInSeconds", StatsCollector::kSweepIdleStep, "idleDeltaInSeconds",
(deadline_in_seconds - (deadline_in_seconds -
sweeper_->platform_->MonotonicallyIncreasingTime())); sweeper_->platform_->MonotonicallyIncreasingTime()));
......
...@@ -116,7 +116,7 @@ enum CategoryGroupEnabledFlags { ...@@ -116,7 +116,7 @@ enum CategoryGroupEnabledFlags {
#define INTERNAL_TRACE_EVENT_ADD(phase, category_group, name, flags, ...) \ #define INTERNAL_TRACE_EVENT_ADD(phase, category_group, name, flags, ...) \
DCHECK_NOT_NULL(name); \ DCHECK_NOT_NULL(name); \
do { \ do { \
cppgc::Platform* platform = heap_.platform(); \ cppgc::Platform* platform = stats_collector_->platform_; \
INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category_group); \ INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category_group); \
if (INTERNAL_TRACE_EVENT_CATEGORY_GROUP_ENABLED_FOR_RECORDING_MODE()) { \ if (INTERNAL_TRACE_EVENT_CATEGORY_GROUP_ENABLED_FOR_RECORDING_MODE()) { \
cppgc::internal::AddTraceEvent( \ cppgc::internal::AddTraceEvent( \
......
...@@ -60,7 +60,8 @@ void FakeAllocate(StatsCollector* stats_collector, size_t bytes) { ...@@ -60,7 +60,8 @@ void FakeAllocate(StatsCollector* stats_collector, size_t bytes) {
} // namespace } // namespace
TEST(HeapGrowingTest, ConservativeGCInvoked) { TEST(HeapGrowingTest, ConservativeGCInvoked) {
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
...@@ -73,7 +74,8 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) { ...@@ -73,7 +74,8 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
} }
TEST(HeapGrowingTest, InitialHeapSize) { TEST(HeapGrowingTest, InitialHeapSize) {
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Use larger size to avoid running into small heap optimizations. // Use larger size to avoid running into small heap optimizations.
...@@ -90,7 +92,8 @@ TEST(HeapGrowingTest, InitialHeapSize) { ...@@ -90,7 +92,8 @@ TEST(HeapGrowingTest, InitialHeapSize) {
TEST(HeapGrowingTest, ConstantGrowingFactor) { TEST(HeapGrowingTest, ConstantGrowingFactor) {
// Use larger size to avoid running into small heap optimizations. // Use larger size to avoid running into small heap optimizations.
constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease; constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
FakeGarbageCollector gc(&stats_collector); FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
...@@ -108,7 +111,8 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) { ...@@ -108,7 +111,8 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
TEST(HeapGrowingTest, SmallHeapGrowing) { TEST(HeapGrowingTest, SmallHeapGrowing) {
// Larger constant to avoid running into special handling for smaller heaps. // Larger constant to avoid running into special handling for smaller heaps.
constexpr size_t kLargeAllocation = 100 * kMB; constexpr size_t kLargeAllocation = 100 * kMB;
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
FakeGarbageCollector gc(&stats_collector); FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update. // Force GC at the first update.
...@@ -124,7 +128,8 @@ TEST(HeapGrowingTest, SmallHeapGrowing) { ...@@ -124,7 +128,8 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
} }
TEST(HeapGrowingTest, IncrementalGCStarted) { TEST(HeapGrowingTest, IncrementalGCStarted) {
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints, HeapGrowing growing(&gc, &stats_collector, constraints,
...@@ -137,7 +142,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) { ...@@ -137,7 +142,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
} }
TEST(HeapGrowingTest, IncrementalGCFinalized) { TEST(HeapGrowingTest, IncrementalGCFinalized) {
StatsCollector stats_collector(nullptr /* metric_recorder */); StatsCollector stats_collector(nullptr /* metric_recorder */,
nullptr /* platform */);
MockGarbageCollector gc; MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints; cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints, HeapGrowing growing(&gc, &stats_collector, constraints,
......
...@@ -72,7 +72,8 @@ TEST_F(MetricRecorderTest, IncrementalScopesReportedImmediately) { ...@@ -72,7 +72,8 @@ TEST_F(MetricRecorderTest, IncrementalScopesReportedImmediately) {
{ {
EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalMark_callcount); EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalMark_callcount);
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(
Heap::From(GetHeap())->stats_collector(),
StatsCollector::kIncrementalMark); StatsCollector::kIncrementalMark);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1)); v8::base::TimeDelta::FromMilliseconds(1));
...@@ -86,7 +87,8 @@ TEST_F(MetricRecorderTest, IncrementalScopesReportedImmediately) { ...@@ -86,7 +87,8 @@ TEST_F(MetricRecorderTest, IncrementalScopesReportedImmediately) {
EXPECT_EQ(0u, EXPECT_EQ(0u,
MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount); MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount);
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(
Heap::From(GetHeap())->stats_collector(),
StatsCollector::kIncrementalSweep); StatsCollector::kIncrementalSweep);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(1)); v8::base::TimeDelta::FromMilliseconds(1));
...@@ -107,28 +109,30 @@ TEST_F(MetricRecorderTest, NonIncrementlaScopesNotReportedImmediately) { ...@@ -107,28 +109,30 @@ TEST_F(MetricRecorderTest, NonIncrementlaScopesNotReportedImmediately) {
MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount = 0u; MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount = 0u;
StartGC(); StartGC();
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicMark); StatsCollector::kAtomicMark);
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicWeak); StatsCollector::kAtomicWeak);
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicCompact); StatsCollector::kAtomicCompact);
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicSweep); StatsCollector::kAtomicSweep);
} }
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentMark); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kConcurrentMark);
} }
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentSweep); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kConcurrentSweep);
} }
EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalMark_callcount); EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalMark_callcount);
EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount); EXPECT_EQ(0u, MetricRecorderImpl::CppGCMainThreadIncrementalSweep_callcount);
...@@ -152,50 +156,52 @@ TEST_F(MetricRecorderTest, CycleEndHistogramReportsCorrectValues) { ...@@ -152,50 +156,52 @@ TEST_F(MetricRecorderTest, CycleEndHistogramReportsCorrectValues) {
EndGC(1000); EndGC(1000);
StartGC(); StartGC();
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kIncrementalMark); StatsCollector::kIncrementalMark);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(10)); v8::base::TimeDelta::FromMilliseconds(10));
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kIncrementalSweep); StatsCollector::kIncrementalSweep);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(20)); v8::base::TimeDelta::FromMilliseconds(20));
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicMark); StatsCollector::kAtomicMark);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(30)); v8::base::TimeDelta::FromMilliseconds(30));
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicWeak); StatsCollector::kAtomicWeak);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(50)); v8::base::TimeDelta::FromMilliseconds(50));
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicCompact); StatsCollector::kAtomicCompact);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(60)); v8::base::TimeDelta::FromMilliseconds(60));
} }
{ {
StatsCollector::EnabledScope scope(*Heap::From(GetHeap()), StatsCollector::EnabledScope scope(Heap::From(GetHeap())->stats_collector(),
StatsCollector::kAtomicSweep); StatsCollector::kAtomicSweep);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(70)); v8::base::TimeDelta::FromMilliseconds(70));
} }
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentMark); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kConcurrentMark);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(80)); v8::base::TimeDelta::FromMilliseconds(80));
} }
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), StatsCollector::kConcurrentSweep); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kConcurrentSweep);
scope.DecreaseStartTimeForTesting( scope.DecreaseStartTimeForTesting(
v8::base::TimeDelta::FromMilliseconds(100)); v8::base::TimeDelta::FromMilliseconds(100));
} }
......
...@@ -109,7 +109,8 @@ TEST_F(CppgcTracingScopesTest, DisabledScope) { ...@@ -109,7 +109,8 @@ TEST_F(CppgcTracingScopesTest, DisabledScope) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::DisabledScope scope( StatsCollector::DisabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kMarkProcessMarkingWorklist);
} }
EXPECT_EQ(0u, DelegatingTracingControllerImpl::AddTraceEvent_callcount); EXPECT_EQ(0u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
EndGC(); EndGC();
...@@ -121,7 +122,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScope) { ...@@ -121,7 +122,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScope) {
ResetDelegatingTracingController("CppGC.MarkProcessMarkingWorklist"); ResetDelegatingTracingController("CppGC.MarkProcessMarkingWorklist");
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kMarkProcessMarkingWorklist);
} }
EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount); EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
EndGC(); EndGC();
...@@ -131,7 +133,7 @@ TEST_F(CppgcTracingScopesTest, EnabledScope) { ...@@ -131,7 +133,7 @@ TEST_F(CppgcTracingScopesTest, EnabledScope) {
ResetDelegatingTracingController("CppGC.MarkProcessWriteBarrierWorklist"); ResetDelegatingTracingController("CppGC.MarkProcessWriteBarrierWorklist");
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), Heap::From(GetHeap())->stats_collector(),
StatsCollector::kMarkProcessWriteBarrierWorklist); StatsCollector::kMarkProcessWriteBarrierWorklist);
} }
EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount); EXPECT_EQ(2u, DelegatingTracingControllerImpl::AddTraceEvent_callcount);
...@@ -146,7 +148,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) { ...@@ -146,7 +148,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist); Heap::From(GetHeap())->stats_collector(),
StatsCollector::kMarkProcessMarkingWorklist);
} }
EXPECT_EQ(2, DelegatingTracingControllerImpl::stored_num_args); EXPECT_EQ(2, DelegatingTracingControllerImpl::stored_num_args);
EndGC(); EndGC();
...@@ -156,8 +159,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) { ...@@ -156,8 +159,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist, Heap::From(GetHeap())->stats_collector(),
"arg1", 1); StatsCollector::kMarkProcessMarkingWorklist, "arg1", 1);
} }
EXPECT_EQ(3, DelegatingTracingControllerImpl::stored_num_args); EXPECT_EQ(3, DelegatingTracingControllerImpl::stored_num_args);
EndGC(); EndGC();
...@@ -167,8 +170,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) { ...@@ -167,8 +170,8 @@ TEST_F(CppgcTracingScopesTest, EnabledScopeWithArgs) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist, Heap::From(GetHeap())->stats_collector(),
"arg1", 1, "arg2", 2); StatsCollector::kMarkProcessMarkingWorklist, "arg1", 1, "arg2", 2);
} }
EXPECT_EQ(4, DelegatingTracingControllerImpl::stored_num_args); EXPECT_EQ(4, DelegatingTracingControllerImpl::stored_num_args);
EndGC(); EndGC();
...@@ -181,8 +184,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) { ...@@ -181,8 +184,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist, Heap::From(GetHeap())->stats_collector(),
"uint_arg", 13u, "bool_arg", false); StatsCollector::kMarkProcessMarkingWorklist, "uint_arg", 13u,
"bool_arg", false);
} }
FindArgument("uint_arg", TRACE_VALUE_TYPE_UINT, 13); FindArgument("uint_arg", TRACE_VALUE_TYPE_UINT, 13);
FindArgument("bool_arg", TRACE_VALUE_TYPE_BOOL, false); FindArgument("bool_arg", TRACE_VALUE_TYPE_BOOL, false);
...@@ -193,8 +197,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) { ...@@ -193,8 +197,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
ResetDelegatingTracingController(); ResetDelegatingTracingController();
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist, Heap::From(GetHeap())->stats_collector(),
"neg_int_arg", -5, "pos_int_arg", 7); StatsCollector::kMarkProcessMarkingWorklist, "neg_int_arg", -5,
"pos_int_arg", 7);
} }
FindArgument("neg_int_arg", TRACE_VALUE_TYPE_INT, -5); FindArgument("neg_int_arg", TRACE_VALUE_TYPE_INT, -5);
FindArgument("pos_int_arg", TRACE_VALUE_TYPE_INT, 7); FindArgument("pos_int_arg", TRACE_VALUE_TYPE_INT, 7);
...@@ -207,8 +212,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) { ...@@ -207,8 +212,9 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) {
const char* string_value = "test"; const char* string_value = "test";
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), StatsCollector::kMarkProcessMarkingWorklist, Heap::From(GetHeap())->stats_collector(),
"string_arg", string_value, "double_arg", double_value); StatsCollector::kMarkProcessMarkingWorklist, "string_arg",
string_value, "double_arg", double_value);
} }
FindArgument("string_arg", TRACE_VALUE_TYPE_STRING, FindArgument("string_arg", TRACE_VALUE_TYPE_STRING,
reinterpret_cast<uint64_t>(string_value)); reinterpret_cast<uint64_t>(string_value));
...@@ -245,7 +251,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) { ...@@ -245,7 +251,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
DelegatingTracingControllerImpl::check_expectations = false; DelegatingTracingControllerImpl::check_expectations = false;
{ {
StatsCollector::EnabledScope scope( StatsCollector::EnabledScope scope(
*Heap::From(GetHeap()), Heap::From(GetHeap())->stats_collector(),
static_cast<StatsCollector::ScopeId>(scope_id)); static_cast<StatsCollector::ScopeId>(scope_id));
v8::base::TimeTicks time = v8::base::TimeTicks::Now(); v8::base::TimeTicks time = v8::base::TimeTicks::Now();
while (time == v8::base::TimeTicks::Now()) { while (time == v8::base::TimeTicks::Now()) {
...@@ -278,7 +284,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) { ...@@ -278,7 +284,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
DelegatingTracingControllerImpl::check_expectations = false; DelegatingTracingControllerImpl::check_expectations = false;
{ {
StatsCollector::EnabledConcurrentScope scope( StatsCollector::EnabledConcurrentScope scope(
*Heap::From(GetHeap()), Heap::From(GetHeap())->stats_collector(),
static_cast<StatsCollector::ConcurrentScopeId>(scope_id)); static_cast<StatsCollector::ConcurrentScopeId>(scope_id));
v8::base::TimeTicks time = v8::base::TimeTicks::Now(); v8::base::TimeTicks time = v8::base::TimeTicks::Now();
while (time == v8::base::TimeTicks::Now()) { while (time == v8::base::TimeTicks::Now()) {
......
...@@ -18,7 +18,8 @@ constexpr size_t kMinReportedSize = StatsCollector::kAllocationThresholdBytes; ...@@ -18,7 +18,8 @@ constexpr size_t kMinReportedSize = StatsCollector::kAllocationThresholdBytes;
class StatsCollectorTest : public ::testing::Test { class StatsCollectorTest : public ::testing::Test {
public: public:
StatsCollectorTest() : stats(nullptr /* metric_recorder */) {} StatsCollectorTest()
: stats(nullptr /* metric_recorder */, nullptr /* platform */) {}
void FakeAllocate(size_t bytes) { void FakeAllocate(size_t bytes) {
stats.NotifyAllocation(bytes); stats.NotifyAllocation(bytes);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment