Commit adda4c5f authored by Omer Katz, committed by Commit Bot

cppgc: Add UMA support

This CL introduces the cppgc::internal::MetricRecorder API, which is
similar to the v8::metrics::Recorder API and is used by cppgc to report
histogram samples to embedders. Embedders that want to collect
histograms should implement the API and provide an instance of it on
heap creation.

CppHeap uses an adaptor class that implements the MetricRecorder API
and forwards the reported samples to the corresponding
v8::metrics::Recorder.
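
For illustration, here is a minimal sketch of the embedder side of this
API, assuming the cppgc::internal::MetricRecorder interface added in
src/heap/cppgc/metric-recorder.h below. The histogram names and the
ReportToHistogram sink are hypothetical placeholders for whatever
histogram machinery the embedder uses (Chromium's adaptor instead
forwards to v8::metrics::Recorder):

#include <cstdint>
#include <cstdio>

#include "src/heap/cppgc/metric-recorder.h"

// Hypothetical histogram sink; stands in for the embedder's own machinery.
void ReportToHistogram(const char* name, int64_t sample_ms) {
  std::printf("%s: %lld ms\n", name, static_cast<long long>(sample_ms));
}

class EmbedderMetricRecorder final : public cppgc::internal::MetricRecorder {
 public:
  void AddMainThreadEvent(const CppGCCycleEndMetricSamples& event) override {
    // Raw per-phase samples for the whole cycle, reported once at cycle end.
    ReportToHistogram("Embedder.CppGC.AtomicMark", event.atomic_mark_ms);
    ReportToHistogram("Embedder.CppGC.AtomicSweep", event.atomic_sweep_ms);
    ReportToHistogram("Embedder.CppGC.ConcurrentMark", event.concurrent_mark_ms);
  }
  void AddMainThreadEvent(const CppGCIncrementalMarkMetricSample& event) override {
    // Incremental steps are reported individually, as they happen.
    ReportToHistogram("Embedder.CppGC.IncrementalMarkStep", event.duration_ms);
  }
  void AddMainThreadEvent(const CppGCIncrementalSweepMetricSample& event) override {
    ReportToHistogram("Embedder.CppGC.IncrementalSweepStep", event.duration_ms);
  }
};

An instance of such a recorder is passed on heap creation (e.g. via the
new CppHeap constructor parameter below) or injected in tests via
StatsCollector::SetMetricRecorderForTesting().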

The API uses three data structures: two for incremental steps that need
to be reported as they occur (marking and sweeping) and one for the end
of a GC cycle that aggregates statistics over the entire cycle.
The data structures only provide the "raw" samples (e.g. atomic mark
time, incremental mark time, etc.). The embedder is expected to compute
aggregate histograms on its own (e.g. overall marking time).
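
As a sketch of that aggregation (one plausible combination; field names
as defined in metric-recorder.h below), an overall marking time could be
derived from the raw cycle-end samples like so:

int64_t OverallMarkMs(
    const cppgc::internal::MetricRecorder::CppGCCycleEndMetricSamples& event) {
  // Sum the atomic, incremental, and concurrent marking components.
  return event.atomic_mark_ms + event.incremental_mark_ms +
         event.concurrent_mark_ms;
}

A similar sum over the sweep fields would yield an overall sweeping time.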

Bug: chromium:1056170
Change-Id: If63ef50a29a21594f654edb83084598980d221ce
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2642258
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72256}
parent 987f0b75
......@@ -4646,6 +4646,7 @@ v8_source_set("cppgc_base") {
"src/heap/cppgc/marking-visitor.h",
"src/heap/cppgc/marking-worklists.cc",
"src/heap/cppgc/marking-worklists.h",
"src/heap/cppgc/metric-recorder.h",
"src/heap/cppgc/name-trait.cc",
"src/heap/cppgc/object-allocator.cc",
"src/heap/cppgc/object-allocator.h",
......
......@@ -169,11 +169,13 @@ void UnifiedHeapMarker::AddObject(void* object) {
CppHeap::CppHeap(
v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces)
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
std::unique_ptr<cppgc::internal::MetricRecorder> metric_recorder)
: cppgc::internal::HeapBase(std::make_shared<CppgcPlatformAdapter>(isolate),
custom_spaces,
cppgc::internal::HeapBase::StackSupport::
kSupportsConservativeStackScan),
kSupportsConservativeStackScan,
std::move(metric_recorder)),
isolate_(*reinterpret_cast<Isolate*>(isolate)) {
if (isolate_.heap_profiler()) {
isolate_.heap_profiler()->AddBuildEmbedderGraphCallback(
......
......@@ -28,9 +28,11 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
return static_cast<const CppHeap*>(heap);
}
CppHeap(v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>&
custom_spaces);
CppHeap(
v8::Isolate* isolate,
const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
std::unique_ptr<cppgc::internal::MetricRecorder> metric_recorder =
nullptr);
~CppHeap() final;
CppHeap(const CppHeap&) = delete;
......@@ -56,8 +58,6 @@ class V8_EXPORT_PRIVATE CppHeap final : public cppgc::internal::HeapBase,
// finalization is not needed) thus this method is left empty.
}
void PostGarbageCollection() final {}
Isolate& isolate_;
bool marking_done_ = false;
bool is_in_final_pause_ = false;
......
......@@ -56,7 +56,8 @@ class ObjectSizeCounter : private HeapVisitor<ObjectSizeCounter> {
HeapBase::HeapBase(
std::shared_ptr<cppgc::Platform> platform,
const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
StackSupport stack_support)
StackSupport stack_support,
std::unique_ptr<MetricRecorder> histogram_recorder)
: raw_heap_(this, custom_spaces),
platform_(std::move(platform)),
#if defined(CPPGC_CAGED_HEAP)
......@@ -66,7 +67,8 @@ HeapBase::HeapBase(
page_backend_(
std::make_unique<PageBackend>(platform_->GetPageAllocator())),
#endif
stats_collector_(std::make_unique<StatsCollector>()),
stats_collector_(
std::make_unique<StatsCollector>(std::move(histogram_recorder))),
stack_(std::make_unique<heap::base::Stack>(
v8::base::Stack::GetStackStart())),
prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
......
......@@ -14,6 +14,7 @@
#include "src/base/macros.h"
#include "src/heap/cppgc/compactor.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/sweeper.h"
......@@ -79,7 +80,8 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
HeapBase(std::shared_ptr<cppgc::Platform> platform,
const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
StackSupport stack_support);
StackSupport stack_support,
std::unique_ptr<MetricRecorder> histogram_recorder);
virtual ~HeapBase();
HeapBase(const HeapBase&) = delete;
......@@ -153,9 +155,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
void AdvanceIncrementalGarbageCollectionOnAllocationIfNeeded();
// Notifies the heap that a GC is done.
virtual void PostGarbageCollection() = 0;
// Termination drops all roots (clears them out) and runs garbage collections
// in a bounded fixed point loop until no new objects are created in
// destructors. Exceeding the loop bound results in a crash.
......
......@@ -86,7 +86,8 @@ void CheckConfig(Heap::Config config, Heap::MarkingType marking_support,
Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
cppgc::Heap::HeapOptions options)
: HeapBase(platform, options.custom_spaces, options.stack_support),
: HeapBase(platform, options.custom_spaces, options.stack_support,
nullptr /* metric_recorder */),
gc_invoker_(this, platform_.get(), options.stack_support),
growing_(&gc_invoker_, stats_collector_.get(),
options.resource_constraints, options.marking_support,
......@@ -196,8 +197,6 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) {
sweeper_.NotifyDoneIfNeeded();
}
void Heap::PostGarbageCollection() {}
void Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); }
void Heap::FinalizeIncrementalGarbageCollectionIfNeeded(
......
......@@ -46,8 +46,6 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase,
void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final;
void PostGarbageCollection() final;
Config config_;
GCInvoker gc_invoker_;
HeapGrowing growing_;
......
......@@ -287,8 +287,8 @@ void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) {
void MarkerBase::ProcessWeakness() {
DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);
StatsCollector::DisabledScope stats_scope(
heap(), StatsCollector::kWeakInvokeCallbacks);
StatsCollector::DisabledScope stats_scope(heap(),
StatsCollector::kAtomicWeak);
heap().GetWeakPersistentRegion().Trace(&visitor());
// Processing cross-thread handles requires taking the process lock.
......
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CPPGC_METRIC_RECORDER_H_
#define V8_HEAP_CPPGC_METRIC_RECORDER_H_
#include <cstdint>
namespace cppgc {
namespace internal {
class StatsCollector;
/**
* Base class used for reporting GC statistics histograms. Embedders interested
* in collecting histograms should implement the virtual AddMainThreadEvent
* methods below and pass an instance of the implementation during Heap
* creation.
*/
class MetricRecorder {
public:
struct CppGCCycleEndMetricSamples {
int64_t atomic_mark_ms;
int64_t atomic_weak_ms;
int64_t atomic_compact_ms;
int64_t atomic_sweep_ms;
int64_t incremental_mark_ms;
int64_t incremental_sweep_ms;
int64_t concurrent_mark_ms;
int64_t concurrent_sweep_ms;
};
struct CppGCIncrementalMarkMetricSample {
int64_t duration_ms;
};
struct CppGCIncrementalSweepMetricSample {
int64_t duration_ms;
};
virtual ~MetricRecorder() = default;
virtual void AddMainThreadEvent(const CppGCCycleEndMetricSamples& event) {}
virtual void AddMainThreadEvent(
const CppGCIncrementalMarkMetricSample& event) {}
virtual void AddMainThreadEvent(
const CppGCIncrementalSweepMetricSample& event) {}
};
} // namespace internal
} // namespace cppgc
#endif // V8_HEAP_CPPGC_METRIC_RECORDER_H_
......@@ -8,6 +8,7 @@
#include <cmath>
#include "src/base/logging.h"
#include "src/heap/cppgc/metric-recorder.h"
namespace cppgc {
namespace internal {
......@@ -15,6 +16,10 @@ namespace internal {
// static
constexpr size_t StatsCollector::kAllocationThresholdBytes;
StatsCollector::StatsCollector(
std::unique_ptr<MetricRecorder> histogram_recorder)
: metric_recorder_(std::move(histogram_recorder)) {}
void StatsCollector::RegisterObserver(AllocationObserver* observer) {
DCHECK_EQ(allocation_observers_.end(),
std::find(allocation_observers_.begin(),
......@@ -114,6 +119,18 @@ void StatsCollector::NotifySweepingCompleted() {
gc_state_ = GarbageCollectionState::kNotRunning;
previous_ = std::move(current_);
current_ = Event();
if (metric_recorder_) {
MetricRecorder::CppGCCycleEndMetricSamples event{
previous_.scope_data[kAtomicMark].InMilliseconds(),
previous_.scope_data[kAtomicWeak].InMilliseconds(),
previous_.scope_data[kAtomicCompact].InMilliseconds(),
previous_.scope_data[kAtomicSweep].InMilliseconds(),
previous_.scope_data[kIncrementalMark].InMilliseconds(),
previous_.scope_data[kIncrementalSweep].InMilliseconds(),
previous_.concurrent_scope_data[kConcurrentMark],
previous_.concurrent_scope_data[kConcurrentSweep]};
metric_recorder_->AddMainThreadEvent(event);
}
}
size_t StatsCollector::allocated_object_size() const {
......@@ -129,5 +146,25 @@ size_t StatsCollector::allocated_object_size() const {
allocated_bytes_since_end_of_marking_);
}
void StatsCollector::RecordHistogramSample(ScopeId scope_id_,
v8::base::TimeDelta time) {
switch (scope_id_) {
case kIncrementalMark: {
MetricRecorder::CppGCIncrementalMarkMetricSample event{
time.InMilliseconds()};
metric_recorder_->AddMainThreadEvent(event);
break;
}
case kIncrementalSweep: {
MetricRecorder::CppGCIncrementalSweepMetricSample event{
time.InMilliseconds()};
metric_recorder_->AddMainThreadEvent(event);
break;
}
default:
break;
}
}
} // namespace internal
} // namespace cppgc
......@@ -14,17 +14,23 @@
#include "src/base/platform/time.h"
#include "src/heap/cppgc/garbage-collector.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/trace-event.h"
namespace cppgc {
namespace internal {
// Histogram scopes contribute to histograms as well as to traces and metrics.
// Other scopes contribute only to traces and metrics.
#define CPPGC_FOR_ALL_HISTOGRAM_SCOPES(V) \
V(AtomicMark) \
V(AtomicWeak) \
V(AtomicCompact) \
V(AtomicSweep) \
V(IncrementalMark) \
V(IncrementalSweep)
#define CPPGC_FOR_ALL_SCOPES(V) \
V(AtomicMark) \
V(AtomicSweep) \
V(AtomicCompact) \
V(IncrementalMark) \
V(IncrementalSweep) \
V(MarkIncrementalStart) \
V(MarkIncrementalFinalize) \
V(MarkAtomicPrologue) \
......@@ -43,17 +49,17 @@ namespace internal {
V(MarkVisitCrossThreadPersistents) \
V(MarkVisitStack) \
V(MarkVisitRememberedSets) \
V(WeakInvokeCallbacks) \
V(SweepInvokePreFinalizers) \
V(SweepIdleStep) \
V(SweepOnAllocation) \
V(SweepFinalize)
#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) \
V(ConcurrentMarkProcessEphemerons) \
V(ConcurrentMark) \
#define CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(V) \
V(ConcurrentMark) \
V(ConcurrentSweep)
#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) V(ConcurrentMarkProcessEphemerons)
// Sink for various time and memory statistics.
class V8_EXPORT_PRIVATE StatsCollector final {
using CollectionType = GarbageCollector::Config::CollectionType;
......@@ -66,6 +72,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
enum ScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
CPPGC_FOR_ALL_HISTOGRAM_SCOPES(CPPGC_DECLARE_ENUM)
kNumHistogramScopeIds,
CPPGC_FOR_ALL_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
kNumScopeIds,
......@@ -73,6 +81,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
enum ConcurrentScopeId {
#define CPPGC_DECLARE_ENUM(name) k##name,
CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(CPPGC_DECLARE_ENUM)
kNumHistogramConcurrentScopeIds,
CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_DECLARE_ENUM)
#undef CPPGC_DECLARE_ENUM
kNumConcurrentScopeIds
......@@ -85,8 +95,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
struct Event final {
V8_EXPORT_PRIVATE explicit Event();
v8::base::TimeDelta scope_data[kNumScopeIds];
v8::base::Atomic32 concurrent_scope_data[kNumConcurrentScopeIds]{0};
v8::base::TimeDelta scope_data[kNumHistogramScopeIds];
v8::base::Atomic32 concurrent_scope_data[kNumHistogramConcurrentScopeIds]{
0};
size_t epoch = -1;
CollectionType collection_type = CollectionType::kMajor;
......@@ -106,6 +117,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
case k##name: \
return type == CollectionType::kMajor ? "CppGC." #name \
: "CppGC." #name ".Minor";
CPPGC_FOR_ALL_HISTOGRAM_SCOPES(CPPGC_CASE)
CPPGC_FOR_ALL_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
default:
......@@ -120,6 +132,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
case k##name: \
return type == CollectionType::kMajor ? "CppGC." #name \
: "CppGC." #name ".Minor";
CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(CPPGC_CASE)
CPPGC_FOR_ALL_CONCURRENT_SCOPES(CPPGC_CASE)
#undef CPPGC_CASE
default:
......@@ -149,6 +162,10 @@ class V8_EXPORT_PRIVATE StatsCollector final {
scope_category == kMutatorThread
? static_cast<int>(kNumScopeIds)
: static_cast<int>(kNumConcurrentScopeIds));
DCHECK_NE(static_cast<int>(scope_id_),
scope_category == kMutatorThread
? static_cast<int>(kNumHistogramScopeIds)
: static_cast<int>(kNumHistogramConcurrentScopeIds));
StartTrace(args...);
}
......@@ -160,6 +177,10 @@ class V8_EXPORT_PRIVATE StatsCollector final {
InternalScope(const InternalScope&) = delete;
InternalScope& operator=(const InternalScope&) = delete;
void DecreaseStartTimeForTesting(v8::base::TimeDelta delta) {
start_time_ -= delta;
}
private:
void* operator new(size_t, void*) = delete;
void* operator new(size_t) = delete;
......@@ -182,7 +203,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
HeapBase& heap_;
StatsCollector* const stats_collector_;
const v8::base::TimeTicks start_time_;
v8::base::TimeTicks start_time_;
const ScopeIdType scope_id_;
};
......@@ -217,7 +238,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
// reasonably interesting sizes.
static constexpr size_t kAllocationThresholdBytes = 1024;
StatsCollector() = default;
explicit StatsCollector(std::unique_ptr<MetricRecorder>);
StatsCollector(const StatsCollector&) = delete;
StatsCollector& operator=(const StatsCollector&) = delete;
......@@ -248,6 +269,11 @@ class V8_EXPORT_PRIVATE StatsCollector final {
const Event& GetPreviousEventForTesting() const { return previous_; }
void SetMetricRecorderForTesting(
std::unique_ptr<MetricRecorder> histogram_recorder) {
metric_recorder_ = std::move(histogram_recorder);
}
private:
enum class GarbageCollectionState : uint8_t {
kNotRunning,
......@@ -255,6 +281,9 @@ class V8_EXPORT_PRIVATE StatsCollector final {
kSweeping
};
void RecordHistogramSample(ScopeId, v8::base::TimeDelta);
void RecordHistogramSample(ConcurrentScopeId, v8::base::TimeDelta) {}
// Invokes |callback| for all registered observers.
template <typename Callback>
void ForAllAllocationObservers(Callback callback);
......@@ -285,6 +314,8 @@ class V8_EXPORT_PRIVATE StatsCollector final {
Event current_;
// The previous GC event which is populated at NotifySweepingFinished.
Event previous_;
std::unique_ptr<MetricRecorder> metric_recorder_;
};
template <typename Callback>
......@@ -371,9 +402,17 @@ template <StatsCollector::TraceCategory trace_category,
void StatsCollector::InternalScope<trace_category,
scope_category>::IncreaseScopeTime() {
DCHECK_NE(GarbageCollectionState::kNotRunning, stats_collector_->gc_state_);
// Only record top level scopes.
if (static_cast<int>(scope_id_) >=
(scope_category == kMutatorThread
? static_cast<int>(kNumHistogramScopeIds)
: static_cast<int>(kNumHistogramConcurrentScopeIds)))
return;
v8::base::TimeDelta time = v8::base::TimeTicks::Now() - start_time_;
if (scope_category == StatsCollector::ScopeContext::kMutatorThread) {
stats_collector_->current_.scope_data[scope_id_] += time;
if (stats_collector_->metric_recorder_)
stats_collector_->RecordHistogramSample(scope_id_, time);
return;
}
// scope_category == StatsCollector::ScopeContext::kConcurrentThread
......
......@@ -572,8 +572,6 @@ class Sweeper::SweeperImpl final {
DCHECK(notify_done_pending_);
notify_done_pending_ = false;
stats_collector_->NotifySweepingCompleted();
// Notify the heap that GC is finished.
heap_->heap()->PostGarbageCollection();
}
void NotifyDoneIfNeeded() {
......
......@@ -103,6 +103,7 @@ v8_source_set("cppgc_unittests_sources") {
"heap/cppgc/marking-verifier-unittest.cc",
"heap/cppgc/marking-visitor-unittest.cc",
"heap/cppgc/member-unittest.cc",
"heap/cppgc/metric-recorder-unittest.cc",
"heap/cppgc/minor-gc-unittest.cc",
"heap/cppgc/name-trait-unittest.cc",
"heap/cppgc/object-start-bitmap-unittest.cc",
......
......@@ -60,7 +60,7 @@ void FakeAllocate(StatsCollector* stats_collector, size_t bytes) {
} // namespace
TEST(HeapGrowingTest, ConservativeGCInvoked) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
......@@ -73,7 +73,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) {
}
TEST(HeapGrowingTest, InitialHeapSize) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
// Use larger size to avoid running into small heap optimizations.
......@@ -90,7 +90,7 @@ TEST(HeapGrowingTest, InitialHeapSize) {
TEST(HeapGrowingTest, ConstantGrowingFactor) {
// Use larger size to avoid running into small heap optimizations.
constexpr size_t kObjectSize = 10 * HeapGrowing::kMinLimitIncrease;
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
......@@ -108,7 +108,7 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
TEST(HeapGrowingTest, SmallHeapGrowing) {
// Larger constant to avoid running into special handling for smaller heaps.
constexpr size_t kLargeAllocation = 100 * kMB;
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
FakeGarbageCollector gc(&stats_collector);
cppgc::Heap::ResourceConstraints constraints;
// Force GC at the first update.
......@@ -124,7 +124,7 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
}
TEST(HeapGrowingTest, IncrementalGCStarted) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints,
......@@ -137,7 +137,7 @@ TEST(HeapGrowingTest, IncrementalGCStarted) {
}
TEST(HeapGrowingTest, IncrementalGCFinalized) {
StatsCollector stats_collector;
StatsCollector stats_collector(nullptr /* metric_recorder */);
MockGarbageCollector gc;
cppgc::Heap::ResourceConstraints constraints;
HeapGrowing growing(&gc, &stats_collector, constraints,
......
This diff is collapsed.
......@@ -227,16 +227,17 @@ TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
EXPECT_EQ(0, event.concurrent_scope_data[i]);
}
}
TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumScopeIds; ++scope_id) {
for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds;
++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
......@@ -255,21 +256,21 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
if (i == scope_id)
EXPECT_LT(v8::base::TimeDelta(), event.scope_data[i]);
else
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
EXPECT_EQ(0, event.concurrent_scope_data[i]);
}
}
}
TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
for (int scope_id = 0; scope_id < StatsCollector::kNumConcurrentScopeIds;
++scope_id) {
for (int scope_id = 0;
scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) {
StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector();
stats_collector->NotifyMarkingStarted(
GarbageCollector::Config::CollectionType::kMajor,
......@@ -288,10 +289,10 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) {
stats_collector->NotifySweepingCompleted();
const StatsCollector::Event& event =
stats_collector->GetPreviousEventForTesting();
for (int i = 0; i < StatsCollector::kNumScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) {
EXPECT_TRUE(event.scope_data[i].IsZero());
}
for (int i = 0; i < StatsCollector::kNumConcurrentScopeIds; ++i) {
for (int i = 0; i < StatsCollector::kNumHistogramConcurrentScopeIds; ++i) {
if (i == scope_id)
EXPECT_LT(0, event.concurrent_scope_data[i]);
else
......
......@@ -18,6 +18,8 @@ constexpr size_t kMinReportedSize = StatsCollector::kAllocationThresholdBytes;
class StatsCollectorTest : public ::testing::Test {
public:
StatsCollectorTest() : stats(nullptr /* metric_recorder */) {}
void FakeAllocate(size_t bytes) {
stats.NotifyAllocation(bytes);
stats.NotifySafePointForConservativeCollection();
......