Commit 252b84b0 authored by lpy, committed by Commit bot

[Tracing] Fix runtime call stats tracing for GC.

This patch correctly adds runtime call stats tracing for GC, makes
--runtime-call-stats and tracing mutually exclusive with tracing taking
precedence when both modes are on, and uses only one runtime call stats
table in Counters.

BUG=v8:5089

Review-Url: https://codereview.chromium.org/2313193002
Cr-Commit-Position: refs/heads/master@{#39295}
parent e4273007
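
For context, a minimal sketch of the intended behaviour after this change (not code from this patch; the helpers CollectRuntimeCallStats and ReportRuntimeCallStats are hypothetical names): samples accumulate into the single runtime_call_stats() table whenever either mode is on, and when tracing is enabled the table is dumped into the trace instead of being printed to stdout.

bool CollectRuntimeCallStats() {
  // Either mode routes samples into the one runtime_call_stats() table.
  return TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
         FLAG_runtime_call_stats;
}

void ReportRuntimeCallStats(Isolate* isolate) {
  RuntimeCallStats* stats = isolate->counters()->runtime_call_stats();
  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED()) {
    // Tracing takes precedence: CallStatsScopedTracer attaches stats->Dump()
    // to the end trace event (see the tracing hunks below).
    return;
  }
  if (FLAG_runtime_call_stats) {
    // Flag-only mode keeps the existing behaviour: print the table to stdout
    // (see the Isolate::DumpAndResetCompilationStats() hunk below).
    OFStream os(stdout);
    stats->Print(os);
    stats->Reset();
  }
}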
@@ -5964,8 +5964,8 @@ Local<Context> NewContext(v8::Isolate* external_isolate,
                           v8::MaybeLocal<Value> global_object,
                           size_t context_snapshot_index) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external_isolate);
-  LOG_API(isolate, Context, New);
   TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.NewContext");
+  LOG_API(isolate, Context, New);
   i::HandleScope scope(isolate);
   ExtensionConfiguration no_extensions;
   if (extensions == NULL) extensions = &no_extensions;
...
@@ -11,13 +11,21 @@ namespace v8 {
 namespace internal {
 
 RuntimeCallTimerScope::RuntimeCallTimerScope(
-    HeapObject* heap_object, RuntimeCallStats::CounterId counter_id) {
-  if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
-    isolate_ = heap_object->GetIsolate();
+    Isolate* isolate, RuntimeCallStats::CounterId counter_id) {
+  if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+                  FLAG_runtime_call_stats)) {
+    isolate_ = isolate;
     RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(), &timer_,
                             counter_id);
   }
-  // TODO(lpy): Add a tracing equivalent for the runtime call stats.
+}
+
+RuntimeCallTimerScope::RuntimeCallTimerScope(
+    HeapObject* heap_object, RuntimeCallStats::CounterId counter_id) {
+  if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+                  FLAG_runtime_call_stats)) {
+    RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id);
+  }
 }
 
 }  // namespace internal
...
@@ -810,18 +810,14 @@ class RuntimeCallStats {
   bool in_use_;
 };
 
 #define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \
   do {                                                  \
-    if (FLAG_runtime_call_stats) {                      \
-      RuntimeCallStats::CorrectCurrentCounterId(        \
-          isolate->counters()->runtime_call_stats(),    \
-          &RuntimeCallStats::counter_name);             \
-    }                                                   \
-    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { \
-      RuntimeCallStats::CorrectCurrentCounterId(                         \
-          isolate->counters()->tracing_runtime_call_stats(),             \
-          &RuntimeCallStats::counter_name);                              \
-    }                                                                    \
+    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || \
+                    FLAG_runtime_call_stats)) {                         \
+      RuntimeCallStats::CorrectCurrentCounterId(                        \
+          isolate->counters()->runtime_call_stats(),                    \
+          &RuntimeCallStats::counter_name);                             \
+    }                                                                   \
   } while (false)
 
 #define TRACE_HANDLER_STATS(isolate, counter_name) \
@@ -1172,9 +1168,6 @@ class Counters {
   void ResetCounters();
   void ResetHistograms();
   RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }
-  RuntimeCallStats* tracing_runtime_call_stats() {
-    return &tracing_runtime_call_stats_;
-  }
 
  private:
 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
@@ -1237,7 +1230,6 @@ class Counters {
 #undef SC
 
   RuntimeCallStats runtime_call_stats_;
-  RuntimeCallStats tracing_runtime_call_stats_;
 
   friend class Isolate;
@@ -1251,45 +1243,22 @@ class Counters {
 class RuntimeCallTimerScope {
  public:
   inline RuntimeCallTimerScope(Isolate* isolate,
-                               RuntimeCallStats::CounterId counter_id) {
-    if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
-      isolate_ = isolate;
-      RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(),
-                              &timer_, counter_id);
-    }
-    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) {
-      isolate_for_tracing_ = isolate;
-      RuntimeCallStats::Enter(
-          isolate_for_tracing_->counters()->tracing_runtime_call_stats(),
-          &trace_event_timer_, counter_id);
-    }
-  }
+                               RuntimeCallStats::CounterId counter_id);
   // This constructor is here just to avoid calling GetIsolate() when the
   // stats are disabled and the isolate is not directly available.
   inline RuntimeCallTimerScope(HeapObject* heap_object,
                                RuntimeCallStats::CounterId counter_id);
 
   inline ~RuntimeCallTimerScope() {
-    if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
+    if (V8_UNLIKELY(isolate_ != nullptr)) {
       RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(),
                               &timer_);
     }
-    if (V8_UNLIKELY(isolate_for_tracing_ != nullptr)) {
-      RuntimeCallStats::Leave(
-          isolate_for_tracing_->counters()->tracing_runtime_call_stats(),
-          &trace_event_timer_);
-      isolate_for_tracing_ = nullptr;
-    }
   }
 
  private:
-  Isolate* isolate_;
-  // TODO(lpy): --runtime-call-stats and tracing should be mutually exclusive
-  // with tracing taking precendence. We need to add checks, and use a single
-  // isolate reference and a timer for both.
-  Isolate* isolate_for_tracing_ = nullptr;
+  Isolate* isolate_ = nullptr;
   RuntimeCallTimer timer_;
-  RuntimeCallTimer trace_event_timer_;
 };
 
 }  // namespace internal
...
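
As an illustrative usage note (the call site below is hypothetical, not part of this patch): both constructors now feed the same table, and the HeapObject* overload exists only to defer GetIsolate() until stats or tracing are actually enabled.

void CountedOperation(Isolate* isolate, HeapObject* object) {
  // Direct isolate: enters the counter for the duration of the scope.
  RuntimeCallTimerScope direct_scope(isolate, &RuntimeCallStats::GC);
  // HeapObject overload: avoids object->GetIsolate() unless
  // --runtime-call-stats or runtime call stats tracing is enabled.
  RuntimeCallTimerScope deferred_scope(object, &RuntimeCallStats::GC);
  // ... measured work ...
}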
@@ -25,23 +25,23 @@ GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
     : tracer_(tracer), scope_(scope) {
   start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
     RuntimeCallStats::Enter(
         tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_,
         &RuntimeCallStats::GC);
   }
-  // TODO(lpy): Add a tracing equivalent for the runtime call stats.
 }
 
 GCTracer::Scope::~Scope() {
   tracer_->AddScopeSample(
       scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
     RuntimeCallStats::Leave(
         tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_);
   }
-  // TODO(lpy): Add a tracing equivalent for the runtime call stats.
 }
 
 const char* GCTracer::Scope::Name(ScopeId id) {
@@ -217,11 +217,11 @@ void GCTracer::Start(GarbageCollector collector,
                                                        committed_memory);
   counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory);
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
     RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(),
                             &timer_, &RuntimeCallStats::GC);
   }
-  // TODO(lpy): Add a tracing equivalent for the runtime call stats.
 }
 
 void GCTracer::MergeBaseline(const Event& baseline) {
@@ -323,11 +323,11 @@ void GCTracer::Stop(GarbageCollector collector) {
   }
 
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
     RuntimeCallStats::Leave(heap_->isolate()->counters()->runtime_call_stats(),
                             &timer_);
   }
-  // TODO(lpy): Add a tracing equivalent for the runtime call stats.
 }
...
@@ -8,7 +8,7 @@
 #include <cmath>
 
 #include "src/base/platform/platform.h"
-#include "src/counters.h"
+#include "src/counters-inl.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking-inl.h"
 #include "src/heap/mark-compact.h"
...
@@ -2569,7 +2569,8 @@ void Isolate::DumpAndResetCompilationStats() {
   turbo_statistics_ = nullptr;
   delete hstatistics_;
   hstatistics_ = nullptr;
-  if (FLAG_runtime_call_stats) {
+  if (FLAG_runtime_call_stats &&
+      !TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED()) {
     OFStream os(stdout);
     counters()->runtime_call_stats()->Print(os);
     counters()->runtime_call_stats()->Reset();
...
@@ -28,9 +28,9 @@ void CallStatsScopedTracer::AddEndTraceEvent() {
         TRACE_EVENT_PHASE_END, p_data_->category_group_enabled, p_data_->name,
         v8::internal::tracing::kGlobalScope, v8::internal::tracing::kNoId,
         v8::internal::tracing::kNoId, TRACE_EVENT_FLAG_NONE,
-        "runtime-call-stats", TRACE_STR_COPY(p_data_->isolate->counters()
-                                                 ->tracing_runtime_call_stats()
-                                                 ->Dump()));
+        "runtime-call-stats",
+        TRACE_STR_COPY(
+            p_data_->isolate->counters()->runtime_call_stats()->Dump()));
   } else {
     v8::internal::tracing::AddTraceEvent(
         TRACE_EVENT_PHASE_END, p_data_->category_group_enabled, p_data_->name,
@@ -46,7 +46,7 @@ void CallStatsScopedTracer::Initialize(v8::internal::Isolate* isolate,
   data_.category_group_enabled = category_group_enabled;
   data_.name = name;
   p_data_ = &data_;
-  RuntimeCallStats* table = isolate->counters()->tracing_runtime_call_stats();
+  RuntimeCallStats* table = isolate->counters()->runtime_call_stats();
   has_parent_scope_ = table->InUse();
   if (!has_parent_scope_) table->Reset();
   v8::internal::tracing::AddTraceEvent(
...