Commit ed7155c6 authored by Alexei Filippov, committed by Commit Bot

[runtime-call-stats] Fix a long standing crash in RuntimeCallStats::Leave

Make sure there is a matching Leave for each Enter. Otherwise it ends up
with a dead stack-allocated object in the timer chain.

The patch incorporates the following fixes:
- RuntimeCallTimerScope::RuntimeCallTimerScope(HeapObject* ...) created a
  temporary local object instead of delegating to the overloaded constructor.
- InterpreterCompilationJob::ExecuteJobImpl made an implicit call to the default
  copy constructor of TimerScope, which led to a single Enter being made per two
  Leaves.
- InterpreterCompilationJob::FinalizeJobImpl was calling RuntimeCallTimerScope
  from a background thread, which caused timer scopes to become unbalanced.
- RuntimeCallTimerScope constructors were put into counters-inl.h, which is not
  included in most usages of RCS. That led to suboptimal performance.
- Added thread check into Enter and Leave

BUG=chromium:669329

Change-Id: Ib5cff0e02e0b6c8b56a03ca3a5ebc37d93fcde55
Reviewed-on: https://chromium-review.googlesource.com/637307
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Alexei Filippov <alph@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47666}
parent 402f5151
...@@ -7,8 +7,6 @@ ...@@ -7,8 +7,6 @@
#include "src/counters.h" #include "src/counters.h"
#include "src/isolate.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
...@@ -64,23 +62,8 @@ base::TimeTicks RuntimeCallTimer::Now() { ...@@ -64,23 +62,8 @@ base::TimeTicks RuntimeCallTimer::Now() {
} }
RuntimeCallTimerScope::RuntimeCallTimerScope( RuntimeCallTimerScope::RuntimeCallTimerScope(
Isolate* isolate, RuntimeCallStats::CounterId counter_id) { HeapObject* heap_object, RuntimeCallStats::CounterId counter_id)
if (V8_UNLIKELY(FLAG_runtime_stats)) { : RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id) {}
Initialize(isolate->counters()->runtime_call_stats(), counter_id);
}
}
RuntimeCallTimerScope::RuntimeCallTimerScope(
HeapObject* heap_object, RuntimeCallStats::CounterId counter_id) {
RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id);
}
RuntimeCallTimerScope::RuntimeCallTimerScope(
RuntimeCallStats* stats, RuntimeCallStats::CounterId counter_id) {
if (V8_UNLIKELY(FLAG_runtime_stats)) {
Initialize(stats, counter_id);
}
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -420,7 +420,8 @@ void RuntimeCallTimer::Snapshot() { ...@@ -420,7 +420,8 @@ void RuntimeCallTimer::Snapshot() {
Resume(now); Resume(now);
} }
RuntimeCallStats::RuntimeCallStats() : in_use_(false) { RuntimeCallStats::RuntimeCallStats()
: in_use_(false), thread_id_(ThreadId::Current()) {
static const char* const kNames[] = { static const char* const kNames[] = {
#define CALL_BUILTIN_COUNTER(name) "GC_" #name, #define CALL_BUILTIN_COUNTER(name) "GC_" #name,
FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER) // FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER) //
...@@ -476,6 +477,7 @@ const int RuntimeCallStats::counters_count = ...@@ -476,6 +477,7 @@ const int RuntimeCallStats::counters_count =
// static // static
void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer, void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
CounterId counter_id) { CounterId counter_id) {
DCHECK(ThreadId::Current().Equals(stats->thread_id()));
RuntimeCallCounter* counter = &(stats->*counter_id); RuntimeCallCounter* counter = &(stats->*counter_id);
DCHECK(counter->name() != nullptr); DCHECK(counter->name() != nullptr);
timer->Start(counter, stats->current_timer_.Value()); timer->Start(counter, stats->current_timer_.Value());
...@@ -485,26 +487,11 @@ void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer, ...@@ -485,26 +487,11 @@ void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
// static // static
void RuntimeCallStats::Leave(RuntimeCallStats* stats, RuntimeCallTimer* timer) { void RuntimeCallStats::Leave(RuntimeCallStats* stats, RuntimeCallTimer* timer) {
if (stats->current_timer_.Value() == timer) { DCHECK(ThreadId::Current().Equals(stats->thread_id()));
stats->current_timer_.SetValue(timer->Stop()); CHECK(stats->current_timer_.Value() == timer);
} else { stats->current_timer_.SetValue(timer->Stop());
// Must be a Threading cctest. Walk the chain of Timers to find the RuntimeCallTimer* cur_timer = stats->current_timer_.Value();
// buried one that's leaving. We don't care about keeping nested timings stats->current_counter_.SetValue(cur_timer ? cur_timer->counter() : nullptr);
// accurate, just avoid crashing by keeping the chain intact.
RuntimeCallTimer* next = stats->current_timer_.Value();
while (next && next->parent() != timer) next = next->parent();
if (next == nullptr) return;
next->set_parent(timer->Stop());
}
{
RuntimeCallTimer* cur_timer = stats->current_timer_.Value();
if (cur_timer == nullptr) {
stats->current_counter_.SetValue(nullptr);
} else {
stats->current_counter_.SetValue(cur_timer->counter());
}
}
} }
void RuntimeCallStats::Add(RuntimeCallStats* other) { void RuntimeCallStats::Add(RuntimeCallStats* other) {
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
#include "src/base/platform/time.h" #include "src/base/platform/time.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/heap-symbols.h" #include "src/heap-symbols.h"
#include "src/isolate.h"
#include "src/objects.h" #include "src/objects.h"
#include "src/runtime/runtime.h" #include "src/runtime/runtime.h"
#include "src/tracing/trace-event.h" #include "src/tracing/trace-event.h"
...@@ -934,6 +935,7 @@ class RuntimeCallStats final : public ZoneObject { ...@@ -934,6 +935,7 @@ class RuntimeCallStats final : public ZoneObject {
V8_EXPORT_PRIVATE void Print(std::ostream& os); V8_EXPORT_PRIVATE void Print(std::ostream& os);
V8_NOINLINE void Dump(v8::tracing::TracedValue* value); V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
ThreadId thread_id() const { return thread_id_; }
RuntimeCallTimer* current_timer() { return current_timer_.Value(); } RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
RuntimeCallCounter* current_counter() { return current_counter_.Value(); } RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
bool InUse() { return in_use_; } bool InUse() { return in_use_; }
...@@ -945,6 +947,7 @@ class RuntimeCallStats final : public ZoneObject { ...@@ -945,6 +947,7 @@ class RuntimeCallStats final : public ZoneObject {
base::AtomicValue<RuntimeCallCounter*> current_counter_; base::AtomicValue<RuntimeCallCounter*> current_counter_;
// Used to track nested tracing scopes. // Used to track nested tracing scopes.
bool in_use_; bool in_use_;
ThreadId thread_id_;
}; };
#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_name) \ #define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_name) \
...@@ -970,7 +973,11 @@ class RuntimeCallTimerScope { ...@@ -970,7 +973,11 @@ class RuntimeCallTimerScope {
inline RuntimeCallTimerScope(HeapObject* heap_object, inline RuntimeCallTimerScope(HeapObject* heap_object,
RuntimeCallStats::CounterId counter_id); RuntimeCallStats::CounterId counter_id);
inline RuntimeCallTimerScope(RuntimeCallStats* stats, inline RuntimeCallTimerScope(RuntimeCallStats* stats,
RuntimeCallStats::CounterId counter_id); RuntimeCallStats::CounterId counter_id) {
if (V8_LIKELY(!FLAG_runtime_stats || stats == nullptr)) return;
stats_ = stats;
RuntimeCallStats::Enter(stats_, &timer_, counter_id);
}
inline ~RuntimeCallTimerScope() { inline ~RuntimeCallTimerScope() {
if (V8_UNLIKELY(stats_ != nullptr)) { if (V8_UNLIKELY(stats_ != nullptr)) {
...@@ -979,14 +986,10 @@ class RuntimeCallTimerScope { ...@@ -979,14 +986,10 @@ class RuntimeCallTimerScope {
} }
private: private:
V8_INLINE void Initialize(RuntimeCallStats* stats,
RuntimeCallStats::CounterId counter_id) {
stats_ = stats;
RuntimeCallStats::Enter(stats_, &timer_, counter_id);
}
RuntimeCallStats* stats_ = nullptr; RuntimeCallStats* stats_ = nullptr;
RuntimeCallTimer timer_; RuntimeCallTimer timer_;
DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
}; };
#define HISTOGRAM_RANGE_LIST(HR) \ #define HISTOGRAM_RANGE_LIST(HR) \
...@@ -1480,6 +1483,13 @@ void HistogramTimer::Stop() { ...@@ -1480,6 +1483,13 @@ void HistogramTimer::Stop() {
TimedHistogram::Stop(&timer_, counters()->isolate()); TimedHistogram::Stop(&timer_, counters()->isolate());
} }
RuntimeCallTimerScope::RuntimeCallTimerScope(
Isolate* isolate, RuntimeCallStats::CounterId counter_id) {
if (V8_LIKELY(!FLAG_runtime_stats)) return;
stats_ = isolate->counters()->runtime_call_stats();
RuntimeCallStats::Enter(stats_, &timer_, counter_id);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -35,32 +35,24 @@ class InterpreterCompilationJob final : public CompilationJob { ...@@ -35,32 +35,24 @@ class InterpreterCompilationJob final : public CompilationJob {
private: private:
class TimerScope final { class TimerScope final {
public: public:
TimerScope(RuntimeCallStats* stats, RuntimeCallStats::CounterId counter_id) explicit TimerScope(RuntimeCallCounter* counter)
: stats_(stats) { : runtime_stats_enabled_(FLAG_runtime_stats) {
if (V8_UNLIKELY(FLAG_runtime_stats)) { if (V8_UNLIKELY(runtime_stats_enabled_ && counter != nullptr)) {
RuntimeCallStats::Enter(stats_, &timer_, counter_id);
}
}
explicit TimerScope(RuntimeCallCounter* counter) : stats_(nullptr) {
if (V8_UNLIKELY(FLAG_runtime_stats)) {
timer_.Start(counter, nullptr); timer_.Start(counter, nullptr);
} }
} }
~TimerScope() { ~TimerScope() {
if (V8_UNLIKELY(FLAG_runtime_stats)) { if (V8_UNLIKELY(runtime_stats_enabled_)) {
if (stats_) { timer_.Stop();
RuntimeCallStats::Leave(stats_, &timer_);
} else {
timer_.Stop();
}
} }
} }
private: private:
RuntimeCallStats* stats_;
RuntimeCallTimer timer_; RuntimeCallTimer timer_;
bool runtime_stats_enabled_;
DISALLOW_COPY_AND_ASSIGN(TimerScope);
}; };
BytecodeGenerator* generator() { return &generator_; } BytecodeGenerator* generator() { return &generator_; }
...@@ -164,10 +156,12 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::PrepareJobImpl() { ...@@ -164,10 +156,12 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::PrepareJobImpl() {
} }
InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() { InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
TimerScope runtimeTimer = TimerScope runtimeTimer(
executed_on_background_thread() executed_on_background_thread() ? &background_execute_counter_ : nullptr);
? TimerScope(&background_execute_counter_) RuntimeCallTimerScope runtimeTimerScope(
: TimerScope(runtime_call_stats_, &RuntimeCallStats::CompileIgnition); !executed_on_background_thread() ? runtime_call_stats_ : nullptr,
&RuntimeCallStats::CompileIgnition);
// TODO(lpy): add support for background compilation RCS trace. // TODO(lpy): add support for background compilation RCS trace.
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition"); TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
...@@ -186,8 +180,9 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl() { ...@@ -186,8 +180,9 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl() {
&background_execute_counter_); &background_execute_counter_);
} }
RuntimeCallTimerScope runtimeTimer( RuntimeCallTimerScope runtimeTimerScope(
runtime_call_stats_, &RuntimeCallStats::CompileIgnitionFinalization); !executed_on_background_thread() ? runtime_call_stats_ : nullptr,
&RuntimeCallStats::CompileIgnitionFinalization);
Handle<BytecodeArray> bytecodes = generator()->FinalizeBytecode(isolate()); Handle<BytecodeArray> bytecodes = generator()->FinalizeBytecode(isolate());
if (generator()->HasStackOverflow()) { if (generator()->HasStackOverflow()) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment