Commit ed7155c6 authored by Alexei Filippov, committed by Commit Bot

[runtime-call-stats] Fix a long standing crash in RuntimeCallStats::Leave

Make sure there is a matching Leave for each Enter. Otherwise a dead
stack-allocated timer object is left in the timer chain.

The patch incorporates the following fixes (short illustrative sketches are
interleaved with the diff below):
- RuntimeCallTimerScope::RuntimeCallTimerScope(HeapObject* ...) created a
  temporary object instead of delegating to the overloaded constructor.
- InterpreterCompilationJob::ExecuteJobImpl made an implicit call to the
  default copy constructor of TimerScope, which led to a single Enter being
  paired with two Leaves.
- InterpreterCompilationJob::FinalizeJobImpl was calling RuntimeCallTimerScope
  from a background thread, which caused timer scopes to become unbalanced.
- RuntimeCallTimerScope constructors were defined in counters-inl.h, which is
  not included by most users of RCS. That led to suboptimal performance.
- Added a thread check to Enter and Leave.

BUG=chromium:669329

Change-Id: Ib5cff0e02e0b6c8b56a03ca3a5ebc37d93fcde55
Reviewed-on: https://chromium-review.googlesource.com/637307
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Alexei Filippov <alph@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47666}
parent 402f5151
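Background for the fix: RuntimeCallStats keeps the currently running counters as an intrusive stack of stack-allocated RuntimeCallTimer objects chained through their parent pointers, and RuntimeCallTimerScope pushes onto that chain in its constructor (Enter) and pops in its destructor (Leave). The sketch below is a deliberately simplified model of that mechanism, not V8 code (Stats, Timer and Scope are made-up names), showing why a missing or duplicated Leave corrupts the chain:

```cpp
#include <cassert>

// Simplified model of the RCS timer chain; Stats/Timer/Scope are
// hypothetical stand-ins, not the V8 classes.
struct Timer {
  Timer* parent = nullptr;
};

struct Stats {
  Timer* current = nullptr;  // innermost live timer (top of the chain)

  void Enter(Timer* t) {  // push: t lives on some caller's stack frame
    t->parent = current;
    current = t;
  }
  void Leave(Timer* t) {   // pop: only the innermost timer may leave
    assert(current == t);  // the strict check this patch introduces
    current = t->parent;
  }
};

struct Scope {  // RAII wrapper: Enter in the ctor, Leave in the dtor
  explicit Scope(Stats* s) : stats(s) { stats->Enter(&timer); }
  ~Scope() { stats->Leave(&timer); }
  Stats* stats;
  Timer timer;  // stack-allocated; dies with the enclosing scope
};

int main() {
  Stats stats;
  {
    Scope outer(&stats);
    {
      Scope inner(&stats);
    }  // inner leaves first, chain stays intact
  }    // outer leaves, chain is empty again

  // If any Enter is not matched by exactly one Leave (the bugs listed in
  // the commit message), stats.current keeps pointing at a destroyed stack
  // object and the next Leave dereferences garbage -- the crash fixed here.
  return stats.current == nullptr ? 0 : 1;
}
```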
@@ -7,8 +7,6 @@
#include "src/counters.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
@@ -64,23 +62,8 @@ base::TimeTicks RuntimeCallTimer::Now() {
 }
 
-RuntimeCallTimerScope::RuntimeCallTimerScope(
-    Isolate* isolate, RuntimeCallStats::CounterId counter_id) {
-  if (V8_UNLIKELY(FLAG_runtime_stats)) {
-    Initialize(isolate->counters()->runtime_call_stats(), counter_id);
-  }
-}
-
-RuntimeCallTimerScope::RuntimeCallTimerScope(
-    HeapObject* heap_object, RuntimeCallStats::CounterId counter_id) {
-  RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id);
-}
-
 RuntimeCallTimerScope::RuntimeCallTimerScope(
-    RuntimeCallStats* stats, RuntimeCallStats::CounterId counter_id) {
-  if (V8_UNLIKELY(FLAG_runtime_stats)) {
-    Initialize(stats, counter_id);
-  }
-}
+    HeapObject* heap_object, RuntimeCallStats::CounterId counter_id)
+    : RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id) {}
 
 }  // namespace internal
 }  // namespace v8
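The hunk above is the first bullet from the commit message: writing `RuntimeCallTimerScope(heap_object->GetIsolate(), counter_id);` inside a constructor body does not initialize `*this`; it constructs and immediately destroys an unnamed temporary, so the enclosing scope never registers its own timer. Real delegation has to go in the member-initializer list, as the replacement lines do. A minimal standalone illustration (Buggy/Fixed are hypothetical types, not V8 code):

```cpp
#include <iostream>

// Hypothetical illustration of the delegating-constructor pitfall.
struct Buggy {
  int value = 0;
  explicit Buggy(int v) : value(v) {}
  // Looks like delegation, but this statement just builds a temporary
  // Buggy(42) and destroys it; the object under construction keeps 0.
  Buggy() { Buggy(42); }
};

struct Fixed {
  int value = 0;
  explicit Fixed(int v) : value(v) {}
  // C++11 delegation must be spelled in the member-initializer list.
  Fixed() : Fixed(42) {}
};

int main() {
  std::cout << Buggy().value << " vs " << Fixed().value << "\n";  // 0 vs 42
}
```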
@@ -420,7 +420,8 @@ void RuntimeCallTimer::Snapshot() {
   Resume(now);
 }
 
-RuntimeCallStats::RuntimeCallStats() : in_use_(false) {
+RuntimeCallStats::RuntimeCallStats()
+    : in_use_(false), thread_id_(ThreadId::Current()) {
   static const char* const kNames[] = {
 #define CALL_BUILTIN_COUNTER(name) "GC_" #name,
       FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER)  //
@@ -476,6 +477,7 @@ const int RuntimeCallStats::counters_count =
 // static
 void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
                              CounterId counter_id) {
+  DCHECK(ThreadId::Current().Equals(stats->thread_id()));
   RuntimeCallCounter* counter = &(stats->*counter_id);
   DCHECK(counter->name() != nullptr);
   timer->Start(counter, stats->current_timer_.Value());
@@ -485,26 +487,11 @@ void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
 // static
 void RuntimeCallStats::Leave(RuntimeCallStats* stats, RuntimeCallTimer* timer) {
-  if (stats->current_timer_.Value() == timer) {
-    stats->current_timer_.SetValue(timer->Stop());
-  } else {
-    // Must be a Threading cctest. Walk the chain of Timers to find the
-    // buried one that's leaving. We don't care about keeping nested timings
-    // accurate, just avoid crashing by keeping the chain intact.
-    RuntimeCallTimer* next = stats->current_timer_.Value();
-    while (next && next->parent() != timer) next = next->parent();
-    if (next == nullptr) return;
-    next->set_parent(timer->Stop());
-  }
-
-  {
-    RuntimeCallTimer* cur_timer = stats->current_timer_.Value();
-    if (cur_timer == nullptr) {
-      stats->current_counter_.SetValue(nullptr);
-    } else {
-      stats->current_counter_.SetValue(cur_timer->counter());
-    }
-  }
+  DCHECK(ThreadId::Current().Equals(stats->thread_id()));
+  CHECK(stats->current_timer_.Value() == timer);
+  stats->current_timer_.SetValue(timer->Stop());
+
+  RuntimeCallTimer* cur_timer = stats->current_timer_.Value();
+  stats->current_counter_.SetValue(cur_timer ? cur_timer->counter() : nullptr);
 }
void RuntimeCallStats::Add(RuntimeCallStats* other) {
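Two things change in Leave above: the fallback that walked the chain to splice out a buried timer is gone (it only papered over unbalanced Enter/Leave pairs), and both Enter and Leave now DCHECK that they run on the thread that constructed the RuntimeCallStats object, whose id is captured once in the constructor (see the thread_id_ member added to counters.h further down). A minimal sketch of that ownership check, using std::thread::id instead of V8's ThreadId purely for illustration:

```cpp
#include <cassert>
#include <thread>

// Illustration of the thread-affinity check; not V8 code.
class SingleThreadStats {
 public:
  SingleThreadStats() : owner_(std::this_thread::get_id()) {}

  void Enter() { AssertOwnerThread(); /* push a timer ... */ }
  void Leave() { AssertOwnerThread(); /* pop a timer ... */ }

 private:
  void AssertOwnerThread() const {
    // Fail fast on the offending call instead of corrupting the timer
    // chain and crashing much later on the owning thread.
    assert(std::this_thread::get_id() == owner_);
  }
  std::thread::id owner_;
};

int main() {
  SingleThreadStats stats;
  stats.Enter();
  stats.Leave();  // fine: same thread that constructed the object

  std::thread worker([&stats] {
    // stats.Enter();  // would trip the assert: wrong thread
    (void)stats;
  });
  worker.join();
}
```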
@@ -12,6 +12,7 @@
 #include "src/base/platform/time.h"
 #include "src/globals.h"
 #include "src/heap-symbols.h"
+#include "src/isolate.h"
 #include "src/objects.h"
 #include "src/runtime/runtime.h"
 #include "src/tracing/trace-event.h"
@@ -934,6 +935,7 @@ class RuntimeCallStats final : public ZoneObject {
   V8_EXPORT_PRIVATE void Print(std::ostream& os);
   V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
 
+  ThreadId thread_id() const { return thread_id_; }
   RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
   RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
   bool InUse() { return in_use_; }
@@ -945,6 +947,7 @@ class RuntimeCallStats final : public ZoneObject {
   base::AtomicValue<RuntimeCallCounter*> current_counter_;
   // Used to track nested tracing scopes.
   bool in_use_;
+  ThreadId thread_id_;
 };
 
 #define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_name) \
@@ -970,7 +973,11 @@ class RuntimeCallTimerScope {
   inline RuntimeCallTimerScope(HeapObject* heap_object,
                                RuntimeCallStats::CounterId counter_id);
   inline RuntimeCallTimerScope(RuntimeCallStats* stats,
-                               RuntimeCallStats::CounterId counter_id);
+                               RuntimeCallStats::CounterId counter_id) {
+    if (V8_LIKELY(!FLAG_runtime_stats || stats == nullptr)) return;
+    stats_ = stats;
+    RuntimeCallStats::Enter(stats_, &timer_, counter_id);
+  }
 
   inline ~RuntimeCallTimerScope() {
     if (V8_UNLIKELY(stats_ != nullptr)) {
@@ -979,14 +986,10 @@ class RuntimeCallTimerScope {
   }
 
  private:
-  V8_INLINE void Initialize(RuntimeCallStats* stats,
-                            RuntimeCallStats::CounterId counter_id) {
-    stats_ = stats;
-    RuntimeCallStats::Enter(stats_, &timer_, counter_id);
-  }
-
   RuntimeCallStats* stats_ = nullptr;
   RuntimeCallTimer timer_;
+
+  DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
 };
#define HISTOGRAM_RANGE_LIST(HR) \
@@ -1480,6 +1483,13 @@ void HistogramTimer::Stop() {
   TimedHistogram::Stop(&timer_, counters()->isolate());
 }
 
+RuntimeCallTimerScope::RuntimeCallTimerScope(
+    Isolate* isolate, RuntimeCallStats::CounterId counter_id) {
+  if (V8_LIKELY(!FLAG_runtime_stats)) return;
+  stats_ = isolate->counters()->runtime_call_stats();
+  RuntimeCallStats::Enter(stats_, &timer_, counter_id);
+}
+
 }  // namespace internal
 }  // namespace v8
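Both constructors now follow the same shape: bail out immediately while --runtime-call-stats is off, and, for the RuntimeCallStats* overload shown in counters.h above, also treat a null stats pointer as a no-op scope, which is what lets call sites pass nullptr on background threads. Keeping the RuntimeCallStats* body inline in the header means the fast path no longer depends on counters-inl.h, which is the performance bullet in the commit message. A simplified, self-contained model of that contract (hypothetical names, not V8 code):

```cpp
#include <cassert>

// Simplified model of the nullptr-tolerant, header-inlined scope.
struct StatsModel {
  int depth = 0;
  static void Enter(StatsModel* s) { ++s->depth; }
  static void Leave(StatsModel* s) { --s->depth; }
};

bool runtime_stats_flag = true;  // stands in for FLAG_runtime_stats

class ScopeModel {
 public:
  explicit ScopeModel(StatsModel* stats) {
    // Fast path: stats disabled, or no stats object for this caller.
    if (!runtime_stats_flag || stats == nullptr) return;
    stats_ = stats;
    StatsModel::Enter(stats_);
  }
  ~ScopeModel() {
    // Leave only if the constructor actually entered.
    if (stats_ != nullptr) StatsModel::Leave(stats_);
  }

 private:
  StatsModel* stats_ = nullptr;
};

int main() {
  StatsModel stats;
  {
    ScopeModel counted(&stats);   // enters
    ScopeModel ignored(nullptr);  // no-op: never enters or leaves
  }
  assert(stats.depth == 0);  // every Enter got exactly one Leave
}
```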
@@ -35,32 +35,24 @@ class InterpreterCompilationJob final : public CompilationJob {
  private:
   class TimerScope final {
    public:
-    TimerScope(RuntimeCallStats* stats, RuntimeCallStats::CounterId counter_id)
-        : stats_(stats) {
-      if (V8_UNLIKELY(FLAG_runtime_stats)) {
-        RuntimeCallStats::Enter(stats_, &timer_, counter_id);
-      }
-    }
-
-    explicit TimerScope(RuntimeCallCounter* counter) : stats_(nullptr) {
-      if (V8_UNLIKELY(FLAG_runtime_stats)) {
+    explicit TimerScope(RuntimeCallCounter* counter)
+        : runtime_stats_enabled_(FLAG_runtime_stats) {
+      if (V8_UNLIKELY(runtime_stats_enabled_ && counter != nullptr)) {
         timer_.Start(counter, nullptr);
       }
     }
 
     ~TimerScope() {
-      if (V8_UNLIKELY(FLAG_runtime_stats)) {
-        if (stats_) {
-          RuntimeCallStats::Leave(stats_, &timer_);
-        } else {
-          timer_.Stop();
-        }
+      if (V8_UNLIKELY(runtime_stats_enabled_)) {
+        timer_.Stop();
       }
     }
 
    private:
-    RuntimeCallStats* stats_;
     RuntimeCallTimer timer_;
+    bool runtime_stats_enabled_;
+
+    DISALLOW_COPY_AND_ASSIGN(TimerScope);
   };
 
   BytecodeGenerator* generator() { return &generator_; }
@@ -164,10 +156,12 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::PrepareJobImpl() {
 }
 
 InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
-  TimerScope runtimeTimer =
-      executed_on_background_thread()
-          ? TimerScope(&background_execute_counter_)
-          : TimerScope(runtime_call_stats_, &RuntimeCallStats::CompileIgnition);
+  TimerScope runtimeTimer(
+      executed_on_background_thread() ? &background_execute_counter_ : nullptr);
+  RuntimeCallTimerScope runtimeTimerScope(
+      !executed_on_background_thread() ? runtime_call_stats_ : nullptr,
+      &RuntimeCallStats::CompileIgnition);
   // TODO(lpy): add support for background compilation RCS trace.
   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
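The removed lines above are the second bullet from the commit message: `TimerScope runtimeTimer = cond ? TimerScope(...) : TimerScope(...);` copy-initializes the named scope from a temporary, and before C++17 that copy is allowed to go through the implicitly generated copy constructor, after which the temporary's destructor and the named object's destructor each run a Leave for the single Enter. The DISALLOW_COPY_AND_ASSIGN added in the previous hunk turns such a copy into a compile error. A deterministic, self-contained demonstration of the same imbalance, with hypothetical names:

```cpp
#include <iostream>

// Hypothetical sketch: the destructor has a side effect ("Leave"), so an
// implicit copy silently doubles it.
struct Balance {
  int enters = 0;
  int leaves = 0;
};

struct ScopeSketch {
  explicit ScopeSketch(Balance* b) : balance(b) { ++balance->enters; }
  ~ScopeSketch() { ++balance->leaves; }
  // No copy constructor is declared, so the compiler-generated one just
  // copies `balance`; with DISALLOW_COPY_AND_ASSIGN the copy below would
  // fail to compile instead.
  Balance* balance;
};

int main() {
  Balance balance;
  {
    ScopeSketch original(&balance);
    ScopeSketch copy = original;  // implicit copy: no extra Enter...
  }                               // ...but both destructors "Leave"
  std::cout << balance.enters << " enter(s), " << balance.leaves
            << " leave(s)\n";  // prints: 1 enter(s), 2 leave(s)
}
```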
@@ -186,8 +180,9 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl() {
         &background_execute_counter_);
   }
 
-  RuntimeCallTimerScope runtimeTimer(
-      runtime_call_stats_, &RuntimeCallStats::CompileIgnitionFinalization);
+  RuntimeCallTimerScope runtimeTimerScope(
+      !executed_on_background_thread() ? runtime_call_stats_ : nullptr,
+      &RuntimeCallStats::CompileIgnitionFinalization);
 
   Handle<BytecodeArray> bytecodes = generator()->FinalizeBytecode(isolate());
   if (generator()->HasStackOverflow()) {