Commit fbbc0ff2 authored by kschimpf, committed by Commit bot

Create a thread-safe version of StatsCounters and use it.

Adds a new class, StatsCounterThreadSafe, to be used by counters that can
be updated while compilation/decoding work is being done on worker threads.

It does this by guarding all operations with a mutex.

The StatsCounterThreadSafe constructor forces counter initialization, as
does its Reset() method. In addition, whenever
StatsTable::SetCounterFunction() is called (from the main thread), it
forces counter initialization for all thread-safe stats counters.
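
Conceptually, each thread-safe counter just serializes its updates behind a
mutex. A minimal standalone sketch of that pattern using standard-library
types (ThreadSafeCounter and its members are hypothetical illustration names,
not the actual V8 class):

#include <mutex>

// Sketch of a mutex-guarded counter, analogous to StatsCounterThreadSafe.
class ThreadSafeCounter {
 public:
  // ptr is the counter's slot in an external stats table; it may be null if
  // the embedder did not register a counter lookup function.
  explicit ThreadSafeCounter(int* ptr) : ptr_(ptr) {}

  void Increment(int value = 1) {
    if (ptr_ == nullptr) return;  // counter is disabled
    std::lock_guard<std::mutex> guard(mutex_);
    *ptr_ += value;
  }

  void Set(int value) {
    if (ptr_ == nullptr) return;
    std::lock_guard<std::mutex> guard(mutex_);
    *ptr_ = value;
  }

 private:
  int* ptr_;          // resolved once, on the main thread
  std::mutex mutex_;  // serializes updates coming from worker threads
};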

BUG=v8:6361

Review-Url: https://codereview.chromium.org/2887193002
Cr-Commit-Position: refs/heads/master@{#45526}
parent 709c906a
......@@ -8690,9 +8690,7 @@ void Isolate::SetUseCounterCallback(UseCounterCallback callback) {
void Isolate::SetCounterFunction(CounterLookupCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->stats_table()->SetCounterFunction(callback);
isolate->InitializeLoggingAndCounters();
isolate->counters()->ResetCounters();
isolate->stats_table()->SetCounterFunction(callback, isolate);
}
......
......@@ -20,11 +20,61 @@ StatsTable::StatsTable()
create_histogram_function_(NULL),
add_histogram_sample_function_(NULL) {}
void StatsTable::SetCounterFunction(CounterLookupCallback f, Isolate* isolate) {
lookup_function_ = f;
if (!isolate->InitializeCounters()) isolate->counters()->ResetCounters();
}
int* StatsCounter::FindLocationInStatsTable() const {
int* StatsCounterBase::FindLocationInStatsTable() const {
return isolate_->stats_table()->FindLocation(name_);
}
StatsCounterThreadSafe::StatsCounterThreadSafe(Isolate* isolate,
const char* name)
: StatsCounterBase(isolate, name) {
GetPtr();
}
void StatsCounterThreadSafe::Set(int Value) {
if (ptr_) {
base::LockGuard<base::Mutex> Guard(&mutex_);
SetLoc(ptr_, Value);
}
}
void StatsCounterThreadSafe::Increment() {
if (ptr_) {
base::LockGuard<base::Mutex> Guard(&mutex_);
IncrementLoc(ptr_);
}
}
void StatsCounterThreadSafe::Increment(int value) {
if (ptr_) {
base::LockGuard<base::Mutex> Guard(&mutex_);
IncrementLoc(ptr_, value);
}
}
void StatsCounterThreadSafe::Decrement() {
if (ptr_) {
base::LockGuard<base::Mutex> Guard(&mutex_);
DecrementLoc(ptr_);
}
}
void StatsCounterThreadSafe::Decrement(int value) {
if (ptr_) {
base::LockGuard<base::Mutex> Guard(&mutex_);
DecrementLoc(ptr_, value);
}
}
int* StatsCounterThreadSafe::GetPtr() {
base::LockGuard<base::Mutex> Guard(&mutex_);
ptr_ = FindLocationInStatsTable();
return ptr_;
}
void Histogram::AddSample(int sample) {
if (Enabled()) {
......@@ -60,8 +110,14 @@ void HistogramTimer::Stop() {
Logger::CallEventLogger(isolate(), name(), Logger::END, true);
}
Counters::Counters(Isolate* isolate) {
Counters::Counters(Isolate* isolate)
:
// clang-format off
#define SC(name, caption) name##_(isolate, "c:" #caption),
STATS_COUNTER_TS_LIST(SC)
#undef SC
// clang-format on
runtime_call_stats_() {
static const struct {
Histogram Counters::*member;
const char* caption;
......@@ -200,13 +256,16 @@ Counters::Counters(Isolate* isolate) {
}
}
void Counters::ResetCounters() {
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_TS_LIST(SC)
#undef SC
#define SC(name) \
count_of_##name##_.Reset(); \
size_of_##name##_.Reset();
......
......@@ -28,10 +28,9 @@ namespace internal {
class StatsTable {
public:
// Register an application-defined function where
// counters can be looked up.
void SetCounterFunction(CounterLookupCallback f) {
lookup_function_ = f;
}
// counters can be looked up. Note: Must be called on main thread,
// so that threaded stats counters can be created now.
void SetCounterFunction(CounterLookupCallback f, Isolate* isolate);
// Register an application-defined function to create
// a histogram for passing to the AddHistogramSample function
......@@ -92,6 +91,27 @@ class StatsTable {
DISALLOW_COPY_AND_ASSIGN(StatsTable);
};
// Base class for stats counters.
class StatsCounterBase {
public:
StatsCounterBase() {}
StatsCounterBase(Isolate* isolate, const char* name)
: isolate_(isolate), name_(name), ptr_(nullptr) {}
protected:
Isolate* isolate_;
const char* name_;
int* ptr_;
void SetLoc(int* loc, int value) { *loc = value; }
void IncrementLoc(int* loc) { (*loc)++; }
void IncrementLoc(int* loc, int value) { (*loc) += value; }
void DecrementLoc(int* loc) { (*loc)--; }
void DecrementLoc(int* loc, int value) { (*loc) -= value; }
int* FindLocationInStatsTable() const;
};
// StatsCounters are dynamically created values which can be tracked in
// the StatsTable. They are designed to be lightweight to create and
// easy to use.
......@@ -100,39 +120,33 @@ class StatsTable {
// The row has a 32bit value for each process/thread in the table and also
// a name (stored in the table metadata). Since the storage location can be
// thread-specific, this class cannot be shared across threads.
class StatsCounter {
class StatsCounter : public StatsCounterBase {
public:
StatsCounter() { }
explicit StatsCounter(Isolate* isolate, const char* name)
: isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }
StatsCounter(Isolate* isolate, const char* name)
: StatsCounterBase(isolate, name), lookup_done_(false) {}
// Sets the counter to a specific value.
void Set(int value) {
int* loc = GetPtr();
if (loc) *loc = value;
if (int* loc = GetPtr()) SetLoc(loc, value);
}
// Increments the counter.
void Increment() {
int* loc = GetPtr();
if (loc) (*loc)++;
if (int* loc = GetPtr()) IncrementLoc(loc);
}
void Increment(int value) {
int* loc = GetPtr();
if (loc)
(*loc) += value;
if (int* loc = GetPtr()) IncrementLoc(loc, value);
}
// Decrements the counter.
void Decrement() {
int* loc = GetPtr();
if (loc) (*loc)--;
if (int* loc = GetPtr()) DecrementLoc(loc);
}
void Decrement(int value) {
int* loc = GetPtr();
if (loc) (*loc) -= value;
if (int* loc = GetPtr()) DecrementLoc(loc, value);
}
// Is this counter enabled?
......@@ -153,7 +167,7 @@ class StatsCounter {
// Reset the cached internal pointer.
void Reset() { lookup_done_ = false; }
protected:
private:
// Returns the cached address of this counter location.
int* GetPtr() {
if (lookup_done_) return ptr_;
......@@ -162,13 +176,36 @@ class StatsCounter {
return ptr_;
}
bool lookup_done_;
};
// Thread safe version of StatsCounter. WARNING: Unlike StatsCounter,
// StatsCounterThreadSafe's constructor and method Reset() actually do
// the table lookup, and should be called from the main thread
// (i.e. not workers).
class StatsCounterThreadSafe : public StatsCounterBase {
public:
StatsCounterThreadSafe(Isolate* isolate, const char* name);
void Set(int Value);
void Increment();
void Increment(int value);
void Decrement();
void Decrement(int value);
bool Enabled() { return ptr_ != NULL; }
int* GetInternalPointer() {
DCHECK(ptr_ != NULL);
return ptr_;
}
void Reset() { GetPtr(); }
private:
int* FindLocationInStatsTable() const;
int* GetPtr();
Isolate* isolate_;
const char* name_;
int* ptr_;
bool lookup_done_;
base::Mutex mutex_;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(StatsCounterThreadSafe);
};
// A Histogram represents a dynamically created histogram in the StatsTable.
......@@ -1167,14 +1204,18 @@ class RuntimeCallTimerScope {
/* Total code size (including metadata) of baseline code or bytecode. */ \
SC(total_baseline_code_size, V8.TotalBaselineCodeSize) \
/* Total count of functions compiled using the baseline compiler. */ \
SC(total_baseline_compile_count, V8.TotalBaselineCompileCount) \
SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
SC(wasm_reloc_size, V8.WasmRelocBytes) \
SC(total_baseline_compile_count, V8.TotalBaselineCompileCount)
#define STATS_COUNTER_TS_LIST(SC) \
SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
SC(wasm_reloc_size, V8.WasmRelocBytes) \
SC(wasm_lazily_compiled_functions, V8.WasmLazilyCompiledFunctions)
// This file contains all the v8 counters that are in use.
class Counters {
class Counters : public std::enable_shared_from_this<Counters> {
public:
explicit Counters(Isolate* isolate);
#define HR(name, caption, min, max, num_buckets) \
Histogram* name() { return &name##_; }
HISTOGRAM_RANGE_LIST(HR)
......@@ -1214,6 +1255,11 @@ class Counters {
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) \
StatsCounterThreadSafe* name() { return &name##_; }
STATS_COUNTER_TS_LIST(SC)
#undef SC
#define SC(name) \
StatsCounter* count_of_##name() { return &count_of_##name##_; } \
StatsCounter* size_of_##name() { return &size_of_##name##_; }
......@@ -1244,6 +1290,7 @@ class Counters {
CODE_AGE_LIST_COMPLETE(SC)
#undef SC
// clang-format off
enum Id {
#define RATE_ID(name, caption, max, res) k_##name,
HISTOGRAM_TIMER_LIST(RATE_ID)
......@@ -1261,6 +1308,7 @@ class Counters {
#define COUNTER_ID(name, caption) k_##name,
STATS_COUNTER_LIST_1(COUNTER_ID)
STATS_COUNTER_LIST_2(COUNTER_ID)
STATS_COUNTER_TS_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
INSTANCE_TYPE_LIST(COUNTER_ID)
......@@ -1279,6 +1327,7 @@ class Counters {
#undef COUNTER_ID
stats_counter_count
};
// clang-format on
void ResetCounters();
void ResetHistograms();
......@@ -1322,6 +1371,10 @@ class Counters {
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) StatsCounterThreadSafe name##_;
STATS_COUNTER_TS_LIST(SC)
#undef SC
#define SC(name) \
StatsCounter size_of_##name##_; \
StatsCounter count_of_##name##_;
......@@ -1350,8 +1403,6 @@ class Counters {
friend class Isolate;
explicit Counters(Isolate* isolate);
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};
......
......@@ -2549,7 +2549,6 @@ Isolate::~Isolate() {
delete logger_;
logger_ = NULL;
delete counters_;
counters_ = NULL;
delete handle_scope_implementer_;
......@@ -2635,14 +2634,22 @@ bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
return true;
}
static base::LazyMutex initialize_counters_mutex = LAZY_MUTEX_INITIALIZER;
bool Isolate::InitializeCounters() {
if (counters_ != nullptr) return false;
base::LockGuard<base::Mutex> guard(initialize_counters_mutex.Pointer());
if (counters_ != nullptr) return false;
counters_shared_ = std::make_shared<Counters>(this);
counters_ = counters_shared_.get();
return true;
}
void Isolate::InitializeLoggingAndCounters() {
if (logger_ == NULL) {
logger_ = new Logger(this);
}
if (counters_ == NULL) {
counters_ = new Counters(this);
}
InitializeCounters();
}
......
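For illustration, Isolate::InitializeCounters() above uses double-checked
locking so that the first caller creates the Counters object and every later
caller returns quickly without taking the mutex. A reduced sketch of the same
pattern with standard-library types (the names below are stand-ins, not the
actual V8 code):

#include <atomic>
#include <memory>
#include <mutex>

struct Counters {};  // stand-in for v8::internal::Counters

std::shared_ptr<Counters> counters_shared;  // keeps Counters alive for workers
std::atomic<Counters*> counters{nullptr};   // raw pointer for fast access
std::mutex initialize_counters_mutex;

// Returns true only for the call that actually created the Counters object
// (false means "already initialized"), mirroring Isolate::InitializeCounters.
bool InitializeCounters() {
  if (counters.load() != nullptr) return false;  // fast path, no lock taken
  std::lock_guard<std::mutex> guard(initialize_counters_mutex);
  if (counters.load() != nullptr) return false;  // another thread won the race
  counters_shared = std::make_shared<Counters>();
  counters.store(counters_shared.get());
  return true;
}

(The sketch uses std::atomic for the fast-path read; the V8 code instead
relies on base::LazyMutex and a plain pointer.)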
......@@ -536,6 +536,7 @@ class Isolate {
//
// Safe to call more than once.
void InitializeLoggingAndCounters();
bool InitializeCounters(); // Returns false if already initialized.
bool Init(Deserializer* des);
......@@ -872,6 +873,7 @@ class Isolate {
DCHECK(counters_ != NULL);
return counters_;
}
std::shared_ptr<Counters> counters_shared() { return counters_shared_; }
RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
CompilationCache* compilation_cache() { return compilation_cache_; }
Logger* logger() {
......@@ -1427,6 +1429,7 @@ class Isolate {
Bootstrapper* bootstrapper_;
RuntimeProfiler* runtime_profiler_;
CompilationCache* compilation_cache_;
std::shared_ptr<Counters> counters_shared_;
Counters* counters_;
base::RecursiveMutex break_access_;
Logger* logger_;
......
......@@ -56,22 +56,15 @@ byte* raw_buffer_ptr(MaybeHandle<JSArrayBuffer> buffer, int offset) {
return static_cast<byte*>(buffer.ToHandleChecked()->backing_store()) + offset;
}
static void RecordStats(Isolate* isolate, Code* code, bool is_sync) {
if (is_sync) {
// TODO(karlschimpf): Make this work when asynchronous.
// https://bugs.chromium.org/p/v8/issues/detail?id=6361
isolate->counters()->wasm_generated_code_size()->Increment(
code->body_size());
isolate->counters()->wasm_reloc_size()->Increment(
code->relocation_info()->length());
}
static void RecordStats(Code* code, Counters* counters) {
counters->wasm_generated_code_size()->Increment(code->body_size());
counters->wasm_reloc_size()->Increment(code->relocation_info()->length());
}
static void RecordStats(Isolate* isolate, Handle<FixedArray> functions,
bool is_sync) {
static void RecordStats(Handle<FixedArray> functions, Counters* counters) {
DisallowHeapAllocation no_gc;
for (int i = 0; i < functions->length(); ++i) {
RecordStats(isolate, Code::cast(functions->get(i)), is_sync);
RecordStats(Code::cast(functions->get(i)), counters);
}
}
......@@ -338,8 +331,11 @@ class CompilationHelper {
bool is_sync)
: isolate_(isolate),
module_(std::move(module)),
counters_shared_(isolate->counters_shared()),
is_sync_(is_sync),
executed_units_(isolate->random_number_generator()) {}
executed_units_(isolate->random_number_generator()) {
counters_ = counters_shared_.get();
}
// The actual runnable task that performs compilations in the background.
class CompilationTask : public CancelableTask {
......@@ -357,6 +353,8 @@ class CompilationHelper {
Isolate* isolate_;
std::unique_ptr<WasmModule> module_;
std::shared_ptr<Counters> counters_shared_;
Counters* counters_;
bool is_sync_;
std::vector<std::unique_ptr<compiler::WasmCompilationUnit>>
compilation_units_;
......@@ -690,7 +688,7 @@ class CompilationHelper {
i < temp_instance->function_code.size(); ++i) {
Code* code = *temp_instance->function_code[i];
code_table->set(static_cast<int>(i), code);
RecordStats(isolate_, code, is_sync_);
RecordStats(code, counters_);
}
// Create heap objects for script, module bytes and asm.js offset table to
......@@ -758,7 +756,7 @@ class CompilationHelper {
int export_index =
static_cast<int>(module->functions.size() + func_index);
code_table->set(export_index, *wrapper_code);
RecordStats(isolate_, *wrapper_code, is_sync_);
RecordStats(*wrapper_code, counters_);
func_index++;
}
......@@ -912,11 +910,10 @@ int ExtractDirectCallIndex(wasm::Decoder& decoder, const byte* pc) {
return static_cast<int>(call_idx);
}
void RecordLazyCodeStats(Isolate* isolate, Code* code) {
isolate->counters()->wasm_lazily_compiled_functions()->Increment();
isolate->counters()->wasm_generated_code_size()->Increment(code->body_size());
isolate->counters()->wasm_reloc_size()->Increment(
code->relocation_info()->length());
void RecordLazyCodeStats(Code* code, Counters* counters) {
counters->wasm_lazily_compiled_functions()->Increment();
counters->wasm_generated_code_size()->Increment(code->body_size());
counters->wasm_reloc_size()->Increment(code->relocation_info()->length());
}
} // namespace
......@@ -1151,12 +1148,15 @@ class InstantiationHelper {
MaybeHandle<JSArrayBuffer> memory)
: isolate_(isolate),
module_(module_object->compiled_module()->module()),
counters_shared_(isolate->counters_shared()),
thrower_(thrower),
module_object_(module_object),
ffi_(ffi.is_null() ? Handle<JSReceiver>::null()
: ffi.ToHandleChecked()),
memory_(memory.is_null() ? Handle<JSArrayBuffer>::null()
: memory.ToHandleChecked()) {}
: memory.ToHandleChecked()) {
counters_ = counters_shared_.get();
}
// Build an instance, in all of its glory.
MaybeHandle<WasmInstanceObject> Build() {
......@@ -1246,7 +1246,7 @@ class InstantiationHelper {
UNREACHABLE();
}
}
RecordStats(isolate_, code_table, is_sync_);
RecordStats(code_table, counters_);
} else {
// There was no owner, so we can reuse the original.
compiled_module_ = original;
......@@ -1523,7 +1523,7 @@ class InstantiationHelper {
Handle<WasmExportedFunction> startup_fct = WasmExportedFunction::New(
isolate_, instance, MaybeHandle<String>(), start_index,
static_cast<int>(sig->parameter_count()), wrapper_code);
RecordStats(isolate_, *startup_code, is_sync_);
RecordStats(*startup_code, counters_);
// Call the JS function.
Handle<Object> undefined = factory->undefined_value();
MaybeHandle<Object> retval =
......@@ -1557,7 +1557,8 @@ class InstantiationHelper {
Isolate* isolate_;
WasmModule* const module_;
constexpr static bool is_sync_ = true;
std::shared_ptr<Counters> counters_shared_;
Counters* counters_;
ErrorThrower* thrower_;
Handle<WasmModuleObject> module_object_;
Handle<JSReceiver> ffi_; // TODO(titzer): Use MaybeHandle
......@@ -1767,7 +1768,7 @@ class InstantiationHelper {
return -1;
}
code_table->set(num_imported_functions, *import_wrapper);
RecordStats(isolate_, *import_wrapper, is_sync_);
RecordStats(*import_wrapper, counters_);
num_imported_functions++;
break;
}
......@@ -2740,6 +2741,7 @@ class AsyncCompileJob {
size_t length, Handle<Context> context,
Handle<JSPromise> promise)
: isolate_(isolate),
counters_shared_(isolate->counters_shared()),
bytes_copy_(std::move(bytes_copy)),
wire_bytes_(bytes_copy_.get(), bytes_copy_.get() + length) {
// The handles for the context and promise must be deferred.
......@@ -2747,6 +2749,7 @@ class AsyncCompileJob {
context_ = Handle<Context>(*context);
module_promise_ = Handle<JSPromise>(*promise);
deferred_handles_.push_back(deferred.Detach());
counters_ = counters_shared_.get();
}
void Start() {
......@@ -2759,6 +2762,8 @@ class AsyncCompileJob {
private:
Isolate* isolate_;
std::shared_ptr<Counters> counters_shared_;
Counters* counters_;
std::unique_ptr<byte[]> bytes_copy_;
ModuleWireBytes wire_bytes_;
Handle<Context> context_;
......@@ -3125,11 +3130,10 @@ class AsyncCompileJob {
TRACE_COMPILE("(5b) Finish compile...\n");
HandleScope scope(job_->isolate_);
// At this point, compilation has completed. Update the code table.
constexpr bool is_sync = true;
for (size_t i = FLAG_skip_compiling_wasm_funcs;
i < job_->temp_instance_->function_code.size(); ++i) {
Code* code = Code::cast(job_->code_table_->get(static_cast<int>(i)));
RecordStats(job_->isolate_, code, !is_sync);
RecordStats(code, job_->counters_);
}
// Create heap objects for script and module bytes to be stored in the
......@@ -3195,7 +3199,6 @@ class AsyncCompileJob {
HandleScope scope(job_->isolate_);
JSToWasmWrapperCache js_to_wasm_cache;
int func_index = 0;
constexpr bool is_sync = true;
WasmModule* module = job_->compiled_module_->module();
for (auto exp : module->export_table) {
if (exp.kind != kExternalFunction) continue;
......@@ -3207,7 +3210,7 @@ class AsyncCompileJob {
int export_index =
static_cast<int>(module->functions.size() + func_index);
job_->code_table_->set(export_index, *wrapper_code);
RecordStats(job_->isolate_, *wrapper_code, !is_sync);
RecordStats(*wrapper_code, job_->counters_);
func_index++;
}
......@@ -3331,7 +3334,8 @@ Handle<Code> wasm::CompileLazy(Isolate* isolate) {
}
void LazyCompilationOrchestrator::CompileFunction(
Isolate* isolate, Handle<WasmInstanceObject> instance, int func_index) {
Isolate* isolate, Handle<WasmInstanceObject> instance, int func_index,
Counters* counters) {
Handle<WasmCompiledModule> compiled_module(instance->compiled_module(),
isolate);
if (Code::cast(compiled_module->code_table()->get(func_index))->kind() ==
......@@ -3419,7 +3423,7 @@ void LazyCompilationOrchestrator::CompileFunction(
code_specialization.ApplyToWasmCode(*code, SKIP_ICACHE_FLUSH);
Assembler::FlushICache(isolate, code->instruction_start(),
code->instruction_size());
RecordLazyCodeStats(isolate, *code);
RecordLazyCodeStats(*code, counters);
}
Handle<Code> LazyCompilationOrchestrator::CompileLazy(
......@@ -3429,6 +3433,8 @@ Handle<Code> LazyCompilationOrchestrator::CompileLazy(
int offset;
int func_index;
};
std::shared_ptr<Counters> counters_shared = isolate->counters_shared();
Counters* counters = counters_shared.get();
std::vector<NonCompiledFunction> non_compiled_functions;
int func_to_return_idx = exported_func_index;
wasm::Decoder decoder(nullptr, nullptr);
......@@ -3473,7 +3479,7 @@ Handle<Code> LazyCompilationOrchestrator::CompileLazy(
// TODO(clemensh): compile all functions in non_compiled_functions in
// background, wait for func_to_return_idx.
CompileFunction(isolate, instance, func_to_return_idx);
CompileFunction(isolate, instance, func_to_return_idx, counters);
if (is_js_to_wasm || patch_caller) {
DisallowHeapAllocation no_gc;
......
......@@ -501,7 +501,8 @@ Handle<Code> CompileLazy(Isolate* isolate);
// logic to actually orchestrate parallel execution of wasm compilation jobs.
// TODO(clemensh): Implement concurrent lazy compilation.
class LazyCompilationOrchestrator {
void CompileFunction(Isolate*, Handle<WasmInstanceObject>, int func_index);
void CompileFunction(Isolate*, Handle<WasmInstanceObject>, int func_index,
Counters* counters);
public:
Handle<Code> CompileLazy(Isolate*, Handle<WasmInstanceObject>,
......
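One more note on the wasm changes above: CompilationHelper,
InstantiationHelper and AsyncCompileJob all capture the isolate's
std::shared_ptr<Counters> and cache a raw pointer from it, so the Counters
object stays alive for as long as a background job holds the shared_ptr.
A minimal sketch of that ownership pattern (BackgroundJob and its members
are hypothetical names used only for illustration):

#include <memory>
#include <utility>

struct Counters {
  // In V8 this would route through a StatsCounterThreadSafe; simplified here.
  void IncrementWasmGeneratedCodeSize(int bytes) { generated_code_bytes += bytes; }
  int generated_code_bytes = 0;
};

class BackgroundJob {
 public:
  // Copy the shared_ptr up front (on the main thread); the raw pointer is
  // just a convenience alias into the shared object.
  explicit BackgroundJob(std::shared_ptr<Counters> counters)
      : counters_shared_(std::move(counters)),
        counters_(counters_shared_.get()) {}

  // Safe to call later from the job: its shared_ptr guarantees the
  // Counters object is still alive.
  void Run(int generated_bytes) {
    counters_->IncrementWasmGeneratedCodeSize(generated_bytes);
  }

 private:
  std::shared_ptr<Counters> counters_shared_;  // owning reference
  Counters* counters_;                         // cached raw pointer
};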