Commit 93716b9e authored by Pierre Langlois, committed by Commit Bot

[snapshot] Add support for native counters.

Counters in generated code, as enabled with --native-code-counters, do not work
in the snapshot. This adds a `v8_enable_snapshot_code_counters` build option
enabled by default in debug mode that allows code from the snapshot to increment
the current isolate's set of counters.

For this to work, we need to add native code counters in the external reference
table.

To keep the no snapshot configuration similar, we've also enabled the
--native-code-counters flag by default for debug builds.

Change-Id: I4478b79858c9b04f57e06e7ec67449e9e3a76f53
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1528998
Commit-Queue: Pierre Langlois <pierre.langlois@arm.com>
Reviewed-by: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60495}
parent 766edfc8
......@@ -91,6 +91,12 @@ declare_args() {
# Enable code comments for builtins in the snapshot (impacts performance).
v8_enable_snapshot_code_comments = false
# Enable native counters from the snapshot (impacts performance, sets
# -dV8_SNAPSHOT_NATIVE_CODE_COUNTERS).
# This option will generate extra code in the snapshot to increment counters,
# as per the --native-code-counters flag.
v8_enable_snapshot_native_code_counters = ""
# Enable code-generation-time checking of types in the CodeStubAssembler.
v8_enable_verify_csa = false
......@@ -208,6 +214,9 @@ if (v8_check_microtasks_scopes_consistency == "") {
v8_check_microtasks_scopes_consistency =
v8_enable_debugging_features || dcheck_always_on
}
if (v8_enable_snapshot_native_code_counters == "") {
v8_enable_snapshot_native_code_counters = v8_enable_debugging_features
}
assert(v8_current_cpu != "x86" || !v8_untrusted_code_mitigations,
"Untrusted code mitigations are unsupported on ia32")
......@@ -394,6 +403,9 @@ config("features") {
}
if (v8_use_snapshot) {
defines += [ "V8_USE_SNAPSHOT" ]
if (v8_enable_snapshot_native_code_counters) {
defines += [ "V8_SNAPSHOT_NATIVE_CODE_COUNTERS" ]
}
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
......@@ -1180,6 +1192,14 @@ template("run_mksnapshot") {
args += [ "--code-comments" ]
}
if (v8_enable_snapshot_native_code_counters) {
args += [ "--native-code-counters" ]
} else {
# --native-code-counters is the default in debug mode so make sure we can
# unset it.
args += [ "--no-native-code-counters" ]
}
if (v8_enable_fast_mksnapshot) {
args += [
"--no-turbo-rewrite-far-jumps",
......@@ -1998,6 +2018,7 @@ v8_source_set("v8_base") {
"src/conversions-inl.h",
"src/conversions.cc",
"src/conversions.h",
"src/counters-definitions.h",
"src/counters-inl.h",
"src/counters.cc",
"src/counters.h",
......
......@@ -2684,6 +2684,9 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_NE(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Mov(scratch2, ExternalReference::Create(counter));
Ldr(scratch1.W(), MemOperand(scratch2));
Add(scratch1.W(), scratch1.W(), value);
......
......@@ -8143,6 +8143,9 @@ void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Add(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
......@@ -8154,6 +8157,9 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Sub(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
......
This diff is collapsed.
......@@ -228,7 +228,9 @@ Counters::Counters(Isolate* isolate)
const char* caption;
} kStatsCounters[] = {
#define SC(name, caption) {&Counters::name##_, "c:" #caption},
STATS_COUNTER_LIST_1(SC) STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name) \
{&Counters::count_of_##name##_, "c:" "V8.CountOf_" #name}, \
......@@ -262,10 +264,8 @@ void Counters::ResetCounterFunction(CounterLookupCallback f) {
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_TS_LIST(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name) \
......
This diff is collapsed.
......@@ -9,6 +9,7 @@
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <unordered_map>
#include <utility>
#include <vector>
......@@ -1978,16 +1979,6 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
return handle_scope.Escape(context);
}
// Pairs a d8 Counter with the name it is registered under, so the counter
// dump can be sorted by name before printing.
struct CounterAndKey {
  Counter* counter;
  const char* key;
};

// Orders entries alphabetically by key (byte-wise strcmp order).
inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) {
  const int cmp = strcmp(lhs.key, rhs.key);
  return cmp < 0;
}
void Shell::WriteIgnitionDispatchCountersFile(v8::Isolate* isolate) {
HandleScope handle_scope(isolate);
Local<Context> context = Context::New(isolate);
......@@ -2101,54 +2092,52 @@ void Shell::OnExit(v8::Isolate* isolate) {
isolate->Dispose();
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
const int number_of_counters = static_cast<int>(counter_map_->size());
CounterAndKey* counters = new CounterAndKey[number_of_counters];
int j = 0;
for (auto map_entry : *counter_map_) {
counters[j].counter = map_entry.second;
counters[j].key = map_entry.first;
j++;
}
std::sort(counters, counters + number_of_counters);
std::vector<std::pair<std::string, Counter*>> counters(
counter_map_->begin(), counter_map_->end());
std::sort(counters.begin(), counters.end());
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
if (counter->is_histogram()) {
printf("\"c:%s\"=%i\n", key, counter->count());
printf("\"t:%s\"=%i\n", key, counter->sample_total());
std::cout << "\"c:" << key << "\"=" << counter->count() << "\n";
std::cout << "\"t:" << key << "\"=" << counter->sample_total()
<< "\n";
} else {
printf("\"%s\"=%i\n", key, counter->count());
std::cout << "\"" << key << "\"=" << counter->count() << "\n";
}
}
} else {
// Dump counters in formatted boxes.
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
printf(
"| Name |"
" Value |\n");
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ') << "| Value"
<< std::string(kValueBoxSize - 6, ' ') << "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
if (counter->is_histogram()) {
printf("| c:%-60s | %11i |\n", key, counter->count());
printf("| t:%-60s | %11i |\n", key, counter->sample_total());
std::cout << "| c:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
std::cout << "| t:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->sample_total() << " |\n";
} else {
printf("| %-62s | %11i |\n", key, counter->count());
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
}
}
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
}
delete [] counters;
}
delete counters_file_;
......
......@@ -59,21 +59,7 @@ class CounterCollection {
Counter counters_[kMaxCounters];
};
// Hash functor for NUL-terminated C strings, allowing them to key an
// unordered_map without first being copied into std::string objects.
struct CStringHasher {
  // Multiplicative string hash: h = h * 33 + c for each character
  // (h << 5 == h * 32, so h + (h << 5) == h * 33).
  std::size_t operator()(const char* name) const {
    std::size_t h = 0;
    for (; *name != '\0'; ++name) {
      h = (h << 5) + h + static_cast<std::size_t>(*name);
    }
    return h;
  }
};
typedef std::unordered_map<const char*, Counter*, CStringHasher,
i::StringEquals>
CounterMap;
typedef std::unordered_map<std::string, Counter*> CounterMap;
class SourceGroup {
public:
......
......@@ -76,14 +76,15 @@ void StatisticsExtension::GetCounters(
v8::internal::StatsCounter* counter;
const char* name;
};
// clang-format off
const StatisticsCounter counter_list[] = {
#define ADD_COUNTER(name, caption) \
{ counters->name(), #name } \
,
STATS_COUNTER_LIST_1(ADD_COUNTER) STATS_COUNTER_LIST_2(ADD_COUNTER)
#define ADD_COUNTER(name, caption) {counters->name(), #name},
STATS_COUNTER_LIST_1(ADD_COUNTER)
STATS_COUNTER_LIST_2(ADD_COUNTER)
STATS_COUNTER_NATIVE_CODE_LIST(ADD_COUNTER)
#undef ADD_COUNTER
}; // End counter_list array.
// clang-format on
for (size_t i = 0; i < arraysize(counter_list); i++) {
AddCounter(args.GetIsolate(), result, counter_list[i].counter,
......
......@@ -25,14 +25,16 @@ namespace internal {
#define ADD_ACCESSOR_INFO_NAME(_, __, AccessorName, ...) \
"Accessors::" #AccessorName "Getter",
#define ADD_ACCESSOR_SETTER_NAME(name) "Accessors::" #name,
#define ADD_STATS_COUNTER_NAME(name, ...) "StatsCounter::" #name,
// static
// clang-format off
const char* const
ExternalReferenceTable::ref_name_[ExternalReferenceTable::kSize] = {
// Special references:
"nullptr",
// External references:
EXTERNAL_REFERENCE_LIST(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
// Builtins:
BUILTIN_LIST_C(ADD_BUILTIN_NAME)
// Runtime functions:
......@@ -41,7 +43,7 @@ const char* const
FOR_EACH_ISOLATE_ADDRESS_NAME(ADD_ISOLATE_ADDR)
// Accessors:
ACCESSOR_INFO_LIST_GENERATOR(ADD_ACCESSOR_INFO_NAME, /* not used */)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
// Stub cache:
"Load StubCache::primary_->key",
"Load StubCache::primary_->value",
......@@ -55,13 +57,17 @@ const char* const
"Store StubCache::secondary_->key",
"Store StubCache::secondary_->value",
"Store StubCache::secondary_->map",
// Native code counters:
STATS_COUNTER_NATIVE_CODE_LIST(ADD_STATS_COUNTER_NAME)
};
// clang-format on
#undef ADD_EXT_REF_NAME
#undef ADD_BUILTIN_NAME
#undef ADD_RUNTIME_FUNCTION
#undef ADD_ISOLATE_ADDR
#undef ADD_ACCESSOR_INFO_NAME
#undef ADD_ACCESSOR_SETTER_NAME
#undef ADD_STATS_COUNTER_NAME
// Forward declarations for C++ builtins.
#define FORWARD_DECLARE(Name) \
......@@ -80,8 +86,8 @@ void ExternalReferenceTable::Init(Isolate* isolate) {
AddIsolateAddresses(isolate, &index);
AddAccessors(&index);
AddStubCache(isolate, &index);
AddNativeCodeStatsCounters(isolate, &index);
is_initialized_ = static_cast<uint32_t>(true);
USE(unused_padding_);
CHECK_EQ(kSize, index);
}
......@@ -231,6 +237,34 @@ void ExternalReferenceTable::AddStubCache(Isolate* isolate, int* index) {
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount,
*index);
}
// Returns the address that snapshot code should update for |counter|.
// Disabled counters are redirected to the table's dummy_stats_counter_
// field, so a snapshot built with native counters keeps working even when
// the embedder is not collecting them.
Address ExternalReferenceTable::GetStatsCounterAddress(StatsCounter* counter) {
  if (counter->Enabled()) {
    return reinterpret_cast<Address>(counter->GetInternalPointer());
  }
  return reinterpret_cast<Address>(&dummy_stats_counter_);
}
// Appends one table entry per native-code stats counter. This is the final
// section of the external reference table, so after it runs the index must
// equal kSize.
void ExternalReferenceTable::AddNativeCodeStatsCounters(Isolate* isolate,
                                                        int* index) {
  // Number of entries contributed by all preceding sections.
  constexpr int kPrecedingReferenceCount =
      kSpecialReferenceCount + kExternalReferenceCount +
      kBuiltinsReferenceCount + kRuntimeReferenceCount +
      kIsolateAddressReferenceCount + kAccessorReferenceCount +
      kStubCacheReferenceCount;
  CHECK_EQ(kPrecedingReferenceCount, *index);

  Counters* counters = isolate->counters();
  // Disabled counters are redirected to the dummy field by
  // GetStatsCounterAddress.
#define SC(name, caption) Add(GetStatsCounterAddress(counters->name()), index);
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC

  CHECK_EQ(kPrecedingReferenceCount + kStatsCountersReferenceCount, *index);
  CHECK_EQ(kSize, *index);
}
......
......@@ -9,6 +9,7 @@
#include "src/accessors.h"
#include "src/builtins/builtins.h"
#include "src/counters-definitions.h"
#include "src/external-reference.h"
namespace v8 {
......@@ -37,11 +38,15 @@ class ExternalReferenceTable {
Accessors::kAccessorInfoCount + Accessors::kAccessorSetterCount;
// The number of stub cache external references, see AddStubCache.
static constexpr int kStubCacheReferenceCount = 12;
static constexpr int kStatsCountersReferenceCount =
#define SC(...) +1
STATS_COUNTER_NATIVE_CODE_LIST(SC);
#undef SC
static constexpr int kSize =
kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount;
kStubCacheReferenceCount + kStatsCountersReferenceCount;
static constexpr uint32_t kEntrySize =
static_cast<uint32_t>(kSystemPointerSize);
static constexpr uint32_t kSizeInBytes = kSize * kEntrySize + 2 * kUInt32Size;
......@@ -78,12 +83,22 @@ class ExternalReferenceTable {
void AddAccessors(int* index);
void AddStubCache(Isolate* isolate, int* index);
Address GetStatsCounterAddress(StatsCounter* counter);
void AddNativeCodeStatsCounters(Isolate* isolate, int* index);
STATIC_ASSERT(sizeof(Address) == kEntrySize);
Address ref_addr_[kSize];
static const char* const ref_name_[kSize];
uint32_t is_initialized_ = 0; // Not bool to guarantee deterministic size.
uint32_t unused_padding_ = 0; // For alignment.
// Not bool to guarantee deterministic size.
uint32_t is_initialized_ = 0;
// Redirect disabled stats counters to this field. This is done to make sure
// we can have a snapshot that includes native counters even when the embedder
// isn't collecting them.
// This field is uint32_t since the MacroAssembler and CodeStubAssembler
// accesses this field as a uint32_t.
uint32_t dummy_stats_counter_ = 0;
DISALLOW_COPY_AND_ASSIGN(ExternalReferenceTable);
};
......
......@@ -1047,8 +1047,7 @@ DEFINE_BOOL_READONLY(fast_map_update, false,
DEFINE_INT(max_polymorphic_map_count, 4,
"maximum number of maps to track in POLYMORPHIC state")
// macro-assembler-ia32.cc
DEFINE_BOOL(native_code_counters, false,
DEFINE_BOOL(native_code_counters, DEBUG_BOOL,
"generate extra code for manipulating stats counters")
// objects.cc
......
......@@ -5025,6 +5025,9 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Addu(scratch1, scratch1, Operand(value));
......@@ -5037,6 +5040,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Subu(scratch1, scratch1, Operand(value));
......
......@@ -1714,6 +1714,9 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
addi(scratch1, scratch1, Operand(value));
......@@ -1726,6 +1729,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
subi(scratch1, scratch1, Operand(value));
......
......@@ -5,6 +5,7 @@
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <iomanip>
#include "include/libplatform/libplatform.h"
#include "src/assembler-arch.h"
......@@ -246,6 +247,9 @@ void WriteEmbeddedFile(i::EmbeddedFileWriter* writer) {
}
} // namespace
typedef std::map<std::string, int> CounterMap;
CounterMap* counter_map_;
int main(int argc, char** argv) {
v8::base::EnsureConsoleOutput();
......@@ -287,7 +291,20 @@ int main(int argc, char** argv) {
i::DisableEmbeddedBlobRefcounting();
v8::StartupData blob;
{
counter_map_ = new CounterMap();
v8::Isolate* isolate = v8::Isolate::Allocate();
// If --native-code-counters is on then we enable all counters to make
// sure we generate code to increment them from the snapshot.
if (i::FLAG_native_code_counters || i::FLAG_dump_counters ||
i::FLAG_dump_counters_nvp) {
isolate->SetCounterFunction([](const char* name) -> int* {
auto map_entry = counter_map_->find(name);
if (map_entry == counter_map_->end()) {
counter_map_->emplace(name, 0);
}
return &counter_map_->at(name);
});
}
if (i::FLAG_embedded_builtins) {
// Set code range such that relative jumps for builtins to
// builtin calls in the snapshot are possible.
......@@ -307,6 +324,37 @@ int main(int argc, char** argv) {
WriteEmbeddedFile(&embedded_writer);
}
blob = CreateSnapshotDataBlob(&snapshot_creator, embed_script.get());
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "\"" << key << "\"=" << counter << "\n";
}
} else {
// Dump counters in formatted boxes.
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ')
<< "| Value" << std::string(kValueBoxSize - 6, ' ')
<< "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter << " |\n";
}
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
}
}
delete counter_map_;
}
if (warmup_script) {
......
......@@ -2119,6 +2119,9 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
incl(counter_operand);
} else {
......@@ -2133,6 +2136,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
decl(counter_operand);
} else {
......
......@@ -148,6 +148,38 @@ class NativeTimeScope {
}
};
// Fixture for tests that check whether code baked into the snapshot
// increments native stats counters.
class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters {
 public:
  SnapshotNativeCounterTest() = default;

  // Whether this build can increment native counters from generated code:
  // with a snapshot it depends on the build-time option, without one it
  // depends on the runtime flag.
  bool SupportsNativeCounters() const {
#if defined(V8_USE_SNAPSHOT) && defined(V8_SNAPSHOT_NATIVE_CODE_COUNTERS)
    return true;
#elif defined(V8_USE_SNAPSHOT)
    return false;
#else
    // If we do not have a snapshot then we rely on the runtime option.
    return internal::FLAG_native_code_counters;
#endif
  }

  // Generates one accessor per native-code counter returning its current
  // value; the counter must be enabled.
#define SC(name, caption)                                        \
  int name() {                                                   \
    CHECK(isolate()->counters()->name()->Enabled());             \
    return *isolate()->counters()->name()->GetInternalPointer(); \
  }
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC

  // Dumps every native-code counter to stdout for debugging.
  void PrintAll() {
#define SC(name, caption) PrintF(#caption " = %d\n", name());
    STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
  }
};
} // namespace
......@@ -765,5 +797,29 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) {
PrintStats();
}
TEST_F(SnapshotNativeCounterTest, StringAddNative) {
  RunJS("let s = 'hello, ' + 'world!'");

  // The counter only ticks when the build supports native counters in
  // generated code; otherwise it must stay untouched.
  if (!SupportsNativeCounters()) {
    EXPECT_EQ(0, string_add_native());
  } else {
    EXPECT_NE(0, string_add_native());
  }

  PrintAll();
}
TEST_F(SnapshotNativeCounterTest, SubStringNative) {
  RunJS("'hello, world!'.substring(6);");

  // The counter only ticks when the build supports native counters in
  // generated code; otherwise it must stay untouched.
  if (!SupportsNativeCounters()) {
    EXPECT_EQ(0, sub_string_native());
  } else {
    EXPECT_NE(0, sub_string_native());
  }

  PrintAll();
}
} // namespace internal
} // namespace v8
......@@ -15,11 +15,13 @@
namespace v8 {
IsolateWrapper::IsolateWrapper(bool enforce_pointer_compression)
IsolateWrapper::IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression)
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = array_buffer_allocator_;
create_params.counter_lookup_callback = counter_lookup_callback;
if (enforce_pointer_compression) {
isolate_ = reinterpret_cast<v8::Isolate*>(
i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
......@@ -41,6 +43,22 @@ IsolateWrapper::~IsolateWrapper() {
// static
v8::IsolateWrapper* SharedIsolateHolder::isolate_wrapper_ = nullptr;
// static
// Resolves |name| to a stable per-name counter slot, creating a
// zero-initialized entry on first use. The returned pointer stays valid for
// the lifetime of counter_map_ (std::map never invalidates element
// addresses on insertion).
int* SharedIsolateAndCountersHolder::LookupCounter(const char* name) {
  DCHECK_NOT_NULL(counter_map_);
  auto map_entry = counter_map_->find(name);
  if (map_entry == counter_map_->end()) {
    // Reuse the iterator returned by emplace instead of performing a third
    // lookup via at().
    map_entry = counter_map_->emplace(name, 0).first;
  }
  return &map_entry->second;
}
// static
v8::IsolateWrapper* SharedIsolateAndCountersHolder::isolate_wrapper_ = nullptr;
// static
CounterMap* SharedIsolateAndCountersHolder::counter_map_ = nullptr;
namespace internal {
SaveFlags::SaveFlags() {
......
......@@ -22,13 +22,16 @@ namespace v8 {
class ArrayBufferAllocator;
typedef std::map<std::string, int> CounterMap;
// RAII-like Isolate instance wrapper.
class IsolateWrapper final {
public:
// When enforce_pointer_compression is true the Isolate is created with
// enabled pointer compression. When it's false then the Isolate is created
// with the default pointer compression state for current build.
explicit IsolateWrapper(bool enforce_pointer_compression = false);
explicit IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression = false);
~IsolateWrapper();
v8::Isolate* isolate() const { return isolate_; }
......@@ -46,7 +49,8 @@ class SharedIsolateHolder final {
static void CreateIsolate() {
CHECK_NULL(isolate_wrapper_);
isolate_wrapper_ = new IsolateWrapper();
isolate_wrapper_ =
new IsolateWrapper([](const char* name) -> int* { return nullptr; });
}
static void DeleteIsolate() {
......@@ -61,6 +65,34 @@ class SharedIsolateHolder final {
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateHolder);
};
// Owns a test-suite-wide v8::Isolate together with the CounterMap backing
// its counter-lookup callback. Counterpart to SharedIsolateHolder for
// fixtures that need counters wired up (see TestWithNativeContextAndCounters).
class SharedIsolateAndCountersHolder final {
 public:
  static v8::Isolate* isolate() { return isolate_wrapper_->isolate(); }

  static void CreateIsolate() {
    CHECK_NULL(counter_map_);
    CHECK_NULL(isolate_wrapper_);
    // The map must exist before the isolate: the isolate's counter-lookup
    // callback (LookupCounter) dereferences counter_map_.
    counter_map_ = new CounterMap();
    isolate_wrapper_ = new IsolateWrapper(LookupCounter);
  }

  static void DeleteIsolate() {
    CHECK_NOT_NULL(counter_map_);
    CHECK_NOT_NULL(isolate_wrapper_);
    // Tear down in reverse creation order; the isolate may still report
    // counters while it is being disposed.
    delete isolate_wrapper_;
    isolate_wrapper_ = nullptr;
    delete counter_map_;
    counter_map_ = nullptr;
  }

 private:
  // Maps a counter name to a stable int* slot in counter_map_.
  static int* LookupCounter(const char* name);
  static CounterMap* counter_map_;
  static v8::IsolateWrapper* isolate_wrapper_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateAndCountersHolder);
};
//
// A set of mixins from which the test fixtures will be constructed.
//
......@@ -68,7 +100,8 @@ template <typename TMixin>
class WithPrivateIsolateMixin : public TMixin {
public:
explicit WithPrivateIsolateMixin(bool enforce_pointer_compression = false)
: isolate_wrapper_(enforce_pointer_compression) {}
: isolate_wrapper_([](const char* name) -> int* { return nullptr; },
enforce_pointer_compression) {}
v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); }
......@@ -81,20 +114,20 @@ class WithPrivateIsolateMixin : public TMixin {
DISALLOW_COPY_AND_ASSIGN(WithPrivateIsolateMixin);
};
template <typename TMixin>
template <typename TMixin, typename TSharedIsolateHolder = SharedIsolateHolder>
class WithSharedIsolateMixin : public TMixin {
public:
WithSharedIsolateMixin() = default;
v8::Isolate* v8_isolate() const { return SharedIsolateHolder::isolate(); }
v8::Isolate* v8_isolate() const { return TSharedIsolateHolder::isolate(); }
static void SetUpTestCase() {
TMixin::SetUpTestCase();
SharedIsolateHolder::CreateIsolate();
TSharedIsolateHolder::CreateIsolate();
}
static void TearDownTestCase() {
SharedIsolateHolder::DeleteIsolate();
TSharedIsolateHolder::DeleteIsolate();
TMixin::TearDownTestCase();
}
......@@ -295,6 +328,14 @@ using TestWithNativeContext = //
WithSharedIsolateMixin< //
::testing::Test>>>>;
using TestWithNativeContextAndCounters = //
WithInternalIsolateMixin< //
WithContextMixin< //
WithIsolateScopeMixin< //
WithSharedIsolateMixin< //
::testing::Test, //
SharedIsolateAndCountersHolder>>>>;
using TestWithNativeContextAndZone = //
WithZoneMixin< //
WithInternalIsolateMixin< //
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment