Commit 519bf695 authored by Michael Achenbach, committed by Commit Bot

Revert "[snapshot] Add support for native counters."

This reverts commit 93716b9e.

Reason for revert: Breaks ASan debug builds:
https://ci.chromium.org/p/v8/builders/ci/V8%20Clusterfuzz%20Mac64%20ASAN%20-%20debug%20builder/7872
https://ci.chromium.org/p/v8/builders/ci/V8%20Clusterfuzz%20Linux64%20ASAN%20-%20debug%20builder/7874

Original change's description:
> [snapshot] Add support for native counters.
> 
> Counters in generated code, as enabled with --native-code-counters, do not work
> in the snapshot. This adds a `v8_enable_snapshot_native_code_counters` build option
> enabled by default in debug mode that allows code from the snapshot to increment
> the current isolate's set of counters.
> 
> For this to work, we need to add native code counters in the external reference
> table.
> 
> To keep the no snapshot configuration similar, we've also enabled the
> --native-code-counters flag by default for debug builds.
> 
> Change-Id: I4478b79858c9b04f57e06e7ec67449e9e3a76f53
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1528998
> Commit-Queue: Pierre Langlois <pierre.langlois@arm.com>
> Reviewed-by: Peter Marshall <petermarshall@chromium.org>
> Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60495}

TBR=sigurds@chromium.org,jgruber@chromium.org,petermarshall@chromium.org,pierre.langlois@arm.com

Change-Id: I93f1ed714e3dcd309f3100685e4bd282db471d46
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1543209
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60500}
parent 39bfa157
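
For context on the mechanism being reverted: generated code increments a counter through an address published in the external reference table, and disabled counters are redirected to a dummy 32-bit cell so the slot is always safe to write. The following standalone C++ sketch models that idea with simplified stand-in types (StatsCounter, ExternalReferenceTable, and the counter name here are illustrative, not V8's real declarations):

#include <cstdint>
#include <iostream>

struct StatsCounter {
  bool enabled = false;
  int32_t value = 0;
  int32_t* GetInternalPointer() { return &value; }
};

struct ExternalReferenceTable {
  static constexpr int kSize = 1;
  uint32_t dummy_stats_counter_ = 0;  // disabled counters get redirected here
  int32_t* ref_addr_[kSize];

  void Add(StatsCounter* counter, int index) {
    ref_addr_[index] = counter->enabled
                           ? counter->GetInternalPointer()
                           : reinterpret_cast<int32_t*>(&dummy_stats_counter_);
  }
};

// What the emitted IncrementCounter code effectively does: a 32-bit
// load / add / store on whatever address the table slot holds.
void IncrementCounter(ExternalReferenceTable& table, int index, int delta) {
  *table.ref_addr_[index] += delta;
}

int main() {
  StatsCounter string_add_native;  // hypothetical counter
  string_add_native.enabled = true;

  ExternalReferenceTable table;
  table.Add(&string_add_native, 0);

  IncrementCounter(table, 0, 1);
  std::cout << string_add_native.value << "\n";  // prints 1
}

Because the dummy cell is a uint32_t, the generated load/add/store must be exactly 32 bits wide, which is what the deleted assembler comments further down point out.
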
......@@ -91,12 +91,6 @@ declare_args() {
# Enable code comments for builtins in the snapshot (impacts performance).
v8_enable_snapshot_code_comments = false
# Enable native counters from the snapshot (impacts performance, sets
# -dV8_SNAPSHOT_NATIVE_CODE_COUNTERS).
# This option will generate extra code in the snapshot to increment counters,
# as per the --native-code-counters flag.
v8_enable_snapshot_native_code_counters = ""
# Enable code-generation-time checking of types in the CodeStubAssembler.
v8_enable_verify_csa = false
......@@ -214,9 +208,6 @@ if (v8_check_microtasks_scopes_consistency == "") {
v8_check_microtasks_scopes_consistency =
v8_enable_debugging_features || dcheck_always_on
}
if (v8_enable_snapshot_native_code_counters == "") {
v8_enable_snapshot_native_code_counters = v8_enable_debugging_features
}
assert(v8_current_cpu != "x86" || !v8_untrusted_code_mitigations,
"Untrusted code mitigations are unsupported on ia32")
......@@ -403,9 +394,6 @@ config("features") {
}
if (v8_use_snapshot) {
defines += [ "V8_USE_SNAPSHOT" ]
if (v8_enable_snapshot_native_code_counters) {
defines += [ "V8_SNAPSHOT_NATIVE_CODE_COUNTERS" ]
}
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
......@@ -1192,14 +1180,6 @@ template("run_mksnapshot") {
args += [ "--code-comments" ]
}
if (v8_enable_snapshot_native_code_counters) {
args += [ "--native-code-counters" ]
} else {
# --native-code-counters is the default in debug mode so make sure we can
# unset it.
args += [ "--no-native-code-counters" ]
}
if (v8_enable_fast_mksnapshot) {
args += [
"--no-turbo-rewrite-far-jumps",
......@@ -2018,7 +1998,6 @@ v8_source_set("v8_base") {
"src/conversions-inl.h",
"src/conversions.cc",
"src/conversions.h",
"src/counters-definitions.h",
"src/counters-inl.h",
"src/counters.cc",
"src/counters.h",
......
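
The GN blocks removed above feed a single preprocessor define, V8_SNAPSHOT_NATIVE_CODE_COUNTERS, which code can then test alongside V8_USE_SNAPSHOT. A minimal sketch of that gate, modeled on the reverted SnapshotNativeCounterTest further down (the helper below is illustrative and assumes the defines are passed in by the build):

#include <cstdio>

static bool SupportsNativeCounters(bool flag_native_code_counters) {
#ifdef V8_USE_SNAPSHOT
#ifdef V8_SNAPSHOT_NATIVE_CODE_COUNTERS
  return true;   // snapshot code was generated with counter increments
#else
  return false;  // snapshot code was generated without them
#endif
#else
  // No snapshot: the runtime --native-code-counters flag decides.
  return flag_native_code_counters;
#endif
}

int main() {
  std::printf("native counters supported: %d\n", SupportsNativeCounters(false));
}
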
......@@ -2684,9 +2684,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_NE(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Mov(scratch2, ExternalReference::Create(counter));
Ldr(scratch1.W(), MemOperand(scratch2));
Add(scratch1.W(), scratch1.W(), value);
......
......@@ -8143,9 +8143,6 @@ void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Add(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
......@@ -8157,9 +8154,6 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Sub(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
......
This diff is collapsed.
......@@ -228,9 +228,7 @@ Counters::Counters(Isolate* isolate)
const char* caption;
} kStatsCounters[] = {
#define SC(name, caption) {&Counters::name##_, "c:" #caption},
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
STATS_COUNTER_LIST_1(SC) STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name) \
{&Counters::count_of_##name##_, "c:" "V8.CountOf_" #name}, \
......@@ -264,8 +262,10 @@ void Counters::ResetCounterFunction(CounterLookupCallback f) {
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_TS_LIST(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name) \
......
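
The counter tables above are built from X-macro lists: each STATS_COUNTER_*_LIST(SC) invokes SC once per counter, so dropping STATS_COUNTER_NATIVE_CODE_LIST(SC) removes those rows from the table. A standalone sketch of the pattern, using a hypothetical two-entry list rather than V8's real one:

#include <cstdio>

// Hypothetical list standing in for V8's STATS_COUNTER_NATIVE_CODE_LIST.
#define STATS_COUNTER_NATIVE_CODE_LIST(V)  \
  V(string_add_native, V8.StringAddNative) \
  V(sub_string_native, V8.SubStringNative)

struct Entry {
  const char* name;
  const char* caption;
};

static const Entry kEntries[] = {
#define SC(name, caption) {#name, "c:" #caption},
    STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
};

int main() {
  for (const Entry& e : kEntries) std::printf("%s -> %s\n", e.name, e.caption);
}
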
This diff is collapsed.
......@@ -9,7 +9,6 @@
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <unordered_map>
#include <utility>
#include <vector>
......@@ -1979,6 +1978,16 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
return handle_scope.Escape(context);
}
struct CounterAndKey {
Counter* counter;
const char* key;
};
inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) {
return strcmp(lhs.key, rhs.key) < 0;
}
void Shell::WriteIgnitionDispatchCountersFile(v8::Isolate* isolate) {
HandleScope handle_scope(isolate);
Local<Context> context = Context::New(isolate);
......@@ -2092,52 +2101,54 @@ void Shell::OnExit(v8::Isolate* isolate) {
isolate->Dispose();
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
std::vector<std::pair<std::string, Counter*>> counters(
counter_map_->begin(), counter_map_->end());
std::sort(counters.begin(), counters.end());
const int number_of_counters = static_cast<int>(counter_map_->size());
CounterAndKey* counters = new CounterAndKey[number_of_counters];
int j = 0;
for (auto map_entry : *counter_map_) {
counters[j].counter = map_entry.second;
counters[j].key = map_entry.first;
j++;
}
std::sort(counters, counters + number_of_counters);
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
if (counter->is_histogram()) {
std::cout << "\"c:" << key << "\"=" << counter->count() << "\n";
std::cout << "\"t:" << key << "\"=" << counter->sample_total()
<< "\n";
printf("\"c:%s\"=%i\n", key, counter->count());
printf("\"t:%s\"=%i\n", key, counter->sample_total());
} else {
std::cout << "\"" << key << "\"=" << counter->count() << "\n";
printf("\"%s\"=%i\n", key, counter->count());
}
}
} else {
// Dump counters in formatted boxes.
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ') << "| Value"
<< std::string(kValueBoxSize - 6, ' ') << "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
printf(
"| Name |"
" Value |\n");
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
if (counter->is_histogram()) {
std::cout << "| c:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
std::cout << "| t:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->sample_total() << " |\n";
printf("| c:%-60s | %11i |\n", key, counter->count());
printf("| t:%-60s | %11i |\n", key, counter->sample_total());
} else {
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
printf("| %-62s | %11i |\n", key, counter->count());
}
}
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
}
delete [] counters;
}
delete counters_file_;
......
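
The d8 change above reverts the counter dump from a std::vector of std::pair printed with iostream back to a raw CounterAndKey array sorted with strcmp and printed with fixed printf widths (a 64-character name box and an 11-character value column). A compilable sketch of that restored shape, with made-up counters:

#include <algorithm>
#include <cstdio>
#include <cstring>

struct Counter {
  int count;
};
struct CounterAndKey {
  Counter* counter;
  const char* key;
};

inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) {
  return std::strcmp(lhs.key, rhs.key) < 0;
}

int main() {
  Counter a{3}, b{7};
  CounterAndKey counters[] = {{&b, "zeta"}, {&a, "alpha"}};  // made-up keys
  std::sort(counters, counters + 2);

  std::printf(
      "+----------------------------------------------------------------+"
      "-------------+\n");
  for (const CounterAndKey& ck : counters)
    std::printf("| %-62s | %11i |\n", ck.key, ck.counter->count);
  std::printf(
      "+----------------------------------------------------------------+"
      "-------------+\n");
}
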
......@@ -59,7 +59,21 @@ class CounterCollection {
Counter counters_[kMaxCounters];
};
typedef std::unordered_map<std::string, Counter*> CounterMap;
struct CStringHasher {
std::size_t operator()(const char* name) const {
size_t h = 0;
size_t c;
while ((c = *name++) != 0) {
h += h << 5;
h += c;
}
return h;
}
};
typedef std::unordered_map<const char*, Counter*, CStringHasher,
i::StringEquals>
CounterMap;
class SourceGroup {
public:
......
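
The restored d8 CounterMap is keyed on const char*, so it needs a hash and an equality functor that look at the string contents rather than the pointer value; the hash above is the shift-add form of h = h * 33 + c. A self-contained sketch (the CStringEquals functor below is a stand-in for V8's internal i::StringEquals, and the counter name is made up):

#include <cstddef>
#include <cstring>
#include <iostream>
#include <unordered_map>

struct CStringHasher {
  std::size_t operator()(const char* name) const {
    std::size_t h = 0, c;
    while ((c = *name++) != 0) {
      h += h << 5;  // h *= 33 ...
      h += c;       // ... then add the character
    }
    return h;
  }
};

// Stand-in for V8's internal i::StringEquals: compare contents, not pointers.
struct CStringEquals {
  bool operator()(const char* a, const char* b) const {
    return std::strcmp(a, b) == 0;
  }
};

typedef std::unordered_map<const char*, int, CStringHasher, CStringEquals>
    CounterMap;

int main() {
  CounterMap map;
  map["c:V8.Example"] = 42;  // made-up counter name
  std::cout << map["c:V8.Example"] << "\n";
}
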
......@@ -76,15 +76,14 @@ void StatisticsExtension::GetCounters(
v8::internal::StatsCounter* counter;
const char* name;
};
// clang-format off
const StatisticsCounter counter_list[] = {
#define ADD_COUNTER(name, caption) {counters->name(), #name},
STATS_COUNTER_LIST_1(ADD_COUNTER)
STATS_COUNTER_LIST_2(ADD_COUNTER)
STATS_COUNTER_NATIVE_CODE_LIST(ADD_COUNTER)
#define ADD_COUNTER(name, caption) \
{ counters->name(), #name } \
,
STATS_COUNTER_LIST_1(ADD_COUNTER) STATS_COUNTER_LIST_2(ADD_COUNTER)
#undef ADD_COUNTER
}; // End counter_list array.
// clang-format on
for (size_t i = 0; i < arraysize(counter_list); i++) {
AddCounter(args.GetIsolate(), result, counter_list[i].counter,
......
......@@ -25,16 +25,14 @@ namespace internal {
#define ADD_ACCESSOR_INFO_NAME(_, __, AccessorName, ...) \
"Accessors::" #AccessorName "Getter",
#define ADD_ACCESSOR_SETTER_NAME(name) "Accessors::" #name,
#define ADD_STATS_COUNTER_NAME(name, ...) "StatsCounter::" #name,
// static
// clang-format off
const char* const
ExternalReferenceTable::ref_name_[ExternalReferenceTable::kSize] = {
// Special references:
"nullptr",
// External references:
EXTERNAL_REFERENCE_LIST(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
// Builtins:
BUILTIN_LIST_C(ADD_BUILTIN_NAME)
// Runtime functions:
......@@ -43,7 +41,7 @@ const char* const
FOR_EACH_ISOLATE_ADDRESS_NAME(ADD_ISOLATE_ADDR)
// Accessors:
ACCESSOR_INFO_LIST_GENERATOR(ADD_ACCESSOR_INFO_NAME, /* not used */)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
// Stub cache:
"Load StubCache::primary_->key",
"Load StubCache::primary_->value",
......@@ -57,17 +55,13 @@ const char* const
"Store StubCache::secondary_->key",
"Store StubCache::secondary_->value",
"Store StubCache::secondary_->map",
// Native code counters:
STATS_COUNTER_NATIVE_CODE_LIST(ADD_STATS_COUNTER_NAME)
};
// clang-format on
#undef ADD_EXT_REF_NAME
#undef ADD_BUILTIN_NAME
#undef ADD_RUNTIME_FUNCTION
#undef ADD_ISOLATE_ADDR
#undef ADD_ACCESSOR_INFO_NAME
#undef ADD_ACCESSOR_SETTER_NAME
#undef ADD_STATS_COUNTER_NAME
// Forward declarations for C++ builtins.
#define FORWARD_DECLARE(Name) \
......@@ -86,8 +80,8 @@ void ExternalReferenceTable::Init(Isolate* isolate) {
AddIsolateAddresses(isolate, &index);
AddAccessors(&index);
AddStubCache(isolate, &index);
AddNativeCodeStatsCounters(isolate, &index);
is_initialized_ = static_cast<uint32_t>(true);
USE(unused_padding_);
CHECK_EQ(kSize, index);
}
......@@ -237,34 +231,6 @@ void ExternalReferenceTable::AddStubCache(Isolate* isolate, int* index) {
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount,
*index);
}
Address ExternalReferenceTable::GetStatsCounterAddress(StatsCounter* counter) {
int* address = counter->Enabled()
? counter->GetInternalPointer()
: reinterpret_cast<int*>(&dummy_stats_counter_);
return reinterpret_cast<Address>(address);
}
void ExternalReferenceTable::AddNativeCodeStatsCounters(Isolate* isolate,
int* index) {
CHECK_EQ(kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount,
*index);
Counters* counters = isolate->counters();
#define SC(name, caption) Add(GetStatsCounterAddress(counters->name()), index);
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
CHECK_EQ(kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount + kStatsCountersReferenceCount,
*index);
CHECK_EQ(kSize, *index);
}
......
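
ExternalReferenceTable::Init fills the table section by section, and each Add* helper uses CHECK_EQ on the running index to assert that its section starts exactly where the previous sections ended and that the table is full at the end; that is the bookkeeping the deleted AddNativeCodeStatsCounters took part in. A standalone sketch of that layout check with made-up section sizes (plain asserts stand in for CHECK_EQ):

#include <cassert>
#include <cstdint>

// Made-up section sizes; the real table has many more sections.
constexpr int kSpecialReferenceCount = 1;
constexpr int kExternalReferenceCount = 4;
constexpr int kStatsCountersReferenceCount = 2;
constexpr int kSize = kSpecialReferenceCount + kExternalReferenceCount +
                      kStatsCountersReferenceCount;

std::uintptr_t ref_addr_[kSize];

void Add(std::uintptr_t address, int* index) { ref_addr_[(*index)++] = address; }

void AddNativeCodeStatsCounters(int* index) {
  // The section must start exactly where the earlier sections ended...
  assert(*index == kSpecialReferenceCount + kExternalReferenceCount);
  for (int i = 0; i < kStatsCountersReferenceCount; ++i) {
    Add(0x1000 + i, index);  // placeholder counter addresses
  }
  // ...and the table must be exactly full once the last section is added.
  assert(*index == kSize);
}

int main() {
  int index = 0;
  for (int i = 0; i < kSpecialReferenceCount + kExternalReferenceCount; ++i) {
    Add(0x2000 + i, &index);  // placeholder addresses for earlier sections
  }
  AddNativeCodeStatsCounters(&index);
}
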
......@@ -9,7 +9,6 @@
#include "src/accessors.h"
#include "src/builtins/builtins.h"
#include "src/counters-definitions.h"
#include "src/external-reference.h"
namespace v8 {
......@@ -38,15 +37,11 @@ class ExternalReferenceTable {
Accessors::kAccessorInfoCount + Accessors::kAccessorSetterCount;
// The number of stub cache external references, see AddStubCache.
static constexpr int kStubCacheReferenceCount = 12;
static constexpr int kStatsCountersReferenceCount =
#define SC(...) +1
STATS_COUNTER_NATIVE_CODE_LIST(SC);
#undef SC
static constexpr int kSize =
kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount + kStatsCountersReferenceCount;
kStubCacheReferenceCount;
static constexpr uint32_t kEntrySize =
static_cast<uint32_t>(kSystemPointerSize);
static constexpr uint32_t kSizeInBytes = kSize * kEntrySize + 2 * kUInt32Size;
......@@ -83,22 +78,12 @@ class ExternalReferenceTable {
void AddAccessors(int* index);
void AddStubCache(Isolate* isolate, int* index);
Address GetStatsCounterAddress(StatsCounter* counter);
void AddNativeCodeStatsCounters(Isolate* isolate, int* index);
STATIC_ASSERT(sizeof(Address) == kEntrySize);
Address ref_addr_[kSize];
static const char* const ref_name_[kSize];
// Not bool to guarantee deterministic size.
uint32_t is_initialized_ = 0;
// Redirect disabled stats counters to this field. This is done to make sure
// we can have a snapshot that includes native counters even when the embedder
// isn't collecting them.
// This field is uint32_t since the MacroAssembler and CodeStubAssembler
// accesses this field as a uint32_t.
uint32_t dummy_stats_counter_ = 0;
uint32_t is_initialized_ = 0; // Not bool to guarantee deterministic size.
uint32_t unused_padding_ = 0; // For alignment.
DISALLOW_COPY_AND_ASSIGN(ExternalReferenceTable);
};
......
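
The deleted kStatsCountersReferenceCount uses a small preprocessor trick: expanding the counter list with a macro that emits +1 per entry yields the element count as a constant expression. A self-contained sketch with a hypothetical two-entry list:

#include <iostream>

// Hypothetical list standing in for STATS_COUNTER_NATIVE_CODE_LIST.
#define STATS_COUNTER_NATIVE_CODE_LIST(V) \
  V(string_add_native, StringAddNative)   \
  V(sub_string_native, SubStringNative)

// Each list entry expands to "+1", so the initializer is the entry count.
static constexpr int kStatsCountersReferenceCount =
#define SC(...) +1
    STATS_COUNTER_NATIVE_CODE_LIST(SC);
#undef SC

static_assert(kStatsCountersReferenceCount == 2, "two counters in the list");

int main() { std::cout << kStatsCountersReferenceCount << "\n"; }
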
......@@ -1047,7 +1047,8 @@ DEFINE_BOOL_READONLY(fast_map_update, false,
DEFINE_INT(max_polymorphic_map_count, 4,
"maximum number of maps to track in POLYMORPHIC state")
DEFINE_BOOL(native_code_counters, DEBUG_BOOL,
// macro-assembler-ia32.cc
DEFINE_BOOL(native_code_counters, false,
"generate extra code for manipulating stats counters")
// objects.cc
......
......@@ -5025,9 +5025,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Addu(scratch1, scratch1, Operand(value));
......@@ -5040,9 +5037,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Subu(scratch1, scratch1, Operand(value));
......
......@@ -1714,9 +1714,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
addi(scratch1, scratch1, Operand(value));
......@@ -1729,9 +1726,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
subi(scratch1, scratch1, Operand(value));
......
......@@ -5,7 +5,6 @@
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <iomanip>
#include "include/libplatform/libplatform.h"
#include "src/assembler-arch.h"
......@@ -247,9 +246,6 @@ void WriteEmbeddedFile(i::EmbeddedFileWriter* writer) {
}
} // namespace
typedef std::map<std::string, int> CounterMap;
CounterMap* counter_map_;
int main(int argc, char** argv) {
v8::base::EnsureConsoleOutput();
......@@ -291,20 +287,7 @@ int main(int argc, char** argv) {
i::DisableEmbeddedBlobRefcounting();
v8::StartupData blob;
{
counter_map_ = new CounterMap();
v8::Isolate* isolate = v8::Isolate::Allocate();
// If --native-code-counters is on then we enable all counters to make
// sure we generate code to increment them from the snapshot.
if (i::FLAG_native_code_counters || i::FLAG_dump_counters ||
i::FLAG_dump_counters_nvp) {
isolate->SetCounterFunction([](const char* name) -> int* {
auto map_entry = counter_map_->find(name);
if (map_entry == counter_map_->end()) {
counter_map_->emplace(name, 0);
}
return &counter_map_->at(name);
});
}
if (i::FLAG_embedded_builtins) {
// Set code range such that relative jumps for builtins to
// builtin calls in the snapshot are possible.
......@@ -324,37 +307,6 @@ int main(int argc, char** argv) {
WriteEmbeddedFile(&embedded_writer);
}
blob = CreateSnapshotDataBlob(&snapshot_creator, embed_script.get());
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "\"" << key << "\"=" << counter << "\n";
}
} else {
// Dump counters in formatted boxes.
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ')
<< "| Value" << std::string(kValueBoxSize - 6, ' ')
<< "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter << " |\n";
}
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
}
}
delete counter_map_;
}
if (warmup_script) {
......
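
The reverted mksnapshot code wires counters through v8::Isolate::SetCounterFunction, whose callback must return a stable int* for a given counter name (or nullptr to leave it disabled); a std::map keeps those addresses stable as new names are added. A standalone sketch of that contract, exercised directly instead of through an Isolate (the counter name is made up):

#include <iostream>
#include <map>
#include <string>

typedef std::map<std::string, int> CounterMap;
static CounterMap* counter_map_ = nullptr;

// Same shape as v8::CounterLookupCallback: counter name in, stable int* out.
static int* LookupCounter(const char* name) {
  auto it = counter_map_->find(name);
  if (it == counter_map_->end()) {
    it = counter_map_->emplace(name, 0).first;
  }
  return &it->second;
}

int main() {
  counter_map_ = new CounterMap();
  // The reverted code hands a lambda with this behaviour to
  // isolate->SetCounterFunction(...); here we call it directly.
  ++*LookupCounter("c:V8.StringAddNative");  // made-up counter name
  ++*LookupCounter("c:V8.StringAddNative");
  for (const auto& entry : *counter_map_) {
    std::cout << "\"" << entry.first << "\"=" << entry.second << "\n";
  }
  delete counter_map_;
  return 0;
}
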
......@@ -2119,9 +2119,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
incl(counter_operand);
} else {
......@@ -2136,9 +2133,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
decl(counter_operand);
} else {
......
......@@ -148,38 +148,6 @@ class NativeTimeScope {
}
};
class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters {
public:
SnapshotNativeCounterTest() {}
bool SupportsNativeCounters() const {
#ifdef V8_USE_SNAPSHOT
#ifdef V8_SNAPSHOT_NATIVE_CODE_COUNTERS
return true;
#else
return false;
#endif // V8_SNAPSHOT_NATIVE_CODE_COUNTERS
#else
// If we do not have a snapshot then we rely on the runtime option.
return internal::FLAG_native_code_counters;
#endif // V8_USE_SNAPSHOT
}
#define SC(name, caption) \
int name() { \
CHECK(isolate()->counters()->name()->Enabled()); \
return *isolate()->counters()->name()->GetInternalPointer(); \
}
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
void PrintAll() {
#define SC(name, caption) PrintF(#caption " = %d\n", name());
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
}
};
} // namespace
......@@ -797,29 +765,5 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) {
PrintStats();
}
TEST_F(SnapshotNativeCounterTest, StringAddNative) {
RunJS("let s = 'hello, ' + 'world!'");
if (SupportsNativeCounters()) {
EXPECT_NE(0, string_add_native());
} else {
EXPECT_EQ(0, string_add_native());
}
PrintAll();
}
TEST_F(SnapshotNativeCounterTest, SubStringNative) {
RunJS("'hello, world!'.substring(6);");
if (SupportsNativeCounters()) {
EXPECT_NE(0, sub_string_native());
} else {
EXPECT_EQ(0, sub_string_native());
}
PrintAll();
}
} // namespace internal
} // namespace v8
......@@ -15,13 +15,11 @@
namespace v8 {
IsolateWrapper::IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression)
IsolateWrapper::IsolateWrapper(bool enforce_pointer_compression)
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = array_buffer_allocator_;
create_params.counter_lookup_callback = counter_lookup_callback;
if (enforce_pointer_compression) {
isolate_ = reinterpret_cast<v8::Isolate*>(
i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
......@@ -43,22 +41,6 @@ IsolateWrapper::~IsolateWrapper() {
// static
v8::IsolateWrapper* SharedIsolateHolder::isolate_wrapper_ = nullptr;
// static
int* SharedIsolateAndCountersHolder::LookupCounter(const char* name) {
DCHECK_NOT_NULL(counter_map_);
auto map_entry = counter_map_->find(name);
if (map_entry == counter_map_->end()) {
counter_map_->emplace(name, 0);
}
return &counter_map_->at(name);
}
// static
v8::IsolateWrapper* SharedIsolateAndCountersHolder::isolate_wrapper_ = nullptr;
// static
CounterMap* SharedIsolateAndCountersHolder::counter_map_ = nullptr;
namespace internal {
SaveFlags::SaveFlags() {
......
......@@ -22,16 +22,13 @@ namespace v8 {
class ArrayBufferAllocator;
typedef std::map<std::string, int> CounterMap;
// RAII-like Isolate instance wrapper.
class IsolateWrapper final {
public:
// When enforce_pointer_compression is true the Isolate is created with
// enabled pointer compression. When it's false then the Isolate is created
// with the default pointer compression state for current build.
explicit IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression = false);
explicit IsolateWrapper(bool enforce_pointer_compression = false);
~IsolateWrapper();
v8::Isolate* isolate() const { return isolate_; }
......@@ -49,8 +46,7 @@ class SharedIsolateHolder final {
static void CreateIsolate() {
CHECK_NULL(isolate_wrapper_);
isolate_wrapper_ =
new IsolateWrapper([](const char* name) -> int* { return nullptr; });
isolate_wrapper_ = new IsolateWrapper();
}
static void DeleteIsolate() {
......@@ -65,34 +61,6 @@ class SharedIsolateHolder final {
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateHolder);
};
class SharedIsolateAndCountersHolder final {
public:
static v8::Isolate* isolate() { return isolate_wrapper_->isolate(); }
static void CreateIsolate() {
CHECK_NULL(counter_map_);
CHECK_NULL(isolate_wrapper_);
counter_map_ = new CounterMap();
isolate_wrapper_ = new IsolateWrapper(LookupCounter);
}
static void DeleteIsolate() {
CHECK_NOT_NULL(counter_map_);
CHECK_NOT_NULL(isolate_wrapper_);
delete isolate_wrapper_;
isolate_wrapper_ = nullptr;
delete counter_map_;
counter_map_ = nullptr;
}
private:
static int* LookupCounter(const char* name);
static CounterMap* counter_map_;
static v8::IsolateWrapper* isolate_wrapper_;
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateAndCountersHolder);
};
//
// A set of mixins from which the test fixtures will be constructed.
//
......@@ -100,8 +68,7 @@ template <typename TMixin>
class WithPrivateIsolateMixin : public TMixin {
public:
explicit WithPrivateIsolateMixin(bool enforce_pointer_compression = false)
: isolate_wrapper_([](const char* name) -> int* { return nullptr; },
enforce_pointer_compression) {}
: isolate_wrapper_(enforce_pointer_compression) {}
v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); }
......@@ -114,20 +81,20 @@ class WithPrivateIsolateMixin : public TMixin {
DISALLOW_COPY_AND_ASSIGN(WithPrivateIsolateMixin);
};
template <typename TMixin, typename TSharedIsolateHolder = SharedIsolateHolder>
template <typename TMixin>
class WithSharedIsolateMixin : public TMixin {
public:
WithSharedIsolateMixin() = default;
v8::Isolate* v8_isolate() const { return TSharedIsolateHolder::isolate(); }
v8::Isolate* v8_isolate() const { return SharedIsolateHolder::isolate(); }
static void SetUpTestCase() {
TMixin::SetUpTestCase();
TSharedIsolateHolder::CreateIsolate();
SharedIsolateHolder::CreateIsolate();
}
static void TearDownTestCase() {
TSharedIsolateHolder::DeleteIsolate();
SharedIsolateHolder::DeleteIsolate();
TMixin::TearDownTestCase();
}
......@@ -328,14 +295,6 @@ using TestWithNativeContext = //
WithSharedIsolateMixin< //
::testing::Test>>>>;
using TestWithNativeContextAndCounters = //
WithInternalIsolateMixin< //
WithContextMixin< //
WithIsolateScopeMixin< //
WithSharedIsolateMixin< //
::testing::Test, //
SharedIsolateAndCountersHolder>>>>;
using TestWithNativeContextAndZone = //
WithZoneMixin< //
WithInternalIsolateMixin< //
......
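
Finally, the removed test fixtures relied on WithSharedIsolateMixin taking its isolate holder as a template parameter, so a counters-aware holder could be swapped in without touching the tests themselves. A minimal standalone sketch of that pattern (the holder classes below are illustrative, not the real test-utils types):

#include <iostream>

struct PlainHolder {
  static void CreateIsolate() { std::cout << "plain shared isolate\n"; }
  static void DeleteIsolate() {}
};

struct CountersHolder {
  static void CreateIsolate() { std::cout << "shared isolate + counter map\n"; }
  static void DeleteIsolate() {}
};

template <typename TMixin, typename TSharedIsolateHolder = PlainHolder>
class WithSharedIsolateMixin : public TMixin {
 public:
  static void SetUpTestCase() { TSharedIsolateHolder::CreateIsolate(); }
  static void TearDownTestCase() { TSharedIsolateHolder::DeleteIsolate(); }
};

struct TestBase {};

int main() {
  WithSharedIsolateMixin<TestBase>::SetUpTestCase();                  // default holder
  WithSharedIsolateMixin<TestBase, CountersHolder>::SetUpTestCase();  // counters-aware holder
}
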