Commit 519bf695 authored by Michael Achenbach, committed by Commit Bot

Revert "[snapshot] Add support for native counters."

This reverts commit 93716b9e.

Reason for revert: Breaks asan debug:
https://ci.chromium.org/p/v8/builders/ci/V8%20Clusterfuzz%20Mac64%20ASAN%20-%20debug%20builder/7872
https://ci.chromium.org/p/v8/builders/ci/V8%20Clusterfuzz%20Linux64%20ASAN%20-%20debug%20builder/7874

Original change's description:
> [snapshot] Add support for native counters.
> 
> Counters in generated code, as enabled with --native-code-counters, do not work
> in the snapshot. This adds a `v8_enable_snapshot_native_code_counters` build
> option, enabled by default in debug mode, that allows code from the snapshot
> to increment the current isolate's set of counters.
> 
> For this to work, we need to add native code counters in the external reference
> table.
> 
> To keep the no-snapshot configuration similar, we've also enabled the
> --native-code-counters flag by default for debug builds.
> 
> Change-Id: I4478b79858c9b04f57e06e7ec67449e9e3a76f53
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1528998
> Commit-Queue: Pierre Langlois <pierre.langlois@arm.com>
> Reviewed-by: Peter Marshall <petermarshall@chromium.org>
> Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60495}

TBR=sigurds@chromium.org,jgruber@chromium.org,petermarshall@chromium.org,pierre.langlois@arm.com

Change-Id: I93f1ed714e3dcd309f3100685e4bd282db471d46
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1543209
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60500}
parent 39bfa157
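
For readers coming to this revert cold: with --native-code-counters, V8's code generators emit an inline load/add/store against the address of a StatsCounter slot, and that address comes from the embedder's counter-lookup callback. A minimal sketch of the embedder side, using the real v8::Isolate::SetCounterFunction / CounterLookupCallback API; the std::map mirrors the one the reverted mksnapshot change used and is otherwise illustrative:

```cpp
#include <map>
#include <string>

#include "include/v8.h"  // Assumed include path for an embedder build.

// Lives for the duration of the process; V8 writes through the returned
// pointers directly from generated code.
static std::map<std::string, int> g_counters;

static int* LookupCounter(const char* name) {
  return &g_counters[name];  // operator[] default-initializes new slots to 0.
}

void InstallCounters(v8::Isolate* isolate) {
  // Real V8 API: typedef int* (*CounterLookupCallback)(const char* name);
  isolate->SetCounterFunction(LookupCounter);
}
```

The reverted change's point was that code baked into the snapshot cannot use such an address, since it was looked up in the snapshot-building process; hence the external-reference-table indirection visible in the hunks below.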
@@ -91,12 +91,6 @@ declare_args() {
# Enable code comments for builtins in the snapshot (impacts performance).
v8_enable_snapshot_code_comments = false
# Enable native counters from the snapshot (impacts performance, sets
# -dV8_SNAPSHOT_NATIVE_CODE_COUNTERS).
# This option will generate extra code in the snapshot to increment counters,
# as per the --native-code-counters flag.
v8_enable_snapshot_native_code_counters = ""
# Enable code-generation-time checking of types in the CodeStubAssembler.
v8_enable_verify_csa = false
@@ -214,9 +208,6 @@ if (v8_check_microtasks_scopes_consistency == "") {
v8_check_microtasks_scopes_consistency =
v8_enable_debugging_features || dcheck_always_on
}
if (v8_enable_snapshot_native_code_counters == "") {
v8_enable_snapshot_native_code_counters = v8_enable_debugging_features
}
assert(v8_current_cpu != "x86" || !v8_untrusted_code_mitigations,
"Untrusted code mitigations are unsupported on ia32")
@@ -403,9 +394,6 @@ config("features") {
}
if (v8_use_snapshot) {
defines += [ "V8_USE_SNAPSHOT" ]
if (v8_enable_snapshot_native_code_counters) {
defines += [ "V8_SNAPSHOT_NATIVE_CODE_COUNTERS" ]
}
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
@@ -1192,14 +1180,6 @@ template("run_mksnapshot") {
args += [ "--code-comments" ]
}
if (v8_enable_snapshot_native_code_counters) {
args += [ "--native-code-counters" ]
} else {
# --native-code-counters is the default in debug mode so make sure we can
# unset it.
args += [ "--no-native-code-counters" ]
}
if (v8_enable_fast_mksnapshot) {
args += [
"--no-turbo-rewrite-far-jumps",
@@ -2018,7 +1998,6 @@ v8_source_set("v8_base") {
"src/conversions-inl.h",
"src/conversions.cc",
"src/conversions.h",
"src/counters-definitions.h",
"src/counters-inl.h",
"src/counters.cc",
"src/counters.h",
......
@@ -2684,9 +2684,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_NE(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Mov(scratch2, ExternalReference::Create(counter));
Ldr(scratch1.W(), MemOperand(scratch2));
Add(scratch1.W(), scratch1.W(), value);
......
@@ -8143,9 +8143,6 @@ void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Add(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
@@ -8157,9 +8154,6 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
if (FLAG_native_code_counters && counter->Enabled()) {
Node* counter_address =
ExternalConstant(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Node* value = Load(MachineType::Int32(), counter_address);
value = Int32Sub(value, Int32Constant(delta));
StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
......
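
The comments deleted in the three hunks above all refer to the same trick from the reverted change: a counter the embedder has not enabled gets its external reference redirected to a shared uint32_t dummy_stats_counter_ field, so every emitted counter update must be exactly 32 bits wide. A simplified C++ sketch of that redirection (FakeCounter is an illustrative stand-in for v8::internal::StatsCounter; the real logic appears in the external-reference-table.cc hunk further down):

```cpp
#include <cstdint>

struct FakeCounter {
  int* internal_pointer = nullptr;  // Null when the embedder collects nothing.
  bool Enabled() const { return internal_pointer != nullptr; }
};

// Disabled counters are redirected here, so snapshot code can increment
// unconditionally without writing to arbitrary memory.
static uint32_t dummy_stats_counter = 0;

uintptr_t GetStatsCounterAddress(FakeCounter* counter) {
  int* address = counter->Enabled()
                     ? counter->internal_pointer
                     : reinterpret_cast<int*>(&dummy_stats_counter);
  // Because the target may be this uint32_t field, generated code must use
  // 32-bit accesses (Ldr/Str on W registers, lwz/stw, incl/decl, ...).
  return reinterpret_cast<uintptr_t>(address);
}
```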
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COUNTERS_DEFINITIONS_H_
#define V8_COUNTERS_DEFINITIONS_H_
namespace v8 {
namespace internal {
#define HISTOGRAM_RANGE_LIST(HR) \
/* Generic range histograms: HR(name, caption, min, max, num_buckets) */ \
HR(background_marking, V8.GCBackgroundMarking, 0, 10000, 101) \
HR(background_scavenger, V8.GCBackgroundScavenger, 0, 10000, 101) \
HR(background_sweeping, V8.GCBackgroundSweeping, 0, 10000, 101) \
HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \
HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \
HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \
HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \
HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \
HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101) \
HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22) \
HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101) \
HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101) \
HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101) \
HR(gc_finalize_finish, V8.GCFinalizeMC.Finish, 0, 10000, 101) \
HR(gc_finalize_mark, V8.GCFinalizeMC.Mark, 0, 10000, 101) \
HR(gc_finalize_prologue, V8.GCFinalizeMC.Prologue, 0, 10000, 101) \
HR(gc_finalize_sweep, V8.GCFinalizeMC.Sweep, 0, 10000, 101) \
HR(gc_scavenger_scavenge_main, V8.GCScavenger.ScavengeMain, 0, 10000, 101) \
HR(gc_scavenger_scavenge_roots, V8.GCScavenger.ScavengeRoots, 0, 10000, 101) \
HR(gc_mark_compactor, V8.GCMarkCompactor, 0, 10000, 101) \
HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22) \
HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \
/* Asm/Wasm. */ \
HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 1000000, \
51) \
HR(wasm_functions_per_wasm_module, V8.WasmFunctionsPerModule.wasm, 1, \
1000000, 51) \
HR(array_buffer_big_allocations, V8.ArrayBufferLargeAllocations, 0, 4096, \
13) \
HR(array_buffer_new_size_failures, V8.ArrayBufferNewSizeFailures, 0, 4096, \
13) \
HR(shared_array_allocations, V8.SharedArrayAllocationSizes, 0, 4096, 13) \
HR(wasm_asm_function_size_bytes, V8.WasmFunctionSizeBytes.asm, 1, GB, 51) \
HR(wasm_wasm_function_size_bytes, V8.WasmFunctionSizeBytes.wasm, 1, GB, 51) \
HR(wasm_asm_module_size_bytes, V8.WasmModuleSizeBytes.asm, 1, GB, 51) \
HR(wasm_wasm_module_size_bytes, V8.WasmModuleSizeBytes.wasm, 1, GB, 51) \
HR(wasm_asm_min_mem_pages_count, V8.WasmMinMemPagesCount.asm, 1, 2 << 16, \
51) \
HR(wasm_wasm_min_mem_pages_count, V8.WasmMinMemPagesCount.wasm, 1, 2 << 16, \
51) \
HR(wasm_wasm_max_mem_pages_count, V8.WasmMaxMemPagesCount.wasm, 1, 2 << 16, \
51) \
HR(wasm_decode_asm_module_peak_memory_bytes, \
V8.WasmDecodeModulePeakMemoryBytes.asm, 1, GB, 51) \
HR(wasm_decode_wasm_module_peak_memory_bytes, \
V8.WasmDecodeModulePeakMemoryBytes.wasm, 1, GB, 51) \
HR(asm_wasm_translation_peak_memory_bytes, \
V8.AsmWasmTranslationPeakMemoryBytes, 1, GB, 51) \
HR(wasm_compile_function_peak_memory_bytes, \
V8.WasmCompileFunctionPeakMemoryBytes, 1, GB, 51) \
HR(asm_module_size_bytes, V8.AsmModuleSizeBytes, 1, GB, 51) \
HR(asm_wasm_translation_throughput, V8.AsmWasmTranslationThroughput, 1, 100, \
20) \
HR(wasm_lazy_compilation_throughput, V8.WasmLazyCompilationThroughput, 1, \
10000, 50) \
HR(compile_script_cache_behaviour, V8.CompileScript.CacheBehaviour, 0, 20, \
21) \
HR(wasm_memory_allocation_result, V8.WasmMemoryAllocationResult, 0, 3, 4) \
HR(wasm_address_space_usage_mb, V8.WasmAddressSpaceUsageMiB, 0, 1 << 20, \
128) \
/* code size of live modules, collected on GC */ \
HR(wasm_module_code_size_mb, V8.WasmModuleCodeSizeMiB, 0, 1024, 64) \
/* code size of modules after baseline compilation */ \
HR(wasm_module_code_size_mb_after_baseline, \
V8.WasmModuleCodeSizeBaselineMiB, 0, 1024, 64) \
/* code size of modules after top-tier compilation */ \
HR(wasm_module_code_size_mb_after_top_tier, V8.WasmModuleCodeSizeTopTierMiB, \
0, 1024, 64)
#define HISTOGRAM_TIMER_LIST(HT) \
/* Timer histograms, not thread safe: HT(name, caption, max, unit) */ \
/* Garbage collection timers. */ \
HT(gc_context, V8.GCContext, 10000, \
MILLISECOND) /* GC context cleanup time */ \
HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND) \
HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \
MILLISECOND) \
HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
MILLISECOND) \
HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
MILLISECOND) \
/* Compilation times. */ \
HT(collect_source_positions, V8.CollectSourcePositions, 1000000, \
MICROSECOND) \
HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND) \
HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND) \
/* Serialization as part of compilation (code caching) */ \
HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000, \
MICROSECOND) \
/* Total compilation time incl. caching/parsing */ \
HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND) \
/* Total JavaScript execution time (including callbacks and runtime calls */ \
HT(execute, V8.Execute, 1000000, MICROSECOND) \
/* Asm/Wasm */ \
HT(asm_wasm_translation_time, V8.AsmWasmTranslationMicroSeconds, 1000000, \
MICROSECOND) \
HT(wasm_lazy_compilation_time, V8.WasmLazyCompilationMicroSeconds, 1000000, \
MICROSECOND)
#define TIMED_HISTOGRAM_LIST(HT) \
/* Timer histograms, thread safe: HT(name, caption, max, unit) */ \
/* Garbage collection timers. */ \
HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND) \
HT(gc_compactor_background, V8.GCCompactorBackground, 10000, MILLISECOND) \
HT(gc_compactor_foreground, V8.GCCompactorForeground, 10000, MILLISECOND) \
HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND) \
HT(gc_finalize_background, V8.GCFinalizeMCBackground, 10000, MILLISECOND) \
HT(gc_finalize_foreground, V8.GCFinalizeMCForeground, 10000, MILLISECOND) \
HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000, \
MILLISECOND) \
HT(gc_finalize_reduce_memory_background, \
V8.GCFinalizeMCReduceMemoryBackground, 10000, MILLISECOND) \
HT(gc_finalize_reduce_memory_foreground, \
V8.GCFinalizeMCReduceMemoryForeground, 10000, MILLISECOND) \
HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND) \
HT(gc_scavenger_background, V8.GCScavengerBackground, 10000, MILLISECOND) \
HT(gc_scavenger_foreground, V8.GCScavengerForeground, 10000, MILLISECOND) \
/* Wasm timers. */ \
HT(wasm_decode_asm_module_time, V8.WasmDecodeModuleMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_decode_wasm_module_time, V8.WasmDecodeModuleMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(wasm_decode_asm_function_time, V8.WasmDecodeFunctionMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_decode_wasm_function_time, V8.WasmDecodeFunctionMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(wasm_compile_asm_module_time, V8.WasmCompileModuleMicroSeconds.asm, \
10000000, MICROSECOND) \
HT(wasm_compile_wasm_module_time, V8.WasmCompileModuleMicroSeconds.wasm, \
10000000, MICROSECOND) \
HT(wasm_compile_asm_function_time, V8.WasmCompileFunctionMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_compile_wasm_function_time, V8.WasmCompileFunctionMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(liftoff_compile_time, V8.LiftoffCompileMicroSeconds, 10000000, \
MICROSECOND) \
HT(wasm_instantiate_wasm_module_time, \
V8.WasmInstantiateModuleMicroSeconds.wasm, 10000000, MICROSECOND) \
HT(wasm_instantiate_asm_module_time, \
V8.WasmInstantiateModuleMicroSeconds.asm, 10000000, MICROSECOND) \
/* Total compilation time incl. caching/parsing for various cache states. */ \
HT(compile_script_with_produce_cache, \
V8.CompileScriptMicroSeconds.ProduceCache, 1000000, MICROSECOND) \
HT(compile_script_with_isolate_cache_hit, \
V8.CompileScriptMicroSeconds.IsolateCacheHit, 1000000, MICROSECOND) \
HT(compile_script_with_consume_cache, \
V8.CompileScriptMicroSeconds.ConsumeCache, 1000000, MICROSECOND) \
HT(compile_script_consume_failed, \
V8.CompileScriptMicroSeconds.ConsumeCache.Failed, 1000000, MICROSECOND) \
HT(compile_script_no_cache_other, \
V8.CompileScriptMicroSeconds.NoCache.Other, 1000000, MICROSECOND) \
HT(compile_script_no_cache_because_inline_script, \
V8.CompileScriptMicroSeconds.NoCache.InlineScript, 1000000, MICROSECOND) \
HT(compile_script_no_cache_because_script_too_small, \
V8.CompileScriptMicroSeconds.NoCache.ScriptTooSmall, 1000000, \
MICROSECOND) \
HT(compile_script_no_cache_because_cache_too_cold, \
V8.CompileScriptMicroSeconds.NoCache.CacheTooCold, 1000000, MICROSECOND) \
HT(compile_script_on_background, \
V8.CompileScriptMicroSeconds.BackgroundThread, 1000000, MICROSECOND) \
HT(compile_function_on_background, \
V8.CompileFunctionMicroSeconds.BackgroundThread, 1000000, MICROSECOND)
#define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
AHT(compile_lazy, V8.CompileLazyMicroSeconds)
#define HISTOGRAM_PERCENTAGE_LIST(HP) \
/* Heap fragmentation. */ \
HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal) \
HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
HP(external_fragmentation_code_space, \
V8.MemoryExternalFragmentationCodeSpace) \
HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)
// Note: These use Histogram with options (min=1000, max=500000, buckets=50).
#define HISTOGRAM_LEGACY_MEMORY_LIST(HM) \
HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted) \
HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)
// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem. Please avoid using recursive macros of this length when
// possible.
#define STATS_COUNTER_LIST_1(SC) \
/* Global Handle Count*/ \
SC(global_handles, V8.GlobalHandles) \
/* OS Memory allocated */ \
SC(memory_allocated, V8.OsMemoryAllocated) \
SC(maps_normalized, V8.MapsNormalized) \
SC(maps_created, V8.MapsCreated) \
SC(elements_transitions, V8.ObjectElementsTransitions) \
SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
SC(alive_after_last_gc, V8.AliveAfterLastGC) \
SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
SC(objs_since_last_full, V8.ObjsSinceLastFull) \
SC(string_table_capacity, V8.StringTableCapacity) \
SC(number_of_symbols, V8.NumberOfSymbols) \
SC(inlined_copied_elements, V8.InlinedCopiedElements) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \
/* Amount of evaled source code. */ \
SC(total_eval_size, V8.TotalEvalSize) \
/* Amount of loaded source code. */ \
SC(total_load_size, V8.TotalLoadSize) \
/* Amount of parsed source code. */ \
SC(total_parse_size, V8.TotalParseSize) \
/* Amount of source code skipped over using preparsing. */ \
SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
/* Amount of compiled source code. */ \
SC(total_compile_size, V8.TotalCompileSize) \
/* Amount of source code compiled with the full codegen. */ \
SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
/* Number of contexts created from scratch. */ \
SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
/* Number of contexts created by partial snapshot. */ \
SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
/* Number of code objects found from pc. */ \
SC(pc_to_code, V8.PcToCode) \
SC(pc_to_code_cached, V8.PcToCodeCached) \
/* The store-buffer implementation of the write barrier. */ \
SC(store_buffer_overflows, V8.StoreBufferOverflows)
#define STATS_COUNTER_LIST_2(SC) \
/* Amount of (JS) compiled code. */ \
SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
SC(gc_compactor_caused_by_oldspace_exhaustion, \
V8.GCCompactorCausedByOldspaceExhaustion) \
SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
SC(ic_named_load_global_stub, V8.ICNamedLoadGlobalStub) \
SC(ic_store_normal_miss, V8.ICStoreNormalMiss) \
SC(ic_store_normal_hit, V8.ICStoreNormalHit) \
SC(ic_binary_op_miss, V8.ICBinaryOpMiss) \
SC(ic_compare_miss, V8.ICCompareMiss) \
SC(ic_call_miss, V8.ICCallMiss) \
SC(ic_keyed_call_miss, V8.ICKeyedCallMiss) \
SC(ic_store_miss, V8.ICStoreMiss) \
SC(ic_keyed_store_miss, V8.ICKeyedStoreMiss) \
SC(cow_arrays_converted, V8.COWArraysConverted) \
SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \
SC(string_add_runtime, V8.StringAddRuntime) \
SC(sub_string_runtime, V8.SubStringRuntime) \
SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
SC(math_exp_runtime, V8.MathExpRuntime) \
SC(math_log_runtime, V8.MathLogRuntime) \
SC(math_pow_runtime, V8.MathPowRuntime) \
SC(stack_interrupts, V8.StackInterrupts) \
SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
SC(runtime_calls, V8.RuntimeCalls) \
SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
/* Number of write barriers in generated code. */ \
/* TODO: This isn't functional at the moment, we should teach the */ \
/* compiler about this counter. */ \
SC(write_barriers_static, V8.WriteBarriersStatic) \
SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable) \
SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted) \
SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed) \
SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) \
/* Total code size (including metadata) of baseline code or bytecode. */ \
SC(total_baseline_code_size, V8.TotalBaselineCodeSize) \
/* Total count of functions compiled using the baseline compiler. */ \
SC(total_baseline_compile_count, V8.TotalBaselineCompileCount)
#define STATS_COUNTER_TS_LIST(SC) \
SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
SC(wasm_reloc_size, V8.WasmRelocBytes) \
SC(wasm_lazily_compiled_functions, V8.WasmLazilyCompiledFunctions) \
SC(liftoff_compiled_functions, V8.LiftoffCompiledFunctions) \
SC(liftoff_unsupported_functions, V8.LiftoffUnsupportedFunctions)
// List of counters that can be incremented from generated code. We need them in
// a separate list to be able to relocate them.
#define STATS_COUNTER_NATIVE_CODE_LIST(SC) \
/* Number of write barriers executed at runtime. */ \
SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
SC(constructed_objects, V8.ConstructedObjects) \
SC(fast_new_closure_total, V8.FastNewClosureTotal) \
SC(regexp_entry_native, V8.RegExpEntryNative) \
SC(string_add_native, V8.StringAddNative) \
SC(sub_string_native, V8.SubStringNative) \
SC(ic_keyed_load_generic_smi, V8.ICKeyedLoadGenericSmi) \
SC(ic_keyed_load_generic_symbol, V8.ICKeyedLoadGenericSymbol) \
SC(ic_keyed_load_generic_slow, V8.ICKeyedLoadGenericSlow) \
SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses)
} // namespace internal
} // namespace v8
#endif // V8_COUNTERS_DEFINITIONS_H_
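
The deleted src/counters-definitions.h above is a pure X-macro header: each *_LIST macro applies a caller-supplied macro to every entry, so one list can expand into field declarations, accessors, reset loops, and name tables. A compact sketch of the idiom with a hypothetical two-entry list:

```cpp
#include <cstdio>

// Hypothetical list; entries follow the SC(name, caption) shape used above.
#define MY_COUNTER_LIST(SC) \
  SC(foo_count, V8.FooCount) \
  SC(bar_count, V8.BarCount)

// Expansion 1: one int field per entry.
#define SC(name, caption) int name##_ = 0;
MY_COUNTER_LIST(SC)
#undef SC

int main() {
  foo_count_++;
  // Expansion 2: print every entry as "caption = value".
#define SC(name, caption) std::printf(#caption " = %d\n", name##_);
  MY_COUNTER_LIST(SC)
#undef SC
}
```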
@@ -228,9 +228,7 @@ Counters::Counters(Isolate* isolate)
const char* caption;
} kStatsCounters[] = {
#define SC(name, caption) {&Counters::name##_, "c:" #caption},
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
STATS_COUNTER_LIST_1(SC) STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name) \
{&Counters::count_of_##name##_, "c:" "V8.CountOf_" #name}, \
@@ -264,8 +262,10 @@ void Counters::ResetCounterFunction(CounterLookupCallback f) {
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
#undef SC
#define SC(name, caption) name##_.Reset();
STATS_COUNTER_TS_LIST(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name) \
......
@@ -11,7 +11,6 @@
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/counters-definitions.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/isolate.h"
@@ -132,7 +131,7 @@ class StatsCounterBase {
void DecrementLoc(int* loc) { (*loc)--; }
void DecrementLoc(int* loc, int value) { (*loc) -= value; }
V8_EXPORT_PRIVATE int* FindLocationInStatsTable() const;
int* FindLocationInStatsTable() const;
};
// StatsCounters are dynamically created values which can be tracked in
@@ -1198,6 +1197,315 @@ class RuntimeCallTimerScope {
DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
};
#define HISTOGRAM_RANGE_LIST(HR) \
/* Generic range histograms: HR(name, caption, min, max, num_buckets) */ \
HR(background_marking, V8.GCBackgroundMarking, 0, 10000, 101) \
HR(background_scavenger, V8.GCBackgroundScavenger, 0, 10000, 101) \
HR(background_sweeping, V8.GCBackgroundSweeping, 0, 10000, 101) \
HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \
HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \
HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \
HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \
HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \
HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101) \
HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22) \
HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101) \
HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101) \
HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101) \
HR(gc_finalize_finish, V8.GCFinalizeMC.Finish, 0, 10000, 101) \
HR(gc_finalize_mark, V8.GCFinalizeMC.Mark, 0, 10000, 101) \
HR(gc_finalize_prologue, V8.GCFinalizeMC.Prologue, 0, 10000, 101) \
HR(gc_finalize_sweep, V8.GCFinalizeMC.Sweep, 0, 10000, 101) \
HR(gc_scavenger_scavenge_main, V8.GCScavenger.ScavengeMain, 0, 10000, 101) \
HR(gc_scavenger_scavenge_roots, V8.GCScavenger.ScavengeRoots, 0, 10000, 101) \
HR(gc_mark_compactor, V8.GCMarkCompactor, 0, 10000, 101) \
HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22) \
HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \
/* Asm/Wasm. */ \
HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 1000000, \
51) \
HR(wasm_functions_per_wasm_module, V8.WasmFunctionsPerModule.wasm, 1, \
1000000, 51) \
HR(array_buffer_big_allocations, V8.ArrayBufferLargeAllocations, 0, 4096, \
13) \
HR(array_buffer_new_size_failures, V8.ArrayBufferNewSizeFailures, 0, 4096, \
13) \
HR(shared_array_allocations, V8.SharedArrayAllocationSizes, 0, 4096, 13) \
HR(wasm_asm_function_size_bytes, V8.WasmFunctionSizeBytes.asm, 1, GB, 51) \
HR(wasm_wasm_function_size_bytes, V8.WasmFunctionSizeBytes.wasm, 1, GB, 51) \
HR(wasm_asm_module_size_bytes, V8.WasmModuleSizeBytes.asm, 1, GB, 51) \
HR(wasm_wasm_module_size_bytes, V8.WasmModuleSizeBytes.wasm, 1, GB, 51) \
HR(wasm_asm_min_mem_pages_count, V8.WasmMinMemPagesCount.asm, 1, 2 << 16, \
51) \
HR(wasm_wasm_min_mem_pages_count, V8.WasmMinMemPagesCount.wasm, 1, 2 << 16, \
51) \
HR(wasm_wasm_max_mem_pages_count, V8.WasmMaxMemPagesCount.wasm, 1, 2 << 16, \
51) \
HR(wasm_decode_asm_module_peak_memory_bytes, \
V8.WasmDecodeModulePeakMemoryBytes.asm, 1, GB, 51) \
HR(wasm_decode_wasm_module_peak_memory_bytes, \
V8.WasmDecodeModulePeakMemoryBytes.wasm, 1, GB, 51) \
HR(asm_wasm_translation_peak_memory_bytes, \
V8.AsmWasmTranslationPeakMemoryBytes, 1, GB, 51) \
HR(wasm_compile_function_peak_memory_bytes, \
V8.WasmCompileFunctionPeakMemoryBytes, 1, GB, 51) \
HR(asm_module_size_bytes, V8.AsmModuleSizeBytes, 1, GB, 51) \
HR(asm_wasm_translation_throughput, V8.AsmWasmTranslationThroughput, 1, 100, \
20) \
HR(wasm_lazy_compilation_throughput, V8.WasmLazyCompilationThroughput, 1, \
10000, 50) \
HR(compile_script_cache_behaviour, V8.CompileScript.CacheBehaviour, 0, 20, \
21) \
HR(wasm_memory_allocation_result, V8.WasmMemoryAllocationResult, 0, 3, 4) \
HR(wasm_address_space_usage_mb, V8.WasmAddressSpaceUsageMiB, 0, 1 << 20, \
128) \
/* code size of live modules, collected on GC */ \
HR(wasm_module_code_size_mb, V8.WasmModuleCodeSizeMiB, 0, 1024, 64) \
/* code size of modules after baseline compilation */ \
HR(wasm_module_code_size_mb_after_baseline, \
V8.WasmModuleCodeSizeBaselineMiB, 0, 1024, 64) \
/* code size of modules after top-tier compilation */ \
HR(wasm_module_code_size_mb_after_top_tier, V8.WasmModuleCodeSizeTopTierMiB, \
0, 1024, 64)
#define HISTOGRAM_TIMER_LIST(HT) \
/* Timer histograms, not thread safe: HT(name, caption, max, unit) */ \
/* Garbage collection timers. */ \
HT(gc_context, V8.GCContext, 10000, \
MILLISECOND) /* GC context cleanup time */ \
HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND) \
HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \
MILLISECOND) \
HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
MILLISECOND) \
HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
MILLISECOND) \
/* Compilation times. */ \
HT(collect_source_positions, V8.CollectSourcePositions, 1000000, \
MICROSECOND) \
HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND) \
HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND) \
/* Serialization as part of compilation (code caching) */ \
HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000, \
MICROSECOND) \
/* Total compilation time incl. caching/parsing */ \
HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND) \
/* Total JavaScript execution time (including callbacks and runtime calls */ \
HT(execute, V8.Execute, 1000000, MICROSECOND) \
/* Asm/Wasm */ \
HT(asm_wasm_translation_time, V8.AsmWasmTranslationMicroSeconds, 1000000, \
MICROSECOND) \
HT(wasm_lazy_compilation_time, V8.WasmLazyCompilationMicroSeconds, 1000000, \
MICROSECOND)
#define TIMED_HISTOGRAM_LIST(HT) \
/* Timer histograms, thread safe: HT(name, caption, max, unit) */ \
/* Garbage collection timers. */ \
HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND) \
HT(gc_compactor_background, V8.GCCompactorBackground, 10000, MILLISECOND) \
HT(gc_compactor_foreground, V8.GCCompactorForeground, 10000, MILLISECOND) \
HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND) \
HT(gc_finalize_background, V8.GCFinalizeMCBackground, 10000, MILLISECOND) \
HT(gc_finalize_foreground, V8.GCFinalizeMCForeground, 10000, MILLISECOND) \
HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000, \
MILLISECOND) \
HT(gc_finalize_reduce_memory_background, \
V8.GCFinalizeMCReduceMemoryBackground, 10000, MILLISECOND) \
HT(gc_finalize_reduce_memory_foreground, \
V8.GCFinalizeMCReduceMemoryForeground, 10000, MILLISECOND) \
HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND) \
HT(gc_scavenger_background, V8.GCScavengerBackground, 10000, MILLISECOND) \
HT(gc_scavenger_foreground, V8.GCScavengerForeground, 10000, MILLISECOND) \
/* Wasm timers. */ \
HT(wasm_decode_asm_module_time, V8.WasmDecodeModuleMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_decode_wasm_module_time, V8.WasmDecodeModuleMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(wasm_decode_asm_function_time, V8.WasmDecodeFunctionMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_decode_wasm_function_time, V8.WasmDecodeFunctionMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(wasm_compile_asm_module_time, V8.WasmCompileModuleMicroSeconds.asm, \
10000000, MICROSECOND) \
HT(wasm_compile_wasm_module_time, V8.WasmCompileModuleMicroSeconds.wasm, \
10000000, MICROSECOND) \
HT(wasm_compile_asm_function_time, V8.WasmCompileFunctionMicroSeconds.asm, \
1000000, MICROSECOND) \
HT(wasm_compile_wasm_function_time, V8.WasmCompileFunctionMicroSeconds.wasm, \
1000000, MICROSECOND) \
HT(liftoff_compile_time, V8.LiftoffCompileMicroSeconds, 10000000, \
MICROSECOND) \
HT(wasm_instantiate_wasm_module_time, \
V8.WasmInstantiateModuleMicroSeconds.wasm, 10000000, MICROSECOND) \
HT(wasm_instantiate_asm_module_time, \
V8.WasmInstantiateModuleMicroSeconds.asm, 10000000, MICROSECOND) \
/* Total compilation time incl. caching/parsing for various cache states. */ \
HT(compile_script_with_produce_cache, \
V8.CompileScriptMicroSeconds.ProduceCache, 1000000, MICROSECOND) \
HT(compile_script_with_isolate_cache_hit, \
V8.CompileScriptMicroSeconds.IsolateCacheHit, 1000000, MICROSECOND) \
HT(compile_script_with_consume_cache, \
V8.CompileScriptMicroSeconds.ConsumeCache, 1000000, MICROSECOND) \
HT(compile_script_consume_failed, \
V8.CompileScriptMicroSeconds.ConsumeCache.Failed, 1000000, MICROSECOND) \
HT(compile_script_no_cache_other, \
V8.CompileScriptMicroSeconds.NoCache.Other, 1000000, MICROSECOND) \
HT(compile_script_no_cache_because_inline_script, \
V8.CompileScriptMicroSeconds.NoCache.InlineScript, 1000000, MICROSECOND) \
HT(compile_script_no_cache_because_script_too_small, \
V8.CompileScriptMicroSeconds.NoCache.ScriptTooSmall, 1000000, \
MICROSECOND) \
HT(compile_script_no_cache_because_cache_too_cold, \
V8.CompileScriptMicroSeconds.NoCache.CacheTooCold, 1000000, MICROSECOND) \
HT(compile_script_on_background, \
V8.CompileScriptMicroSeconds.BackgroundThread, 1000000, MICROSECOND) \
HT(compile_function_on_background, \
V8.CompileFunctionMicroSeconds.BackgroundThread, 1000000, MICROSECOND)
#define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
AHT(compile_lazy, V8.CompileLazyMicroSeconds)
#define HISTOGRAM_PERCENTAGE_LIST(HP) \
/* Heap fragmentation. */ \
HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal) \
HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
HP(external_fragmentation_code_space, \
V8.MemoryExternalFragmentationCodeSpace) \
HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)
// Note: These use Histogram with options (min=1000, max=500000, buckets=50).
#define HISTOGRAM_LEGACY_MEMORY_LIST(HM) \
HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted) \
HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)
// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem. Please avoid using recursive macros of this length when
// possible.
#define STATS_COUNTER_LIST_1(SC) \
/* Global Handle Count*/ \
SC(global_handles, V8.GlobalHandles) \
/* OS Memory allocated */ \
SC(memory_allocated, V8.OsMemoryAllocated) \
SC(maps_normalized, V8.MapsNormalized) \
SC(maps_created, V8.MapsCreated) \
SC(elements_transitions, V8.ObjectElementsTransitions) \
SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
SC(alive_after_last_gc, V8.AliveAfterLastGC) \
SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
SC(objs_since_last_full, V8.ObjsSinceLastFull) \
SC(string_table_capacity, V8.StringTableCapacity) \
SC(number_of_symbols, V8.NumberOfSymbols) \
SC(inlined_copied_elements, V8.InlinedCopiedElements) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \
/* Amount of evaled source code. */ \
SC(total_eval_size, V8.TotalEvalSize) \
/* Amount of loaded source code. */ \
SC(total_load_size, V8.TotalLoadSize) \
/* Amount of parsed source code. */ \
SC(total_parse_size, V8.TotalParseSize) \
/* Amount of source code skipped over using preparsing. */ \
SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
/* Amount of compiled source code. */ \
SC(total_compile_size, V8.TotalCompileSize) \
/* Amount of source code compiled with the full codegen. */ \
SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
/* Number of contexts created from scratch. */ \
SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
/* Number of contexts created by partial snapshot. */ \
SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
/* Number of code objects found from pc. */ \
SC(pc_to_code, V8.PcToCode) \
SC(pc_to_code_cached, V8.PcToCodeCached) \
/* The store-buffer implementation of the write barrier. */ \
SC(store_buffer_overflows, V8.StoreBufferOverflows)
#define STATS_COUNTER_LIST_2(SC) \
/* Amount of (JS) compiled code. */ \
SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
SC(gc_compactor_caused_by_oldspace_exhaustion, \
V8.GCCompactorCausedByOldspaceExhaustion) \
SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
SC(ic_keyed_load_generic_smi, V8.ICKeyedLoadGenericSmi) \
SC(ic_keyed_load_generic_symbol, V8.ICKeyedLoadGenericSymbol) \
SC(ic_keyed_load_generic_slow, V8.ICKeyedLoadGenericSlow) \
SC(ic_named_load_global_stub, V8.ICNamedLoadGlobalStub) \
SC(ic_store_normal_miss, V8.ICStoreNormalMiss) \
SC(ic_store_normal_hit, V8.ICStoreNormalHit) \
SC(ic_binary_op_miss, V8.ICBinaryOpMiss) \
SC(ic_compare_miss, V8.ICCompareMiss) \
SC(ic_call_miss, V8.ICCallMiss) \
SC(ic_keyed_call_miss, V8.ICKeyedCallMiss) \
SC(ic_store_miss, V8.ICStoreMiss) \
SC(ic_keyed_store_miss, V8.ICKeyedStoreMiss) \
SC(cow_arrays_converted, V8.COWArraysConverted) \
SC(constructed_objects, V8.ConstructedObjects) \
SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \
SC(fast_new_closure_total, V8.FastNewClosureTotal) \
SC(string_add_runtime, V8.StringAddRuntime) \
SC(string_add_native, V8.StringAddNative) \
SC(sub_string_runtime, V8.SubStringRuntime) \
SC(sub_string_native, V8.SubStringNative) \
SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
SC(regexp_entry_native, V8.RegExpEntryNative) \
SC(math_exp_runtime, V8.MathExpRuntime) \
SC(math_log_runtime, V8.MathLogRuntime) \
SC(math_pow_runtime, V8.MathPowRuntime) \
SC(stack_interrupts, V8.StackInterrupts) \
SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
SC(runtime_calls, V8.RuntimeCalls) \
SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
/* Number of write barriers in generated code. */ \
SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
SC(write_barriers_static, V8.WriteBarriersStatic) \
SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable) \
SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted) \
SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed) \
SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) \
/* Total code size (including metadata) of baseline code or bytecode. */ \
SC(total_baseline_code_size, V8.TotalBaselineCodeSize) \
/* Total count of functions compiled using the baseline compiler. */ \
SC(total_baseline_compile_count, V8.TotalBaselineCompileCount)
#define STATS_COUNTER_TS_LIST(SC) \
SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
SC(wasm_reloc_size, V8.WasmRelocBytes) \
SC(wasm_lazily_compiled_functions, V8.WasmLazilyCompiledFunctions) \
SC(liftoff_compiled_functions, V8.LiftoffCompiledFunctions) \
SC(liftoff_unsupported_functions, V8.LiftoffUnsupportedFunctions)
// This file contains all the v8 counters that are in use.
class Counters : public std::enable_shared_from_this<Counters> {
public:
@@ -1254,7 +1562,6 @@ class Counters : public std::enable_shared_from_this<Counters> {
StatsCounter* name() { return &name##_; }
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name, caption) \
@@ -1281,7 +1588,6 @@ class Counters : public std::enable_shared_from_this<Counters> {
STATS_COUNTER_LIST_1(COUNTER_ID)
STATS_COUNTER_LIST_2(COUNTER_ID)
STATS_COUNTER_TS_LIST(COUNTER_ID)
STATS_COUNTER_NATIVE_CODE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
INSTANCE_TYPE_LIST(COUNTER_ID)
@@ -1358,7 +1664,6 @@ class Counters : public std::enable_shared_from_this<Counters> {
StatsCounter name##_;
STATS_COUNTER_LIST_1(SC)
STATS_COUNTER_LIST_2(SC)
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
#define SC(name, caption) StatsCounterThreadSafe name##_;
......
@@ -9,7 +9,6 @@
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <unordered_map>
#include <utility>
#include <vector>
@@ -1979,6 +1978,16 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
return handle_scope.Escape(context);
}
struct CounterAndKey {
Counter* counter;
const char* key;
};
inline bool operator<(const CounterAndKey& lhs, const CounterAndKey& rhs) {
return strcmp(lhs.key, rhs.key) < 0;
}
void Shell::WriteIgnitionDispatchCountersFile(v8::Isolate* isolate) {
HandleScope handle_scope(isolate);
Local<Context> context = Context::New(isolate);
@@ -2092,52 +2101,54 @@ void Shell::OnExit(v8::Isolate* isolate) {
isolate->Dispose();
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
std::vector<std::pair<std::string, Counter*>> counters(
counter_map_->begin(), counter_map_->end());
std::sort(counters.begin(), counters.end());
const int number_of_counters = static_cast<int>(counter_map_->size());
CounterAndKey* counters = new CounterAndKey[number_of_counters];
int j = 0;
for (auto map_entry : *counter_map_) {
counters[j].counter = map_entry.second;
counters[j].key = map_entry.first;
j++;
}
std::sort(counters, counters + number_of_counters);
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
if (counter->is_histogram()) {
std::cout << "\"c:" << key << "\"=" << counter->count() << "\n";
std::cout << "\"t:" << key << "\"=" << counter->sample_total()
<< "\n";
printf("\"c:%s\"=%i\n", key, counter->count());
printf("\"t:%s\"=%i\n", key, counter->sample_total());
} else {
std::cout << "\"" << key << "\"=" << counter->count() << "\n";
printf("\"%s\"=%i\n", key, counter->count());
}
}
} else {
// Dump counters in formatted boxes.
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ') << "| Value"
<< std::string(kValueBoxSize - 6, ' ') << "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto pair : counters) {
std::string key = pair.first;
Counter* counter = pair.second;
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
printf(
"| Name |"
" Value |\n");
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
for (j = 0; j < number_of_counters; j++) {
Counter* counter = counters[j].counter;
const char* key = counters[j].key;
if (counter->is_histogram()) {
std::cout << "| c:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
std::cout << "| t:" << std::setw(kNameBoxSize - 4) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->sample_total() << " |\n";
printf("| c:%-60s | %11i |\n", key, counter->count());
printf("| t:%-60s | %11i |\n", key, counter->sample_total());
} else {
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter->count() << " |\n";
printf("| %-62s | %11i |\n", key, counter->count());
}
}
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
printf(
"+----------------------------------------------------------------+"
"-------------+\n");
}
delete [] counters;
}
delete counters_file_;
......
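
A note on the two sorting strategies visible in the Shell::OnExit hunk: the removed lines sorted std::vector<std::pair<std::string, Counter*>> with plain std::sort, which works because std::pair compares lexicographically (by .first, then .second), so counters order by name with no custom comparator; the restored code instead sorts a raw CounterAndKey array through its strcmp-based operator<. A small standalone illustration of the pair-based approach:

```cpp
#include <algorithm>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Illustrative (name, count) pairs standing in for (name, Counter*).
  std::vector<std::pair<std::string, int>> counters = {
      {"V8.MapsCreated", 7}, {"V8.GlobalHandles", 3}};
  std::sort(counters.begin(), counters.end());  // Orders by name first.
  for (const auto& [name, count] : counters) {
    std::cout << '"' << name << "\"=" << count << "\n";
  }
}
```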
@@ -59,7 +59,21 @@ class CounterCollection {
Counter counters_[kMaxCounters];
};
typedef std::unordered_map<std::string, Counter*> CounterMap;
struct CStringHasher {
std::size_t operator()(const char* name) const {
size_t h = 0;
size_t c;
while ((c = *name++) != 0) {
h += h << 5;
h += c;
}
return h;
}
};
typedef std::unordered_map<const char*, Counter*, CStringHasher,
i::StringEquals>
CounterMap;
class SourceGroup {
public:
......
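
The d8.h hunk above swaps a std::string-keyed map for one keyed by const char*. That only works with a content-based hash and equality functor, because the defaults for pointer keys hash and compare addresses. A self-contained sketch (StringEq stands in for V8's i::StringEquals, which I take to be strcmp-based):

```cpp
#include <cstring>
#include <unordered_map>

struct CStringHasher {
  std::size_t operator()(const char* name) const {
    std::size_t h = 0;
    std::size_t c;
    while ((c = static_cast<unsigned char>(*name++)) != 0) {
      h += h << 5;  // h = h * 33 + c: the classic djb2-style string hash.
      h += c;
    }
    return h;
  }
};

struct StringEq {
  bool operator()(const char* a, const char* b) const {
    return std::strcmp(a, b) == 0;
  }
};

using CounterMap =
    std::unordered_map<const char*, int, CStringHasher, StringEq>;
```

Keys are stored as raw pointers and never copied, so they must outlive the map; that is safe for V8's static counter-name strings but worth remembering elsewhere.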
@@ -76,15 +76,14 @@ void StatisticsExtension::GetCounters(
v8::internal::StatsCounter* counter;
const char* name;
};
// clang-format off
const StatisticsCounter counter_list[] = {
#define ADD_COUNTER(name, caption) {counters->name(), #name},
STATS_COUNTER_LIST_1(ADD_COUNTER)
STATS_COUNTER_LIST_2(ADD_COUNTER)
STATS_COUNTER_NATIVE_CODE_LIST(ADD_COUNTER)
#define ADD_COUNTER(name, caption) \
{ counters->name(), #name } \
,
STATS_COUNTER_LIST_1(ADD_COUNTER) STATS_COUNTER_LIST_2(ADD_COUNTER)
#undef ADD_COUNTER
}; // End counter_list array.
// clang-format on
for (size_t i = 0; i < arraysize(counter_list); i++) {
AddCounter(args.GetIsolate(), result, counter_list[i].counter,
......
@@ -25,16 +25,14 @@ namespace internal {
#define ADD_ACCESSOR_INFO_NAME(_, __, AccessorName, ...) \
"Accessors::" #AccessorName "Getter",
#define ADD_ACCESSOR_SETTER_NAME(name) "Accessors::" #name,
#define ADD_STATS_COUNTER_NAME(name, ...) "StatsCounter::" #name,
// static
// clang-format off
const char* const
ExternalReferenceTable::ref_name_[ExternalReferenceTable::kSize] = {
// Special references:
"nullptr",
// External references:
EXTERNAL_REFERENCE_LIST(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
EXTERNAL_REFERENCE_LIST_WITH_ISOLATE(ADD_EXT_REF_NAME)
// Builtins:
BUILTIN_LIST_C(ADD_BUILTIN_NAME)
// Runtime functions:
@@ -43,7 +41,7 @@ const char* const
FOR_EACH_ISOLATE_ADDRESS_NAME(ADD_ISOLATE_ADDR)
// Accessors:
ACCESSOR_INFO_LIST_GENERATOR(ADD_ACCESSOR_INFO_NAME, /* not used */)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
ACCESSOR_SETTER_LIST(ADD_ACCESSOR_SETTER_NAME)
// Stub cache:
"Load StubCache::primary_->key",
"Load StubCache::primary_->value",
@@ -57,17 +55,13 @@ const char* const
"Store StubCache::secondary_->key",
"Store StubCache::secondary_->value",
"Store StubCache::secondary_->map",
// Native code counters:
STATS_COUNTER_NATIVE_CODE_LIST(ADD_STATS_COUNTER_NAME)
};
// clang-format on
#undef ADD_EXT_REF_NAME
#undef ADD_BUILTIN_NAME
#undef ADD_RUNTIME_FUNCTION
#undef ADD_ISOLATE_ADDR
#undef ADD_ACCESSOR_INFO_NAME
#undef ADD_ACCESSOR_SETTER_NAME
#undef ADD_STATS_COUNTER_NAME
// Forward declarations for C++ builtins.
#define FORWARD_DECLARE(Name) \
@@ -86,8 +80,8 @@ void ExternalReferenceTable::Init(Isolate* isolate) {
AddIsolateAddresses(isolate, &index);
AddAccessors(&index);
AddStubCache(isolate, &index);
AddNativeCodeStatsCounters(isolate, &index);
is_initialized_ = static_cast<uint32_t>(true);
USE(unused_padding_);
CHECK_EQ(kSize, index);
}
@@ -237,34 +231,6 @@ void ExternalReferenceTable::AddStubCache(Isolate* isolate, int* index) {
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount,
*index);
}
Address ExternalReferenceTable::GetStatsCounterAddress(StatsCounter* counter) {
int* address = counter->Enabled()
? counter->GetInternalPointer()
: reinterpret_cast<int*>(&dummy_stats_counter_);
return reinterpret_cast<Address>(address);
}
void ExternalReferenceTable::AddNativeCodeStatsCounters(Isolate* isolate,
int* index) {
CHECK_EQ(kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount,
*index);
Counters* counters = isolate->counters();
#define SC(name, caption) Add(GetStatsCounterAddress(counters->name()), index);
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
CHECK_EQ(kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount + kStatsCountersReferenceCount,
*index);
CHECK_EQ(kSize, *index);
}
......
@@ -9,7 +9,6 @@
#include "src/accessors.h"
#include "src/builtins/builtins.h"
#include "src/counters-definitions.h"
#include "src/external-reference.h"
namespace v8 {
@@ -38,15 +37,11 @@ class ExternalReferenceTable {
Accessors::kAccessorInfoCount + Accessors::kAccessorSetterCount;
// The number of stub cache external references, see AddStubCache.
static constexpr int kStubCacheReferenceCount = 12;
static constexpr int kStatsCountersReferenceCount =
#define SC(...) +1
STATS_COUNTER_NATIVE_CODE_LIST(SC);
#undef SC
static constexpr int kSize =
kSpecialReferenceCount + kExternalReferenceCount +
kBuiltinsReferenceCount + kRuntimeReferenceCount +
kIsolateAddressReferenceCount + kAccessorReferenceCount +
kStubCacheReferenceCount + kStatsCountersReferenceCount;
kStubCacheReferenceCount;
static constexpr uint32_t kEntrySize =
static_cast<uint32_t>(kSystemPointerSize);
static constexpr uint32_t kSizeInBytes = kSize * kEntrySize + 2 * kUInt32Size;
@@ -83,22 +78,12 @@ class ExternalReferenceTable {
void AddAccessors(int* index);
void AddStubCache(Isolate* isolate, int* index);
Address GetStatsCounterAddress(StatsCounter* counter);
void AddNativeCodeStatsCounters(Isolate* isolate, int* index);
STATIC_ASSERT(sizeof(Address) == kEntrySize);
Address ref_addr_[kSize];
static const char* const ref_name_[kSize];
// Not bool to guarantee deterministic size.
uint32_t is_initialized_ = 0;
// Redirect disabled stats counters to this field. This is done to make sure
// we can have a snapshot that includes native counters even when the embedder
// isn't collecting them.
// This field is uint32_t since the MacroAssembler and CodeStubAssembler
// accesses this field as a uint32_t.
uint32_t dummy_stats_counter_ = 0;
uint32_t is_initialized_ = 0; // Not bool to guarantee deterministic size.
uint32_t unused_padding_ = 0; // For alignment.
DISALLOW_COPY_AND_ASSIGN(ExternalReferenceTable);
};
......
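
The removed kStatsCountersReferenceCount in the header hunk above uses the standard preprocessor trick for counting X-macro entries at compile time: expand the list with a macro that emits +1 and let the resulting token sequence be a constant expression. A self-contained sketch, reusing the hypothetical list from the earlier example:

```cpp
#define MY_COUNTER_LIST(SC) \
  SC(foo_count, V8.FooCount) \
  SC(bar_count, V8.BarCount)

static constexpr int kMyCounterCount = 0
#define SC(...) +1
    MY_COUNTER_LIST(SC)
#undef SC
    ;  // Preprocesses to: ... = 0 +1 +1;

static_assert(kMyCounterCount == 2, "one +1 per list entry");
```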
@@ -1047,7 +1047,8 @@ DEFINE_BOOL_READONLY(fast_map_update, false,
DEFINE_INT(max_polymorphic_map_count, 4,
"maximum number of maps to track in POLYMORPHIC state")
DEFINE_BOOL(native_code_counters, DEBUG_BOOL,
// macro-assembler-ia32.cc
DEFINE_BOOL(native_code_counters, false,
"generate extra code for manipulating stats counters")
// objects.cc
......
@@ -5025,9 +5025,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Addu(scratch1, scratch1, Operand(value));
@@ -5040,9 +5037,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
li(scratch2, ExternalReference::Create(counter));
Lw(scratch1, MemOperand(scratch2));
Subu(scratch1, scratch1, Operand(value));
......
@@ -1714,9 +1714,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
addi(scratch1, scratch1, Operand(value));
@@ -1729,9 +1726,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
DCHECK_GT(value, 0);
if (FLAG_native_code_counters && counter->Enabled()) {
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
Move(scratch2, ExternalReference::Create(counter));
lwz(scratch1, MemOperand(scratch2));
subi(scratch1, scratch1, Operand(value));
......
@@ -5,7 +5,6 @@
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <iomanip>
#include "include/libplatform/libplatform.h"
#include "src/assembler-arch.h"
@@ -247,9 +246,6 @@ void WriteEmbeddedFile(i::EmbeddedFileWriter* writer) {
}
} // namespace
typedef std::map<std::string, int> CounterMap;
CounterMap* counter_map_;
int main(int argc, char** argv) {
v8::base::EnsureConsoleOutput();
@@ -291,20 +287,7 @@ int main(int argc, char** argv) {
i::DisableEmbeddedBlobRefcounting();
v8::StartupData blob;
{
counter_map_ = new CounterMap();
v8::Isolate* isolate = v8::Isolate::Allocate();
// If --native-code-counters is on then we enable all counters to make
// sure we generate code to increment them from the snapshot.
if (i::FLAG_native_code_counters || i::FLAG_dump_counters ||
i::FLAG_dump_counters_nvp) {
isolate->SetCounterFunction([](const char* name) -> int* {
auto map_entry = counter_map_->find(name);
if (map_entry == counter_map_->end()) {
counter_map_->emplace(name, 0);
}
return &counter_map_->at(name);
});
}
if (i::FLAG_embedded_builtins) {
// Set code range such that relative jumps for builtins to
// builtin calls in the snapshot are possible.
@@ -324,37 +307,6 @@ int main(int argc, char** argv) {
WriteEmbeddedFile(&embedded_writer);
}
blob = CreateSnapshotDataBlob(&snapshot_creator, embed_script.get());
if (i::FLAG_dump_counters || i::FLAG_dump_counters_nvp) {
if (i::FLAG_dump_counters_nvp) {
// Dump counters as name-value pairs.
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "\"" << key << "\"=" << counter << "\n";
}
} else {
// Dump counters in formatted boxes.
constexpr int kNameBoxSize = 64;
constexpr int kValueBoxSize = 13;
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
std::cout << "| Name" << std::string(kNameBoxSize - 5, ' ')
<< "| Value" << std::string(kValueBoxSize - 6, ' ')
<< "|\n";
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
for (auto entry : *counter_map_) {
std::string key = entry.first;
int counter = entry.second;
std::cout << "| " << std::setw(kNameBoxSize - 2) << std::left << key
<< " | " << std::setw(kValueBoxSize - 2) << std::right
<< counter << " |\n";
}
std::cout << "+" << std::string(kNameBoxSize, '-') << "+"
<< std::string(kValueBoxSize, '-') << "+\n";
}
}
delete counter_map_;
}
if (warmup_script) {
......
@@ -2119,9 +2119,6 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
incl(counter_operand);
} else {
@@ -2136,9 +2133,6 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
Operand counter_operand =
ExternalReferenceAsOperand(ExternalReference::Create(counter));
// This operation has to be exactly 32-bit wide in case the external
// reference table redirects the counter to a uint32_t dummy_stats_counter_
// field.
if (value == 1) {
decl(counter_operand);
} else {
......
@@ -148,38 +148,6 @@ class NativeTimeScope {
}
};
class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters {
public:
SnapshotNativeCounterTest() {}
bool SupportsNativeCounters() const {
#ifdef V8_USE_SNAPSHOT
#ifdef V8_SNAPSHOT_NATIVE_CODE_COUNTERS
return true;
#else
return false;
#endif // V8_SNAPSHOT_NATIVE_CODE_COUNTERS
#else
// If we do not have a snapshot then we rely on the runtime option.
return internal::FLAG_native_code_counters;
#endif // V8_USE_SNAPSHOT
}
#define SC(name, caption) \
int name() { \
CHECK(isolate()->counters()->name()->Enabled()); \
return *isolate()->counters()->name()->GetInternalPointer(); \
}
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
void PrintAll() {
#define SC(name, caption) PrintF(#caption " = %d\n", name());
STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
}
};
} // namespace
@@ -797,29 +765,5 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) {
PrintStats();
}
TEST_F(SnapshotNativeCounterTest, StringAddNative) {
RunJS("let s = 'hello, ' + 'world!'");
if (SupportsNativeCounters()) {
EXPECT_NE(0, string_add_native());
} else {
EXPECT_EQ(0, string_add_native());
}
PrintAll();
}
TEST_F(SnapshotNativeCounterTest, SubStringNative) {
RunJS("'hello, world!'.substring(6);");
if (SupportsNativeCounters()) {
EXPECT_NE(0, sub_string_native());
} else {
EXPECT_EQ(0, sub_string_native());
}
PrintAll();
}
} // namespace internal
} // namespace v8
@@ -15,13 +15,11 @@
namespace v8 {
IsolateWrapper::IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression)
IsolateWrapper::IsolateWrapper(bool enforce_pointer_compression)
: array_buffer_allocator_(
v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = array_buffer_allocator_;
create_params.counter_lookup_callback = counter_lookup_callback;
if (enforce_pointer_compression) {
isolate_ = reinterpret_cast<v8::Isolate*>(
i::Isolate::New(i::IsolateAllocationMode::kInV8Heap));
@@ -43,22 +41,6 @@ IsolateWrapper::~IsolateWrapper() {
// static
v8::IsolateWrapper* SharedIsolateHolder::isolate_wrapper_ = nullptr;
// static
int* SharedIsolateAndCountersHolder::LookupCounter(const char* name) {
DCHECK_NOT_NULL(counter_map_);
auto map_entry = counter_map_->find(name);
if (map_entry == counter_map_->end()) {
counter_map_->emplace(name, 0);
}
return &counter_map_->at(name);
}
// static
v8::IsolateWrapper* SharedIsolateAndCountersHolder::isolate_wrapper_ = nullptr;
// static
CounterMap* SharedIsolateAndCountersHolder::counter_map_ = nullptr;
namespace internal {
SaveFlags::SaveFlags() {
......
@@ -22,16 +22,13 @@ namespace v8 {
class ArrayBufferAllocator;
typedef std::map<std::string, int> CounterMap;
// RAII-like Isolate instance wrapper.
class IsolateWrapper final {
public:
// When enforce_pointer_compression is true the Isolate is created with
// enabled pointer compression. When it's false then the Isolate is created
// with the default pointer compression state for current build.
explicit IsolateWrapper(CounterLookupCallback counter_lookup_callback,
bool enforce_pointer_compression = false);
explicit IsolateWrapper(bool enforce_pointer_compression = false);
~IsolateWrapper();
v8::Isolate* isolate() const { return isolate_; }
@@ -49,8 +46,7 @@ class SharedIsolateHolder final {
static void CreateIsolate() {
CHECK_NULL(isolate_wrapper_);
isolate_wrapper_ =
new IsolateWrapper([](const char* name) -> int* { return nullptr; });
isolate_wrapper_ = new IsolateWrapper();
}
static void DeleteIsolate() {
@@ -65,34 +61,6 @@ class SharedIsolateHolder final {
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateHolder);
};
class SharedIsolateAndCountersHolder final {
public:
static v8::Isolate* isolate() { return isolate_wrapper_->isolate(); }
static void CreateIsolate() {
CHECK_NULL(counter_map_);
CHECK_NULL(isolate_wrapper_);
counter_map_ = new CounterMap();
isolate_wrapper_ = new IsolateWrapper(LookupCounter);
}
static void DeleteIsolate() {
CHECK_NOT_NULL(counter_map_);
CHECK_NOT_NULL(isolate_wrapper_);
delete isolate_wrapper_;
isolate_wrapper_ = nullptr;
delete counter_map_;
counter_map_ = nullptr;
}
private:
static int* LookupCounter(const char* name);
static CounterMap* counter_map_;
static v8::IsolateWrapper* isolate_wrapper_;
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedIsolateAndCountersHolder);
};
//
// A set of mixins from which the test fixtures will be constructed.
//
@@ -100,8 +68,7 @@ template <typename TMixin>
class WithPrivateIsolateMixin : public TMixin {
public:
explicit WithPrivateIsolateMixin(bool enforce_pointer_compression = false)
: isolate_wrapper_([](const char* name) -> int* { return nullptr; },
enforce_pointer_compression) {}
: isolate_wrapper_(enforce_pointer_compression) {}
v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); }
@@ -114,20 +81,20 @@ class WithPrivateIsolateMixin : public TMixin {
DISALLOW_COPY_AND_ASSIGN(WithPrivateIsolateMixin);
};
template <typename TMixin, typename TSharedIsolateHolder = SharedIsolateHolder>
template <typename TMixin>
class WithSharedIsolateMixin : public TMixin {
public:
WithSharedIsolateMixin() = default;
v8::Isolate* v8_isolate() const { return TSharedIsolateHolder::isolate(); }
v8::Isolate* v8_isolate() const { return SharedIsolateHolder::isolate(); }
static void SetUpTestCase() {
TMixin::SetUpTestCase();
TSharedIsolateHolder::CreateIsolate();
SharedIsolateHolder::CreateIsolate();
}
static void TearDownTestCase() {
TSharedIsolateHolder::DeleteIsolate();
SharedIsolateHolder::DeleteIsolate();
TMixin::TearDownTestCase();
}
@@ -328,14 +295,6 @@ using TestWithNativeContext = //
WithSharedIsolateMixin< //
::testing::Test>>>>;
using TestWithNativeContextAndCounters = //
WithInternalIsolateMixin< //
WithContextMixin< //
WithIsolateScopeMixin< //
WithSharedIsolateMixin< //
::testing::Test, //
SharedIsolateAndCountersHolder>>>>;
using TestWithNativeContextAndZone = //
WithZoneMixin< //
WithInternalIsolateMixin< //
......
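
The test-utils hunks above revert a small policy-template pattern: WithSharedIsolateMixin briefly took the isolate-holder type as a template parameter so a counters-aware holder could reuse the same mixin. A sketch of the pattern with illustrative stand-in types:

```cpp
struct FakeIsolate {};

// Policy class: owns the shared instance through static members.
struct SharedHolder {
  static FakeIsolate* isolate() {
    static FakeIsolate instance;
    return &instance;
  }
};

template <typename TMixin, typename THolder = SharedHolder>
class WithSharedIsolateMixin : public TMixin {
 public:
  FakeIsolate* v8_isolate() const { return THolder::isolate(); }
};

struct TestBase {};
using PlainFixture = WithSharedIsolateMixin<TestBase>;  // Default policy.
// A second policy (e.g. one that also registers a counter callback) would
// plug in as WithSharedIsolateMixin<TestBase, CountersHolder>.
```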