Commit 627b8781 authored by Igor Sheludko, committed by Commit Bot

[zone-stats] Implement collecting per-object-type zone stats

... behind the --trace-zone-type-stats flag.

Per-object-type statistics require the following GN args:
  v8_enable_precise_zone_stats = true
  use_rtti = true

When precise zone stats are enabled, the used zone memory value is
calculated more precisely; in particular, it takes the state of the
active segment into account. By default, the used memory in the active
segment is not counted because of the performance overhead.
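
For illustration, here is a minimal sketch of how allocations get
attributed through the Zone API touched by this change (MyNode and
AllocateSomeZoneMemory are made-up names; this is not part of the change):

  // Sketch only: assumes a build with the GN args above and running with
  // --trace-zone-type-stats.
  #include "src/zone/accounting-allocator.h"
  #include "src/zone/zone.h"

  namespace v8 {
  namespace internal {

  struct MyNode { int payload = 0; };  // hypothetical zone-allocated type

  void AllocateSomeZoneMemory(AccountingAllocator* allocator) {
    Zone zone(allocator, "example-zone");
    // Accounted under typeid(MyNode) when precise zone stats are enabled.
    MyNode* node = zone.New<MyNode>();
    static_cast<void>(node);
    // With the isolate's TracingAccountingAllocator, the accumulated
    // TypeStats table is dumped once the last active zone is destroyed.
  }

  }  // namespace internal
  }  // namespace v8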

Bug: v8:10572
Change-Id: I938d9e264cfe6a8b63a89db87d187d8e2be63c8b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2281006
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68972}
parent f181dff3
......@@ -253,6 +253,10 @@ declare_args() {
# Experimental support for native context independent code.
# https://crbug.com/v8/8888
v8_enable_nci_code = false
# Experimental feature for collecting per-class zone memory stats.
# Requires use_rtti = true
v8_enable_precise_zone_stats = false
}
# Derived defaults.
......@@ -604,6 +608,9 @@ config("features") {
if (v8_enable_nci_code) {
defines += [ "V8_ENABLE_NCI_CODE" ]
}
if (v8_enable_precise_zone_stats) {
defines += [ "V8_ENABLE_PRECISE_ZONE_STATS" ]
}
if (v8_fuzzilli) {
defines += [ "V8_FUZZILLI" ]
}
......@@ -3253,6 +3260,8 @@ v8_source_set("v8_base_without_compiler") {
"src/wasm/wasm-value.h",
"src/zone/accounting-allocator.cc",
"src/zone/accounting-allocator.h",
"src/zone/type-stats.cc",
"src/zone/type-stats.h",
"src/zone/zone-allocator.h",
"src/zone/zone-chunk-list.h",
"src/zone/zone-containers.h",
......
......@@ -89,6 +89,7 @@
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/type-stats.h"
#ifdef V8_INTL_SUPPORT
#include "unicode/uobject.h"
#endif // V8_INTL_SUPPORT
......@@ -2669,6 +2670,7 @@ void Isolate::ThreadDataTable::RemoveAllThreads() {
class TracingAccountingAllocator : public AccountingAllocator {
public:
explicit TracingAccountingAllocator(Isolate* isolate) : isolate_(isolate) {}
~TracingAccountingAllocator() = default;
protected:
void TraceAllocateSegmentImpl(v8::internal::Segment* segment) override {
......@@ -2684,13 +2686,32 @@ class TracingAccountingAllocator : public AccountingAllocator {
void TraceZoneDestructionImpl(const Zone* zone) override {
base::MutexGuard lock(&mutex_);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
if (FLAG_trace_zone_type_stats) {
type_stats_.MergeWith(zone->type_stats());
}
#endif
UpdateMemoryTrafficAndReportMemoryUsage(zone->segment_bytes_allocated());
active_zones_.erase(zone);
nesting_depth_--;
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
if (FLAG_trace_zone_type_stats && active_zones_.empty()) {
type_stats_.Dump();
}
#endif
}
private:
void UpdateMemoryTrafficAndReportMemoryUsage(size_t memory_traffic_delta) {
if (!FLAG_trace_zone_stats &&
!(TracingFlags::zone_stats.load(std::memory_order_relaxed) &
v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
// Don't print anything if the zone tracing was enabled only because of
// FLAG_trace_zone_type_stats.
return;
}
memory_traffic_since_last_report_ += memory_traffic_delta;
if (memory_traffic_since_last_report_ < FLAG_zone_stats_tolerance) return;
memory_traffic_since_last_report_ = 0;
......@@ -2767,6 +2788,9 @@ class TracingAccountingAllocator : public AccountingAllocator {
base::Mutex mutex_;
std::unordered_set<const Zone*> active_zones_;
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
TypeStats type_stats_;
#endif
std::ostringstream buffer_;
// This value is increased on both allocations and deallocations.
size_t memory_traffic_since_last_report_ = 0;
......
......@@ -993,6 +993,11 @@ DEFINE_GENERIC_IMPLICATION(
DEFINE_SIZE_T(
zone_stats_tolerance, 1 * MB,
"report a tick only when allocated zone memory changes by this amount")
DEFINE_BOOL(trace_zone_type_stats, false, "trace per-type zone memory usage")
DEFINE_GENERIC_IMPLICATION(
trace_zone_type_stats,
TracingFlags::zone_stats.store(
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE))
DEFINE_BOOL(track_retaining_path, false,
"enable support for tracking retaining path")
DEFINE_DEBUG_BOOL(trace_backing_store, false, "trace backing store events")
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
#if defined(__clang__) || defined(__GLIBCXX__)
#include <cxxabi.h>
#endif // __clang__ || __GLIBCXX__
#include <cinttypes>
#include <cstdio>
#include "src/utils/utils.h"
#include "src/zone/type-stats.h"
namespace v8 {
namespace internal {
void TypeStats::MergeWith(const TypeStats& other) {
for (auto const& item : other.map_) {
Add(item.first, item.second);
}
}
class Demangler {
public:
Demangler() = default;
~Demangler() {
if (buffer_) free(buffer_);
USE(buffer_len_); // In case demangling is not supported.
}
const char* demangle(std::type_index type_id) {
#if defined(__clang__) || defined(__GLIBCXX__)
int status = -1;
char* result =
abi::__cxa_demangle(type_id.name(), buffer_, &buffer_len_, &status);
if (status == 0) {
// On success, buffer_ may have been reallocated by __cxa_demangle.
buffer_ = result;
return buffer_;
}
#endif
return type_id.name();
}
private:
char* buffer_ = nullptr;
size_t buffer_len_ = 0;
};
void TypeStats::Dump() const {
Demangler d;
PrintF("===== TypeStats =====\n");
PrintF("-------------+--------------+------------+--------+--------------\n");
PrintF(" alloc | dealloc | count | sizeof | name\n");
PrintF("-------------+--------------+------------+--------+--------------\n");
uint64_t total_allocation_count = 0;
uint64_t total_allocated_bytes = 0;
uint64_t total_deallocated_bytes = 0;
for (auto const& item : map_) {
const StatsEntry& entry = item.second;
total_allocation_count += entry.allocation_count;
total_allocated_bytes += entry.allocated_bytes;
total_deallocated_bytes += entry.deallocated_bytes;
PrintF("%12zu | %12zu | %10zu | %6zu | %s\n", entry.allocated_bytes,
entry.deallocated_bytes, entry.allocation_count, entry.instance_size,
d.demangle(item.first));
}
PrintF("%12" PRIu64 " | %12" PRIu64 " | %10" PRIu64
" | ===== TOTAL STATS =====\n",
total_allocated_bytes, total_deallocated_bytes,
total_allocation_count);
}
} // namespace internal
} // namespace v8
#endif // V8_ENABLE_PRECISE_ZONE_STATS
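
As a rough standalone illustration of how this class is driven (FooTag,
BarTag, and TypeStatsExample are made-up names; the accumulation mirrors
what TracingAccountingAllocator does on zone destruction):

  // Sketch only: assumes V8_ENABLE_PRECISE_ZONE_STATS is defined.
  #include "src/zone/type-stats.h"

  namespace v8 {
  namespace internal {

  struct FooTag {};
  struct BarTag {};

  void TypeStatsExample() {
    TypeStats per_zone_stats;
    per_zone_stats.AddAllocated<FooTag>(64);     // one 64-byte allocation
    per_zone_stats.AddAllocated<BarTag>(128);
    per_zone_stats.AddDeallocated<BarTag>(128);  // later freed

    TypeStats accumulated;
    accumulated.MergeWith(per_zone_stats);  // as TraceZoneDestructionImpl does
    accumulated.Dump();  // prints the per-type table and totals to stdout
  }

  }  // namespace internal
  }  // namespace v8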
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ZONE_TYPE_STATS_H_
#define V8_ZONE_TYPE_STATS_H_
#include <iosfwd>
#include <type_traits>
#include <typeindex>
#include <unordered_map>
#include "src/common/globals.h"
namespace v8 {
namespace internal {
class TypeStats;
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
class TypeStats {
public:
TypeStats() = default;
template <typename TypeTag>
void AddAllocated(size_t bytes) {
StatsEntry& entry = map_[std::type_index(typeid(TypeTag))];
entry.allocation_count++;
entry.allocated_bytes += bytes;
// sizeof(IncompleteType) is not allowed, so record the size as sizeof(char).
constexpr bool kIsIncomplete =
std::is_same<TypeTag, void>::value || std::is_array<TypeTag>::value;
using TypeTagForSizeof =
typename std::conditional<kIsIncomplete, char, TypeTag>::type;
entry.instance_size = sizeof(TypeTagForSizeof);
}
template <typename TypeTag>
void AddDeallocated(size_t bytes) {
StatsEntry& entry = map_[std::type_index(typeid(TypeTag))];
entry.deallocated_bytes += bytes;
}
// Merges other stats into this stats object.
void MergeWith(const TypeStats& other);
// Prints recorded statistics to stdout.
void Dump() const;
private:
struct StatsEntry {
size_t allocation_count = 0;
size_t allocated_bytes = 0;
size_t deallocated_bytes = 0;
size_t instance_size = 0;
};
void Add(std::type_index type_id, const StatsEntry& other_entry) {
StatsEntry& entry = map_[type_id];
entry.allocation_count += other_entry.allocation_count;
entry.allocated_bytes += other_entry.allocated_bytes;
entry.deallocated_bytes += other_entry.deallocated_bytes;
entry.instance_size = other_entry.instance_size;
}
using HashMap = std::unordered_map<std::type_index, StatsEntry>;
HashMap map_;
};
#endif // V8_ENABLE_PRECISE_ZONE_STATS
} // namespace internal
} // namespace v8
#endif // V8_ZONE_TYPE_STATS_H_
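
The kIsIncomplete handling in AddAllocated() exists because Delete()
defaults TypeTag to void and NewArray() defaults it to T[], and sizeof
cannot be applied to either. A small illustrative sketch of the recorded
instance_size (RecordedInstanceSize is a hypothetical helper, not part of
the change):

  // Sketch: mirrors the instance_size logic of TypeStats::AddAllocated().
  #include <cstddef>
  #include <type_traits>

  template <typename TypeTag>
  constexpr size_t RecordedInstanceSize() {
    constexpr bool kIsIncomplete =
        std::is_same<TypeTag, void>::value || std::is_array<TypeTag>::value;
    using TypeTagForSizeof =
        typename std::conditional<kIsIncomplete, char, TypeTag>::type;
    return sizeof(TypeTagForSizeof);
  }

  static_assert(RecordedInstanceSize<void>() == 1, "void tag maps to char");
  static_assert(RecordedInstanceSize<int[]>() == 1, "array tag maps to char");
  static_assert(RecordedInstanceSize<double>() == sizeof(double),
                "complete types keep their real size");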
......@@ -5,10 +5,12 @@
#include "src/zone/zone.h"
#include <cstring>
#include <memory>
#include "src/init/v8.h"
#include "src/sanitizer/asan.h"
#include "src/utils/utils.h"
#include "src/zone/type-stats.h"
namespace v8 {
namespace internal {
......@@ -28,14 +30,7 @@ constexpr size_t kASanRedzoneBytes = 0;
} // namespace
Zone::Zone(AccountingAllocator* allocator, const char* name)
: allocation_size_(0),
segment_bytes_allocated_(0),
position_(0),
limit_(0),
allocator_(allocator),
segment_head_(nullptr),
name_(name),
sealed_(false) {
: allocator_(allocator), name_(name) {
allocator_->TraceZoneCreation(this);
}
......@@ -104,6 +99,9 @@ void Zone::DeleteAll() {
position_ = limit_ = 0;
allocation_size_ = 0;
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
allocation_size_for_tracing_ = 0;
#endif
}
Address Zone::NewExpand(size_t size) {
......
......@@ -10,6 +10,7 @@
#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/type-stats.h"
#include "src/zone/zone-segment.h"
#ifndef ZONE_NAME
......@@ -42,13 +43,21 @@ class V8_EXPORT_PRIVATE Zone final {
// Allocate 'size' bytes of uninitialized memory in the Zone; expands the Zone
// by allocating new segments of memory on demand using AccountingAllocator
// (see AccountingAllocator::AllocateSegment()).
// TODO(v8:10689): account allocated bytes with the provided TypeTag type.
//
// When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
// associated with the provided TypeTag type.
template <typename TypeTag>
void* Allocate(size_t size) {
#ifdef V8_USE_ADDRESS_SANITIZER
return AsanNew(size);
#else
size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
type_stats_.AddAllocated<TypeTag>(size);
}
allocation_size_for_tracing_ += size;
#endif
Address result = position_;
if (V8_UNLIKELY(size > limit_ - position_)) {
result = NewExpand(size);
......@@ -59,13 +68,24 @@ class V8_EXPORT_PRIVATE Zone final {
#endif
}
// Return 'size' bytes of memory back to Zone. These bytes can be reused
// for following allocations.
//
// When V8_ENABLE_PRECISE_ZONE_STATS is defined, the deallocated bytes are
// associated with the provided TypeTag type.
template <typename TypeTag = void>
void Delete(void* pointer, size_t size) {
DCHECK_NOT_NULL(pointer);
DCHECK_NE(size, 0);
// TODO(v8:10572): implement accounting for reusable zone memory
#ifdef DEBUG
size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
type_stats_.AddDeallocated<TypeTag>(size);
}
#endif
#ifdef DEBUG
static const unsigned char kZapDeadByte = 0xcd;
memset(pointer, kZapDeadByte, size);
#endif
......@@ -73,7 +93,9 @@ class V8_EXPORT_PRIVATE Zone final {
// Allocates memory for T instance and constructs object by calling respective
// Args... constructor.
// TODO(v8:10689): account allocated bytes with the T type.
//
// When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
// associated with the T type.
template <typename T, typename... Args>
T* New(Args&&... args) {
size_t size = RoundUp(sizeof(T), kAlignmentInBytes);
......@@ -82,15 +104,22 @@ class V8_EXPORT_PRIVATE Zone final {
}
// Allocates uninitialized memory for 'length' number of T instances.
// TODO(v8:10689): account allocated bytes with the provided TypeTag type.
// It might be useful to tag buffer allocations with meaningful names to make
// buffer allocation sites distinguishable from each other.
//
// When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
// associated with the provided TypeTag type. It might be useful to tag
// buffer allocations with meaningful names to make buffer allocation sites
// distinguishable from each other.
template <typename T, typename TypeTag = T[]>
T* NewArray(size_t length) {
DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
return static_cast<T*>(Allocate<TypeTag>(length * sizeof(T)));
}
// Return array of 'length' elements back to Zone. These bytes can be reused
// for following allocations.
//
// When V8_ENABLE_PRECISE_ZONE_STATS is defined, the deallocated bytes are
// associated with the provided TypeTag type.
template <typename T, typename TypeTag = T[]>
void DeleteArray(T* pointer, size_t length) {
Delete<TypeTag>(pointer, length * sizeof(T));
......@@ -120,12 +149,23 @@ class V8_EXPORT_PRIVATE Zone final {
return allocation_size_ + extra;
}
// Returns used zone memory not including the head segment, can be called
// from threads not owning the zone.
size_t allocation_size_for_tracing() const { return allocation_size_; }
// When V8_ENABLE_PRECISE_ZONE_STATS is not enabled, returns used zone memory
// not including the head segment.
// Can be called from threads not owning the zone.
size_t allocation_size_for_tracing() const {
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
return allocation_size_for_tracing_;
#else
return allocation_size_;
#endif
}
AccountingAllocator* allocator() const { return allocator_; }
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
const TypeStats& type_stats() const { return type_stats_; }
#endif
private:
void* AsanNew(size_t size);
......@@ -145,12 +185,12 @@ class V8_EXPORT_PRIVATE Zone final {
static const size_t kExcessLimit = 256 * MB;
// The number of bytes allocated in this zone so far.
size_t allocation_size_;
size_t allocation_size_ = 0;
// The number of bytes allocated in segments. Note that this number
// includes memory allocated from the OS but not yet allocated from
// the zone.
size_t segment_bytes_allocated_;
size_t segment_bytes_allocated_ = 0;
// Expand the Zone to hold at least 'size' more bytes and allocate
// the bytes. Returns the address of the newly allocated chunk of
......@@ -161,14 +201,19 @@ class V8_EXPORT_PRIVATE Zone final {
// The free region in the current (front) segment is represented as
// the half-open interval [position, limit). The 'position' variable
// is guaranteed to be aligned as dictated by kAlignment.
Address position_;
Address limit_;
Address position_ = 0;
Address limit_ = 0;
AccountingAllocator* allocator_;
Segment* segment_head_;
Segment* segment_head_ = nullptr;
const char* name_;
bool sealed_;
bool sealed_ = false;
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
TypeStats type_stats_;
size_t allocation_size_for_tracing_ = 0;
#endif
};
// ZoneObject is an abstraction that helps define classes of objects
......
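
As illustrated below, NewArray() defaults TypeTag to T[], so untagged
buffers of the same element type are merged into one row, while a custom
tag keeps a particular call site separate (TempBufferTag and
TagBufferAllocations are made-up names; sketch only, assuming a
precise-zone-stats build):

  // Sketch only: illustrates the TypeTag defaults of NewArray()/DeleteArray().
  #include "src/zone/zone.h"

  namespace v8 {
  namespace internal {

  struct TempBufferTag {};  // hypothetical tag for one buffer call site

  void TagBufferAllocations(Zone* zone) {
    // Both of these are accounted under typeid(int[]) (the default tag).
    int* a = zone->NewArray<int>(16);
    int* b = zone->NewArray<int>(32);
    // This one is accounted under typeid(TempBufferTag) instead, so the
    // call site gets its own row in the TypeStats dump.
    int* c = zone->NewArray<int, TempBufferTag>(64);
    zone->DeleteArray<int, TempBufferTag>(c, 64);
    static_cast<void>(a);
    static_cast<void>(b);
  }

  }  // namespace internal
  }  // namespace v8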