Commit e5b4229b authored by Alexei Filippov, committed by Commit Bot

[heap profiler] Provide detailed sample information in the heap profiler

Make the heap profiler provide information about each sample that is
currently alive. This information can be used to build diagrams of memory
allocations over time.

BUG=chromium:889545

Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: Ibcbe2f5302263d0b3976ee4cd3601eff11375cae
Reviewed-on: https://chromium-review.googlesource.com/c/1285130
Commit-Queue: Alexei Filippov <alph@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56793}
parent 4555961e
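
To make the intent of the change concrete, here is a minimal embedder-side sketch of how the new GetSamples() API might be consumed. The 1024-byte sampling interval mirrors the test added below; the function name, the aggregation by node_id, and the surrounding setup are illustrative assumptions, not part of this change:

// Sketch (illustrative, not part of this commit): aggregate live sampled
// bytes per call-graph node using the new GetSamples() API.
#include <map>
#include <memory>
#include "v8-profiler.h"

void DumpLiveSampledBytes(v8::Isolate* isolate) {
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
  profiler->StartSamplingHeapProfiler(1024);  // sample roughly every 1KB

  // ... run application code that allocates on the JS heap ...

  std::unique_ptr<v8::AllocationProfile> profile(
      profiler->GetAllocationProfile());
  std::map<uint32_t, size_t> live_bytes_per_node;
  for (const auto& sample : profile->GetSamples()) {
    // size * count approximates the live bytes this sample stands for.
    live_bytes_per_node[sample.node_id] += sample.size * sample.count;
  }
  profiler->StopSamplingHeapProfiler();
}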
......@@ -605,6 +605,11 @@ class V8_EXPORT AllocationProfile {
*/
int column_number;
/**
* Unique id of the node.
*/
uint32_t node_id;
/**
* List of callees called from this node for which we have sampled
* allocations. The lifetime of the children is scoped to the containing
......@@ -618,12 +623,39 @@ class V8_EXPORT AllocationProfile {
std::vector<Allocation> allocations;
};
/**
* Represent a single sample recorded for an allocation.
*/
struct Sample {
/**
* id of the node in the profile tree.
*/
uint32_t node_id;
/**
* Size of the sampled allocation object.
*/
size_t size;
/**
* The number of objects of such size that were sampled.
*/
unsigned int count;
/**
* Unique time-ordered id of the allocation sample. Can be used to track
* what samples were added or removed between two snapshots.
*/
uint64_t sample_id;
};
/**
* Returns the root node of the call-graph. The root node corresponds to an
* empty JS call-stack. The lifetime of the returned Node* is scoped to the
* containing AllocationProfile.
*/
virtual Node* GetRootNode() = 0;
virtual const std::vector<Sample>& GetSamples() = 0;
virtual ~AllocationProfile() = default;
......
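
The sample_id field above is what enables diffing: ids are unique and time-ordered, so a sample present in an earlier profile but missing from a later one corresponds to an object that has since been freed. A hedged sketch of such a diff follows; the helper name and the set-based comparison are assumptions for illustration, not part of this change:

// Sketch (illustrative): report samples that were alive in |before| but are
// gone in |after|, i.e. allocations freed between the two profiles.
#include <unordered_set>
#include <vector>
#include "v8-profiler.h"

std::vector<v8::AllocationProfile::Sample> FreedBetween(
    v8::AllocationProfile& before, v8::AllocationProfile& after) {
  std::unordered_set<uint64_t> still_alive;
  for (const auto& s : after.GetSamples()) still_alive.insert(s.sample_id);

  std::vector<v8::AllocationProfile::Sample> freed;
  for (const auto& s : before.GetSamples()) {
    if (!still_alive.count(s.sample_id)) freed.push_back(s);
  }
  return freed;
}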
......@@ -10363,7 +10363,6 @@ AllocationProfile* HeapProfiler::GetAllocationProfile() {
return reinterpret_cast<i::HeapProfiler*>(this)->GetAllocationProfile();
}
void HeapProfiler::DeleteAllHeapSnapshots() {
reinterpret_cast<i::HeapProfiler*>(this)->DeleteAllSnapshots();
}
......
......@@ -44,7 +44,7 @@ intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
// approximate the true number of allocations with size *size* given that
// *count* samples were observed.
v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
size_t size, unsigned int count) {
size_t size, unsigned int count) const {
double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
// Round count instead of truncating.
return {size, static_cast<unsigned int>(count * scale + 0.5)};
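
For reference, the scale factor computed above (and reused by BuildSamples with count = 1) follows from the Poisson sampling model: an allocation of size bytes is observed with probability p = 1 - e^(-size/rate), so each recorded sample stands in for 1/p allocations on average. A short statement of the rounding performed by ScaleSample, assuming rate_ is the mean number of bytes between samples:

\[
  p = 1 - e^{-\,\mathrm{size}/\mathrm{rate}}, \qquad
  \mathrm{count}_{\mathrm{scaled}} =
      \left\lfloor \frac{\mathrm{count}}{p} + 0.5 \right\rfloor
\]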
......@@ -62,23 +62,21 @@ SamplingHeapProfiler::SamplingHeapProfiler(
heap_, static_cast<intptr_t>(rate), rate, this,
heap->isolate()->random_number_generator())),
names_(names),
profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0),
profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0,
next_node_id()),
stack_depth_(stack_depth),
rate_(rate),
flags_(flags) {
CHECK_GT(rate_, 0u);
heap_->AddAllocationObserversToAllSpaces(other_spaces_observer_.get(),
new_space_observer_.get());
}
SamplingHeapProfiler::~SamplingHeapProfiler() {
heap_->RemoveAllocationObserversFromAllSpaces(other_spaces_observer_.get(),
new_space_observer_.get());
}
void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
DisallowHeapAllocation no_allocation;
......@@ -88,14 +86,15 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
// Mark the new block as FreeSpace to make sure the heap is iterable while we
// are taking the sample.
heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size),
ClearRecordedSlots::kNo);
heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size),
ClearRecordedSlots::kNo);
Local<v8::Value> loc = v8::Utils::ToLocal(obj);
AllocationNode* node = AddStack();
node->allocations_[size]++;
auto sample = base::make_unique<Sample>(size, node, loc, this);
auto sample =
base::make_unique<Sample>(size, node, loc, this, next_sample_id());
sample->global.SetWeak(sample.get(), OnWeakCallback,
WeakCallbackType::kParameter);
#if __clang__
......@@ -132,19 +131,19 @@ void SamplingHeapProfiler::OnWeakCallback(
// sample is deleted because its unique ptr was erased from samples_.
}
SamplingHeapProfiler::AllocationNode*
SamplingHeapProfiler::AllocationNode::FindOrAddChildNode(const char* name,
int script_id,
int start_position) {
FunctionId id = function_id(script_id, start_position, name);
auto it = children_.find(id);
if (it != children_.end()) {
DCHECK_EQ(strcmp(it->second->name_, name), 0);
return it->second.get();
SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::FindOrAddChildNode(
AllocationNode* parent, const char* name, int script_id,
int start_position) {
AllocationNode::FunctionId id =
AllocationNode::function_id(script_id, start_position, name);
AllocationNode* child = parent->FindChildNode(id);
if (child) {
DCHECK_EQ(strcmp(child->name_, name), 0);
return child;
}
auto child =
base::make_unique<AllocationNode>(this, name, script_id, start_position);
return children_.emplace(id, std::move(child)).first->second.get();
auto new_child = base::make_unique<AllocationNode>(
parent, name, script_id, start_position, next_node_id());
return parent->AddChildNode(id, std::move(new_child));
}
SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
......@@ -199,7 +198,7 @@ SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
name = "(JS)";
break;
}
return node->FindOrAddChildNode(name, v8::UnboundScript::kNoScriptId, 0);
return FindOrAddChildNode(node, name, v8::UnboundScript::kNoScriptId, 0);
}
// We need to process the stack in reverse order as the top of the stack is
......@@ -212,12 +211,12 @@ SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
Script* script = Script::cast(shared->script());
script_id = script->id();
}
node = node->FindOrAddChildNode(name, script_id, shared->StartPosition());
node = FindOrAddChildNode(node, name, script_id, shared->StartPosition());
}
if (found_arguments_marker_frames) {
node =
node->FindOrAddChildNode("(deopt)", v8::UnboundScript::kNoScriptId, 0);
FindOrAddChildNode(node, "(deopt)", v8::UnboundScript::kNoScriptId, 0);
}
return node;
......@@ -255,12 +254,12 @@ v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
allocations.push_back(ScaleSample(alloc.first, alloc.second));
}
profile->nodes().push_back(v8::AllocationProfile::Node{
profile->nodes_.push_back(v8::AllocationProfile::Node{
ToApiHandle<v8::String>(
isolate_->factory()->InternalizeUtf8String(node->name_)),
script_name, node->script_id_, node->script_position_, line, column,
std::vector<v8::AllocationProfile::Node*>(), allocations});
v8::AllocationProfile::Node* current = &profile->nodes().back();
node->id_, std::vector<v8::AllocationProfile::Node*>(), allocations});
v8::AllocationProfile::Node* current = &profile->nodes_.back();
// The |children_| map may have nodes inserted into it during translation
// because the translation may allocate strings on the JS heap that have
// the potential to be sampled. That's ok since map iterators are not
......@@ -289,8 +288,23 @@ v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
}
auto profile = new v8::internal::AllocationProfile();
TranslateAllocationNode(profile, &profile_root_, scripts);
profile->samples_ = SamplingHeapProfiler::BuildSamples();
return profile;
}
const std::vector<v8::AllocationProfile::Sample>
SamplingHeapProfiler::BuildSamples() const {
std::vector<v8::AllocationProfile::Sample> samples;
samples.reserve(samples_.size());
for (const auto& it : samples_) {
const Sample* sample = it.second.get();
samples.emplace_back(v8::AllocationProfile::Sample{
sample->owner->id_, sample->size, ScaleSample(sample->size, 1).count,
sample->sample_id});
}
return samples;
}
} // namespace internal
} // namespace v8
......@@ -25,62 +25,48 @@ class SamplingAllocationObserver;
class AllocationProfile : public v8::AllocationProfile {
public:
AllocationProfile() : nodes_() {}
AllocationProfile() = default;
v8::AllocationProfile::Node* GetRootNode() override {
return nodes_.size() == 0 ? nullptr : &nodes_.front();
}
std::deque<v8::AllocationProfile::Node>& nodes() { return nodes_; }
const std::vector<v8::AllocationProfile::Sample>& GetSamples() override {
return samples_;
}
private:
std::deque<v8::AllocationProfile::Node> nodes_;
std::vector<v8::AllocationProfile::Sample> samples_;
friend class SamplingHeapProfiler;
DISALLOW_COPY_AND_ASSIGN(AllocationProfile);
};
class SamplingHeapProfiler {
public:
SamplingHeapProfiler(Heap* heap, StringsStorage* names, uint64_t rate,
int stack_depth, v8::HeapProfiler::SamplingFlags flags);
~SamplingHeapProfiler();
v8::AllocationProfile* GetAllocationProfile();
StringsStorage* names() const { return names_; }
class AllocationNode;
struct Sample {
public:
Sample(size_t size_, AllocationNode* owner_, Local<Value> local_,
SamplingHeapProfiler* profiler_)
: size(size_),
owner(owner_),
global(Global<Value>(
reinterpret_cast<v8::Isolate*>(profiler_->isolate_), local_)),
profiler(profiler_) {}
~Sample() { global.Reset(); }
const size_t size;
AllocationNode* const owner;
Global<Value> global;
SamplingHeapProfiler* const profiler;
private:
DISALLOW_COPY_AND_ASSIGN(Sample);
};
class AllocationNode {
public:
typedef uint64_t FunctionId;
AllocationNode(AllocationNode* parent, const char* name, int script_id,
int start_position)
int start_position, uint32_t id)
: parent_(parent),
script_id_(script_id),
script_position_(start_position),
name_(name) {}
name_(name),
id_(id) {}
AllocationNode* FindChildNode(FunctionId id) {
auto it = children_.find(id);
return it != children_.end() ? it->second.get() : nullptr;
}
AllocationNode* AddChildNode(FunctionId id,
std::unique_ptr<AllocationNode> node) {
return children_.emplace(id, std::move(node)).first->second.get();
}
private:
typedef uint64_t FunctionId;
static FunctionId function_id(int script_id, int start_position,
const char* name) {
// script_id == kNoScriptId case:
......@@ -96,8 +82,8 @@ class SamplingHeapProfiler {
DCHECK(static_cast<unsigned>(start_position) < (1u << 31));
return (static_cast<uint64_t>(script_id) << 32) + (start_position << 1);
}
AllocationNode* FindOrAddChildNode(const char* name, int script_id,
int start_position);
private:
// TODO(alph): make use of unordered_map's here. Pay attention to
// iterator invalidation during TranslateAllocationNode.
std::map<size_t, unsigned int> allocations_;
......@@ -106,6 +92,7 @@ class SamplingHeapProfiler {
const int script_id_;
const int script_position_;
const char* const name_;
uint32_t id_;
bool pinned_ = false;
friend class SamplingHeapProfiler;
......@@ -113,13 +100,45 @@ class SamplingHeapProfiler {
DISALLOW_COPY_AND_ASSIGN(AllocationNode);
};
private:
Heap* heap() const { return heap_; }
struct Sample {
Sample(size_t size_, AllocationNode* owner_, Local<Value> local_,
SamplingHeapProfiler* profiler_, uint64_t sample_id)
: size(size_),
owner(owner_),
global(Global<Value>(
reinterpret_cast<v8::Isolate*>(profiler_->isolate_), local_)),
profiler(profiler_),
sample_id(sample_id) {}
~Sample() { global.Reset(); }
const size_t size;
AllocationNode* const owner;
Global<Value> global;
SamplingHeapProfiler* const profiler;
const uint64_t sample_id;
private:
DISALLOW_COPY_AND_ASSIGN(Sample);
};
SamplingHeapProfiler(Heap* heap, StringsStorage* names, uint64_t rate,
int stack_depth, v8::HeapProfiler::SamplingFlags flags);
~SamplingHeapProfiler();
v8::AllocationProfile* GetAllocationProfile();
StringsStorage* names() const { return names_; }
private:
void SampleObject(Address soon_object, size_t size);
const std::vector<v8::AllocationProfile::Sample> BuildSamples() const;
AllocationNode* FindOrAddChildNode(AllocationNode* parent, const char* name,
int script_id, int start_position);
static void OnWeakCallback(const WeakCallbackInfo<Sample>& data);
uint32_t next_node_id() { return ++last_node_id_; }
uint64_t next_sample_id() { return ++last_sample_id_; }
// Methods that construct v8::AllocationProfile.
// Translates the provided AllocationNode *node* returning an equivalent
......@@ -131,11 +150,13 @@ class SamplingHeapProfiler {
AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
const std::map<int, Handle<Script>>& scripts);
v8::AllocationProfile::Allocation ScaleSample(size_t size,
unsigned int count);
unsigned int count) const;
AllocationNode* AddStack();
Isolate* const isolate_;
Heap* const heap_;
uint64_t last_sample_id_ = 0;
uint32_t last_node_id_ = 0;
std::unique_ptr<SamplingAllocationObserver> new_space_observer_;
std::unique_ptr<SamplingAllocationObserver> other_spaces_observer_;
StringsStorage* const names_;
......
......@@ -3656,6 +3656,51 @@ TEST(SamplingHeapProfilerApiAllocation) {
heap_profiler->StopSamplingHeapProfiler();
}
TEST(SamplingHeapProfilerApiSamples) {
v8::HandleScope scope(v8::Isolate::GetCurrent());
LocalContext env;
v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler();
// Suppress randomness to avoid flakiness in tests.
v8::internal::FLAG_sampling_heap_profiler_suppress_randomness = true;
heap_profiler->StartSamplingHeapProfiler(1024);
size_t count = 8 * 1024;
for (size_t i = 0; i < count; ++i) v8::Object::New(env->GetIsolate());
std::unique_ptr<v8::AllocationProfile> profile(
heap_profiler->GetAllocationProfile());
CHECK(profile);
std::vector<v8::AllocationProfile::Node*> nodes_to_visit;
std::unordered_set<uint32_t> node_ids;
nodes_to_visit.push_back(profile->GetRootNode());
while (!nodes_to_visit.empty()) {
v8::AllocationProfile::Node* node = nodes_to_visit.back();
nodes_to_visit.pop_back();
CHECK_LT(0, node->node_id);
CHECK_EQ(0, node_ids.count(node->node_id));
node_ids.insert(node->node_id);
nodes_to_visit.insert(nodes_to_visit.end(), node->children.begin(),
node->children.end());
}
size_t total_size = 0;
std::unordered_set<uint64_t> samples_set;
for (auto& sample : profile->GetSamples()) {
total_size += sample.size * sample.count;
CHECK_EQ(0, samples_set.count(sample.sample_id));
CHECK_EQ(1, node_ids.count(sample.node_id));
CHECK_GT(sample.node_id, 0);
CHECK_GT(sample.sample_id, 0);
samples_set.insert(sample.sample_id);
}
size_t object_size = total_size / count;
CHECK_GE(object_size, sizeof(void*) * 2);
heap_profiler->StopSamplingHeapProfiler();
}
TEST(SamplingHeapProfilerLeftTrimming) {
v8::HandleScope scope(v8::Isolate::GetCurrent());
LocalContext env;
......