Commit 920025f5 authored by Alexei Filippov, committed by Commit Bot

[heap-profiler] Remove unused methods from Heap Profiler.

Remove GetProfilerMemorySize from HeapProfiler API.
Remove HeapObjectsMap::FindUntrackedObjects

Cq-Include-Trybots: master.tryserver.chromium.linux:linux_chromium_rel_ng
Change-Id: I32a9a0676485c17c08c068a8ca501525b0d2670e
Reviewed-on: https://chromium-review.googlesource.com/590651
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Alexei Filippov <alph@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46984}
parent 4a9718c7
......@@ -809,9 +809,6 @@ class V8_EXPORT HeapProfiler {
*/
static const uint16_t kPersistentHandleNoClassId = 0;
/** Returns memory used for profiler internal data and snapshots. */
size_t GetProfilerMemorySize();
private:
HeapProfiler();
~HeapProfiler();
......
......@@ -10450,11 +10450,6 @@ void HeapProfiler::SetGetRetainerInfosCallback(
callback);
}
size_t HeapProfiler::GetProfilerMemorySize() {
return reinterpret_cast<i::HeapProfiler*>(this)->
GetMemorySizeUsedByProfiler();
}
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
......
......@@ -200,13 +200,6 @@ class List {
DISALLOW_COPY_AND_ASSIGN(List);
};
// Estimates the memory footprint of a List: the list header plus the
// payload of its current elements (length, not capacity, is counted).
template <typename T, class P>
size_t GetMemoryUsedByList(const List<T, P>& list) {
  const size_t payload = list.length() * sizeof(T);
  return payload + sizeof(list);
}
class Map;
class FieldType;
class Code;
......
......@@ -135,13 +135,11 @@ void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) {
}
}
// Writes a heap-objects stats update into |stream| via the id map and
// returns the snapshot object id the map reports.
SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream,
                                                    int64_t* timestamp_us) {
  const SnapshotObjectId result =
      ids_->PushHeapObjectsStats(stream, timestamp_us);
  return result;
}
void HeapProfiler::StopHeapObjectsTracking() {
ids_->StopHeapObjectsTracking();
if (is_tracking_allocations()) {
......@@ -150,36 +148,20 @@ void HeapProfiler::StopHeapObjectsTracking() {
}
}
// Totals the profiler's own footprint: this object, the string table, the
// heap-objects id map, the snapshot list header, and the raw size of every
// retained snapshot.
size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
  size_t total = sizeof(*this) + names_->GetUsedMemorySize() +
                 ids_->GetUsedMemorySize() + GetMemoryUsedByList(snapshots_);
  const int snapshot_count = snapshots_.length();
  for (int index = 0; index < snapshot_count; ++index) {
    total += snapshots_[index]->RawSnapshotSize();
  }
  return total;
}
// Number of heap snapshots currently retained by the profiler.
int HeapProfiler::GetSnapshotsCount() { return snapshots_.length(); }
// Returns the retained snapshot stored at position |index|.
HeapSnapshot* HeapProfiler::GetSnapshot(int index) { return snapshots_.at(index); }
// Maps |obj| to its tracked snapshot id. Values that are not heap objects
// are never tracked and yield kUnknownObjectId.
SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
  if (obj->IsHeapObject()) {
    return ids_->FindEntry(HeapObject::cast(*obj)->address());
  }
  return v8::HeapProfiler::kUnknownObjectId;
}
void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
base::LockGuard<base::Mutex> guard(&profiler_mutex_);
bool known_object = ids_->MoveObject(from, to, size);
......@@ -188,7 +170,6 @@ void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
}
}
void HeapProfiler::AllocationEvent(Address addr, int size) {
DisallowHeapAllocation no_allocation;
if (allocation_tracker_) {
......
......@@ -25,8 +25,6 @@ class HeapProfiler {
explicit HeapProfiler(Heap* heap);
~HeapProfiler();
size_t GetMemorySizeUsedByProfiler();
HeapSnapshot* TakeSnapshot(
v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver);
......
......@@ -328,20 +328,10 @@ List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
return &sorted_entries_;
}
void HeapSnapshot::Print(int max_depth) {
root()->Print("", "", max_depth, 0);
}
// Approximate memory retained by this snapshot: the object itself plus the
// payloads of the entry, edge, child and sorted-entry containers.
size_t HeapSnapshot::RawSnapshotSize() const {
  size_t bytes = sizeof(*this);
  bytes += GetMemoryUsedByList(entries_);
  bytes += edges_.size() * sizeof(decltype(edges_)::value_type);
  bytes += children_.size() * sizeof(decltype(children_)::value_type);
  bytes += GetMemoryUsedByList(sorted_entries_);
  return bytes;
}
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
......@@ -355,17 +345,13 @@ const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId), heap_(heap) {
  // NOTE(review): the two comment blocks and the two entries_.Add() calls
  // below look like both the old and the new side of a merged diff; adding
  // the dummy element twice (and with different EntryInfo arities) is almost
  // certainly unintended -- confirm which version is the live one.
  //
  // This dummy element solves a problem with entries_map_.
  // When we do lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value or it has created
  // a new entry on the fly with NULL as the default value.
  // With such dummy element we have a guarantee that all entries_map_
  // entries will have the value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
  // The dummy element at zero index is needed as entries_map_ cannot hold
  // an entry with zero value. Otherwise it's impossible to tell if
  // LookupOrInsert has added a new item or is just returning an existing
  // one having the value of zero.
  entries_.Add(EntryInfo(0, nullptr, 0, true));
}
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
DCHECK(to != NULL);
DCHECK(from != NULL);
......@@ -485,114 +471,6 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
}
}
namespace {

// Pairs a live heap object with the size the profiler's object map expects
// it to have. expected_size == 0 marks an object the map does not track.
struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;        // The object observed during the heap walk.
  int expected_size;      // Size recorded in the map; 0 if untracked.

  // True when the recorded size matches the object's actual size.
  bool IsValid() const { return expected_size == obj->Size(); }

  // Emits one human-readable line describing the object's tracking status
  // (untracked / stale size / consistent), including its address range.
  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             static_cast<void*>(obj->address()), obj->Size(),
             static_cast<void*>(obj->address() + obj->Size()));
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n", expected_size,
             static_cast<void*>(obj->address()), obj->Size(),
             static_cast<void*>(obj->address() + obj->Size()));
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             static_cast<void*>(obj->address()), expected_size,
             static_cast<void*>(obj->address() + obj->Size()));
    }
  }
};

// Orders HeapObjectInfo records by object address (for sorted trace output).
static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}

}  // namespace
// Heap-consistency debugging helper. Walks every live heap object and
// compares it against entries_map_: objects missing from the map and
// objects whose recorded size is stale count as inconsistencies, and the
// total is returned. With FLAG_heap_profiler_trace_objects the offenders
// (plus one neighbor before and after each, for context) are printed;
// without the flag a stale size is a fatal CHECK failure instead.
// NOTE(review): in trace mode invalid records appear to be counted in both
// the collection pass and the printing pass, so the returned total differs
// between trace and non-trace modes -- confirm this is intended.
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  // Pass 1: scan the heap, count inconsistencies, and (when tracing)
  // collect a record for every object so they can be printed sorted.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    base::HashMap::Entry* entry =
        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
    if (entry == NULL) {
      // Object is completely unknown to the map.
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      // Object is tracked; verify its recorded size is still accurate.
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                                        static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  // Pass 2 (trace mode only): print invalid records in address order,
  // together with the immediately preceding and following objects; runs of
  // consistent objects are elided with a "skipped" summary line.
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        // Show the object just before a bad one for context, noting how
        // many consistent objects were skipped since the last print.
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        // Also show the first consistent object after a bad one.
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
int64_t* timestamp_us) {
UpdateHeapObjectsMap();
......@@ -687,13 +565,6 @@ SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
return id << 1;
}
// Approximate bytes retained by the map: the object itself, the hash-map
// backing store at its current capacity, and the entry/time-interval lists.
size_t HeapObjectsMap::GetUsedMemorySize() const {
  size_t bytes = sizeof(*this);
  bytes += sizeof(base::HashMap::Entry) * entries_map_.capacity();
  bytes += GetMemoryUsedByList(entries_);
  bytes += GetMemoryUsedByList(time_intervals_);
  return bytes;
}
// Starts with an empty entries_ map.
HeapEntriesMap::HeapEntriesMap() : entries_() {}
int HeapEntriesMap::Map(HeapThing thing) {
......
......@@ -158,7 +158,6 @@ class HeapSnapshot {
void Delete();
HeapProfiler* profiler() { return profiler_; }
size_t RawSnapshotSize() const;
HeapEntry* root() { return &entries_[root_index_]; }
HeapEntry* gc_roots() { return &entries_[gc_roots_index_]; }
HeapEntry* gc_subroot(int index) {
......@@ -235,7 +234,6 @@ class HeapObjectsMap {
SnapshotObjectId PushHeapObjectsStats(OutputStream* stream,
int64_t* timestamp_us);
const List<TimeInterval>& samples() const { return time_intervals_; }
size_t GetUsedMemorySize() const;
SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
......@@ -245,17 +243,14 @@ class HeapObjectsMap {
static const SnapshotObjectId kGcRootsFirstSubrootId;
static const SnapshotObjectId kFirstAvailableObjectId;
int FindUntrackedObjects();
void UpdateHeapObjectsMap();
void RemoveDeadEntries();
private:
struct EntryInfo {
EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
: id(id), addr(addr), size(size), accessed(true) { }
EntryInfo(SnapshotObjectId id, Address addr, unsigned int size, bool accessed)
: id(id), addr(addr), size(size), accessed(accessed) { }
EntryInfo(SnapshotObjectId id, Address addr, unsigned int size,
bool accessed)
: id(id), addr(addr), size(size), accessed(accessed) {}
SnapshotObjectId id;
Address addr;
unsigned int size;
......@@ -271,12 +266,10 @@ class HeapObjectsMap {
DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};
// A typedef for referencing anything that can be snapshotted living
// in any kind of heap memory.
typedef void* HeapThing;
// An interface that creates HeapEntries by HeapThings.
class HeapEntriesAllocator {
public:
......@@ -284,7 +277,6 @@ class HeapEntriesAllocator {
virtual HeapEntry* AllocateEntry(HeapThing ptr) = 0;
};
// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
......
......@@ -91,35 +91,22 @@ const char* StringsStorage::GetName(Name* name) {
return "";
}
// Returns |index| formatted as a decimal string via GetFormatted.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
// Function names are stored the same way as any other Name.
const char* StringsStorage::GetFunctionName(Name* name) {
  return GetName(name);
}
// C-string function names are stored as a copy (see GetCopy).
const char* StringsStorage::GetFunctionName(const char* name) {
  return GetCopy(name);
}
// Approximate bytes retained by the storage: the object itself, the hash
// table's backing store at its current capacity, and the characters of
// every stored string (including each terminating NUL).
size_t StringsStorage::GetUsedMemorySize() const {
  size_t size = sizeof(*this);
  size += sizeof(base::HashMap::Entry) * names_.capacity();
  // Use nullptr for consistency with the rest of the file's modern code.
  for (base::HashMap::Entry* p = names_.Start(); p != nullptr;
       p = names_.Next(p)) {
    // Each entry's value holds a NUL-terminated C string.
    size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
  }
  return size;
}
// Finds or inserts the hash-map slot for the raw string |str| of length
// |len|, hashing with this storage's seed.
base::HashMap::Entry* StringsStorage::GetEntry(const char* str, int len) {
  const uint32_t hash =
      StringHasher::HashSequentialString(str, len, hash_seed_);
  return names_.LookupOrInsert(const_cast<char*>(str), hash);
}
} // namespace internal
} // namespace v8
......@@ -29,7 +29,6 @@ class StringsStorage {
const char* GetName(int index);
const char* GetFunctionName(Name* name);
const char* GetFunctionName(const char* name);
size_t GetUsedMemorySize() const;
private:
static const int kMaxNameSize = 1024;
......@@ -43,6 +42,7 @@ class StringsStorage {
DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};
} // namespace internal
} // namespace v8
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment