Commit aca33312 authored by Wez, committed by Commit Bot

[heap] Replace retained_size() with ExternalBackingStoreBytes().

ArrayBuffer memory allocated off-heap was previously tracked by a test-
only retained_size() field on each LocalArrayBufferTracker.

Changes in off-heap ArrayBuffer memory usage are now reported to the
Space with which the ArrayBuffer is associated, so that the value is
cheaply available to include in e.g. GC limit calculations, via a new
getter, ExternalBackingStoreBytes().

Changes to external ArrayBuffer backing-store allocations are tracked in
an AtomicNumber associated with each Space, to allow for ArrayBuffers
being concurrently moved or freed from multiple Pages in the same Space
during sweeps & compactions.

Bug: chromium:837583
Change-Id: I8b1b6addd5cd05533d8da55ca813e134bc36e181
Reviewed-on: https://chromium-review.googlesource.com/1052347
Commit-Queue: Wez <wez@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53192}
parent 3b59a3dd
...@@ -47,6 +47,7 @@ class ArrayBufferCollector::FreeingTask final : public CancelableTask { ...@@ -47,6 +47,7 @@ class ArrayBufferCollector::FreeingTask final : public CancelableTask {
}; };
void ArrayBufferCollector::FreeAllocationsOnBackgroundThread() { void ArrayBufferCollector::FreeAllocationsOnBackgroundThread() {
// TODO(wez): Remove backing-store from external memory accounting.
heap_->account_external_memory_concurrently_freed(); heap_->account_external_memory_concurrently_freed();
if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) { if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) {
V8::GetCurrentPlatform()->CallOnWorkerThread( V8::GetCurrentPlatform()->CallOnWorkerThread(
......
...@@ -8,6 +8,7 @@ ...@@ -8,6 +8,7 @@
#include "src/conversions-inl.h" #include "src/conversions-inl.h"
#include "src/heap/array-buffer-tracker.h" #include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap.h" #include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/objects.h" #include "src/objects.h"
namespace v8 { namespace v8 {
...@@ -28,6 +29,8 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) { ...@@ -28,6 +29,8 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker); DCHECK_NOT_NULL(tracker);
tracker->Add(buffer, length); tracker->Add(buffer, length);
} }
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the // We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case. // api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap->isolate()) reinterpret_cast<v8::Isolate*>(heap->isolate())
...@@ -45,33 +48,38 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) { ...@@ -45,33 +48,38 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker); DCHECK_NOT_NULL(tracker);
tracker->Remove(buffer, length); tracker->Remove(buffer, length);
} }
// TODO(wez): Remove backing-store from external memory accounting.
heap->update_external_memory(-static_cast<intptr_t>(length)); heap->update_external_memory(-static_cast<intptr_t>(length));
} }
template <typename Callback> template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) { void LocalArrayBufferTracker::Free(Callback should_free) {
size_t new_retained_size = 0; size_t freed_memory = 0;
Isolate* isolate = heap_->isolate(); Isolate* isolate = space_->heap()->isolate();
for (TrackingData::iterator it = array_buffers_.begin(); for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) { it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first); JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
const size_t length = it->second; const size_t length = it->second;
if (should_free(buffer)) { if (should_free(buffer)) {
JSArrayBuffer::FreeBackingStore( JSArrayBuffer::FreeBackingStore(
isolate, {buffer->backing_store(), length, buffer->backing_store(), isolate, {buffer->backing_store(), length, buffer->backing_store(),
buffer->allocation_mode(), buffer->is_wasm_memory()}); buffer->allocation_mode(), buffer->is_wasm_memory()});
it = array_buffers_.erase(it); it = array_buffers_.erase(it);
freed_memory += length;
} else { } else {
new_retained_size += length;
++it; ++it;
} }
} }
const size_t freed_memory = retained_size_ - new_retained_size;
if (freed_memory > 0) { if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed( // Update the Space with any freed backing-store bytes.
space_->DecrementExternalBackingStoreBytes(freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory)); static_cast<intptr_t>(freed_memory));
} }
retained_size_ = new_retained_size;
} }
template <typename MarkingState> template <typename MarkingState>
...@@ -88,8 +96,9 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) { ...@@ -88,8 +96,9 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
} }
void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) { void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
DCHECK_GE(retained_size_ + length, retained_size_); // Track the backing-store usage against the owning Space.
retained_size_ += length; space_->IncrementExternalBackingStoreBytes(length);
auto ret = array_buffers_.insert({buffer, length}); auto ret = array_buffers_.insert({buffer, length});
USE(ret); USE(ret);
// Check that we indeed inserted a new value and did not overwrite an existing // Check that we indeed inserted a new value and did not overwrite an existing
...@@ -98,8 +107,9 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) { ...@@ -98,8 +107,9 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
} }
void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) { void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
DCHECK_GE(retained_size_, retained_size_ - length); // Remove the backing-store accounting from the owning Space.
retained_size_ -= length; space_->DecrementExternalBackingStoreBytes(length);
TrackingData::iterator it = array_buffers_.find(buffer); TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed find a key to remove. // Check that we indeed find a key to remove.
DCHECK(it != array_buffers_.end()); DCHECK(it != array_buffers_.end());
......
...@@ -25,14 +25,13 @@ void LocalArrayBufferTracker::Process(Callback callback) { ...@@ -25,14 +25,13 @@ void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr; JSArrayBuffer* new_buffer = nullptr;
JSArrayBuffer* old_buffer = nullptr; JSArrayBuffer* old_buffer = nullptr;
size_t new_retained_size = 0; size_t freed_memory = 0;
size_t moved_size = 0; size_t moved_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin(); for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) { it != array_buffers_.end();) {
old_buffer = reinterpret_cast<JSArrayBuffer*>(it->first); old_buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
const CallbackResult result = callback(old_buffer, &new_buffer); const CallbackResult result = callback(old_buffer, &new_buffer);
if (result == kKeepEntry) { if (result == kKeepEntry) {
new_retained_size += NumberToSize(old_buffer->byte_length());
++it; ++it;
} else if (result == kUpdateEntry) { } else if (result == kUpdateEntry) {
DCHECK_NOT_NULL(new_buffer); DCHECK_NOT_NULL(new_buffer);
...@@ -46,11 +45,12 @@ void LocalArrayBufferTracker::Process(Callback callback) { ...@@ -46,11 +45,12 @@ void LocalArrayBufferTracker::Process(Callback callback) {
} }
DCHECK_NOT_NULL(tracker); DCHECK_NOT_NULL(tracker);
const size_t size = NumberToSize(new_buffer->byte_length()); const size_t size = NumberToSize(new_buffer->byte_length());
moved_size += size;
tracker->Add(new_buffer, size); tracker->Add(new_buffer, size);
} }
moved_memory += it->second;
it = array_buffers_.erase(it); it = array_buffers_.erase(it);
} else if (result == kRemoveEntry) { } else if (result == kRemoveEntry) {
freed_memory += it->second;
// We pass backing_store() and stored length to the collector for freeing // We pass backing_store() and stored length to the collector for freeing
// the backing store. Wasm allocations will go through their own tracker // the backing store. Wasm allocations will go through their own tracker
// based on the backing store. // based on the backing store.
...@@ -62,17 +62,19 @@ void LocalArrayBufferTracker::Process(Callback callback) { ...@@ -62,17 +62,19 @@ void LocalArrayBufferTracker::Process(Callback callback) {
UNREACHABLE(); UNREACHABLE();
} }
} }
const size_t freed_memory = retained_size_ - new_retained_size - moved_size; if (moved_memory || freed_memory) {
if (freed_memory > 0) { // Update the Space with any moved or freed backing-store bytes.
heap_->update_external_memory_concurrently_freed( space_->DecrementExternalBackingStoreBytes(freed_memory + moved_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory)); static_cast<intptr_t>(freed_memory));
} }
retained_size_ = new_retained_size;
// Pass the backing stores that need to be freed to the main thread for later // Pass the backing stores that need to be freed to the main thread for later
// distribution. // distribution.
// ArrayBufferCollector takes ownership of this pointer. // ArrayBufferCollector takes ownership of this pointer.
heap_->array_buffer_collector()->AddGarbageAllocations( space_->heap()->array_buffer_collector()->AddGarbageAllocations(
backing_stores_to_free); backing_stores_to_free);
} }
...@@ -85,17 +87,6 @@ void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) { ...@@ -85,17 +87,6 @@ void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
} }
} }
size_t ArrayBufferTracker::RetainedInNewSpace(Heap* heap) {
size_t retained_size = 0;
for (Page* page : PageRange(heap->new_space()->ToSpaceStart(),
heap->new_space()->ToSpaceEnd())) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) continue;
retained_size += tracker->retained_size();
}
return retained_size;
}
void ArrayBufferTracker::FreeAll(Page* page) { void ArrayBufferTracker::FreeAll(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker(); LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return; if (tracker == nullptr) return;
......
...@@ -14,10 +14,10 @@ ...@@ -14,10 +14,10 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
class Heap;
class JSArrayBuffer; class JSArrayBuffer;
class MarkingState; class MarkingState;
class Page; class Page;
class Space;
class ArrayBufferTracker : public AllStatic { class ArrayBufferTracker : public AllStatic {
public: public:
...@@ -38,9 +38,6 @@ class ArrayBufferTracker : public AllStatic { ...@@ -38,9 +38,6 @@ class ArrayBufferTracker : public AllStatic {
// Does not take any locks and can only be called during Scavenge. // Does not take any locks and can only be called during Scavenge.
static void PrepareToFreeDeadInNewSpace(Heap* heap); static void PrepareToFreeDeadInNewSpace(Heap* heap);
// Number of array buffer bytes retained from new space.
static size_t RetainedInNewSpace(Heap* heap);
// Frees all backing store pointers for dead JSArrayBuffer on a given page. // Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be // Requires marking information to be present. Requires the page lock to be
// taken by the caller. // taken by the caller.
...@@ -70,8 +67,7 @@ class LocalArrayBufferTracker { ...@@ -70,8 +67,7 @@ class LocalArrayBufferTracker {
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry }; enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll }; enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Heap* heap) explicit LocalArrayBufferTracker(Space* space) : space_(space) {}
: heap_(heap), retained_size_(0) {}
~LocalArrayBufferTracker(); ~LocalArrayBufferTracker();
inline void Add(JSArrayBuffer* buffer, size_t length); inline void Add(JSArrayBuffer* buffer, size_t length);
...@@ -101,8 +97,6 @@ class LocalArrayBufferTracker { ...@@ -101,8 +97,6 @@ class LocalArrayBufferTracker {
return array_buffers_.find(buffer) != array_buffers_.end(); return array_buffers_.find(buffer) != array_buffers_.end();
} }
size_t retained_size() const { return retained_size_; }
private: private:
class Hasher { class Hasher {
public: public:
...@@ -118,12 +112,10 @@ class LocalArrayBufferTracker { ...@@ -118,12 +112,10 @@ class LocalArrayBufferTracker {
// different memory pages, making it impossible to guarantee order of freeing. // different memory pages, making it impossible to guarantee order of freeing.
typedef std::unordered_map<JSArrayBuffer*, size_t, Hasher> TrackingData; typedef std::unordered_map<JSArrayBuffer*, size_t, Hasher> TrackingData;
Heap* heap_; Space* space_;
// The set contains raw heap pointers which are removed by the GC upon // The set contains raw heap pointers which are removed by the GC upon
// processing the tracker through its owning page. // processing the tracker through its owning page.
TrackingData array_buffers_; TrackingData array_buffers_;
// Retained size of array buffers for this tracker in bytes.
size_t retained_size_;
}; };
} // namespace internal } // namespace internal
......
...@@ -1395,7 +1395,7 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object, ...@@ -1395,7 +1395,7 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object,
void MemoryChunk::AllocateLocalTracker() { void MemoryChunk::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_); DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(heap()); local_tracker_ = new LocalArrayBufferTracker(owner());
} }
void MemoryChunk::ReleaseLocalTracker() { void MemoryChunk::ReleaseLocalTracker() {
......
...@@ -907,7 +907,8 @@ class Space : public Malloced { ...@@ -907,7 +907,8 @@ class Space : public Malloced {
heap_(heap), heap_(heap),
id_(id), id_(id),
committed_(0), committed_(0),
max_committed_(0) {} max_committed_(0),
external_backing_store_bytes_(0) {}
virtual ~Space() {} virtual ~Space() {}
...@@ -943,6 +944,11 @@ class Space : public Malloced { ...@@ -943,6 +944,11 @@ class Space : public Malloced {
// (e.g. see LargeObjectSpace). // (e.g. see LargeObjectSpace).
virtual size_t SizeOfObjects() { return Size(); } virtual size_t SizeOfObjects() { return Size(); }
// Returns amount of off-heap memory in-use by objects in this Space.
virtual size_t ExternalBackingStoreBytes() const {
return external_backing_store_bytes_;
}
// Approximate amount of physical memory committed for this space. // Approximate amount of physical memory committed for this space.
virtual size_t CommittedPhysicalMemory() = 0; virtual size_t CommittedPhysicalMemory() = 0;
...@@ -972,6 +978,13 @@ class Space : public Malloced { ...@@ -972,6 +978,13 @@ class Space : public Malloced {
committed_ -= bytes; committed_ -= bytes;
} }
void IncrementExternalBackingStoreBytes(size_t amount) {
external_backing_store_bytes_ += amount;
}
void DecrementExternalBackingStoreBytes(size_t amount) {
external_backing_store_bytes_ -= amount;
}
V8_EXPORT_PRIVATE void* GetRandomMmapAddr(); V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
#ifdef DEBUG #ifdef DEBUG
...@@ -995,6 +1008,9 @@ class Space : public Malloced { ...@@ -995,6 +1008,9 @@ class Space : public Malloced {
size_t committed_; size_t committed_;
size_t max_committed_; size_t max_committed_;
// Tracks off-heap memory used by this space.
std::atomic<size_t> external_backing_store_bytes_;
DISALLOW_COPY_AND_ASSIGN(Space); DISALLOW_COPY_AND_ASSIGN(Space);
}; };
...@@ -2620,6 +2636,11 @@ class NewSpace : public SpaceWithLinearArea { ...@@ -2620,6 +2636,11 @@ class NewSpace : public SpaceWithLinearArea {
return Capacity() - Size(); return Capacity() - Size();
} }
size_t ExternalBackingStoreBytes() const override {
DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes());
return to_space_.ExternalBackingStoreBytes();
}
size_t AllocatedSinceLastGC() { size_t AllocatedSinceLastGC() {
const Address age_mark = to_space_.age_mark(); const Address age_mark = to_space_.age_mark();
DCHECK_NE(age_mark, kNullAddress); DCHECK_NE(age_mark, kNullAddress);
...@@ -2968,7 +2989,6 @@ class LargeObjectSpace : public Space { ...@@ -2968,7 +2989,6 @@ class LargeObjectSpace : public Space {
inline size_t Available() override; inline size_t Available() override;
size_t Size() override { return size_; } size_t Size() override { return size_; }
size_t SizeOfObjects() override { return objects_size_; } size_t SizeOfObjects() override { return objects_size_; }
// Approximate amount of physical memory committed for this space. // Approximate amount of physical memory committed for this space.
...@@ -3029,12 +3049,14 @@ class LargeObjectSpace : public Space { ...@@ -3029,12 +3049,14 @@ class LargeObjectSpace : public Space {
private: private:
// The head of the linked list of large object chunks. // The head of the linked list of large object chunks.
LargePage* first_page_; LargePage* first_page_;
size_t size_; // allocated bytes size_t size_; // allocated bytes
int page_count_; // number of chunks int page_count_; // number of chunks
size_t objects_size_; // size of objects size_t objects_size_; // size of objects
// The chunk_map_mutex_ has to be used when the chunk map is accessed // The chunk_map_mutex_ has to be used when the chunk map is accessed
// concurrently. // concurrently.
base::Mutex chunk_map_mutex_; base::Mutex chunk_map_mutex_;
// Page-aligned addresses to their corresponding LargePage. // Page-aligned addresses to their corresponding LargePage.
std::unordered_map<Address, LargePage*> chunk_map_; std::unordered_map<Address, LargePage*> chunk_map_;
......
...@@ -338,30 +338,33 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) { ...@@ -338,30 +338,33 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
isolate->Dispose(); isolate->Dispose();
} }
TEST(ArrayBuffer_RetainedSizeIncreases) { TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
CcTest::InitializeVM(); CcTest::InitializeVM();
LocalContext env; LocalContext env;
v8::Isolate* isolate = env->GetIsolate(); v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap(); Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
const size_t retained_before = ArrayBufferTracker::RetainedInNewSpace(heap); const size_t backing_store_before =
heap->new_space()->ExternalBackingStoreBytes();
{ {
const size_t kArraybufferSize = 117; const size_t kArraybufferSize = 117;
v8::HandleScope handle_scope(isolate); v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize); Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
USE(ab); USE(ab);
const size_t retained_after = ArrayBufferTracker::RetainedInNewSpace(heap); const size_t backing_store_after =
CHECK_EQ(kArraybufferSize, retained_after - retained_before); heap->new_space()->ExternalBackingStoreBytes();
CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
} }
} }
TEST(ArrayBuffer_RetainedSizeDecreases) { TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
CcTest::InitializeVM(); CcTest::InitializeVM();
LocalContext env; LocalContext env;
v8::Isolate* isolate = env->GetIsolate(); v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap(); Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
const size_t retained_before = ArrayBufferTracker::RetainedInNewSpace(heap); const size_t backing_store_before =
heap->new_space()->ExternalBackingStoreBytes();
{ {
const size_t kArraybufferSize = 117; const size_t kArraybufferSize = 117;
v8::HandleScope handle_scope(isolate); v8::HandleScope handle_scope(isolate);
...@@ -369,8 +372,9 @@ TEST(ArrayBuffer_RetainedSizeDecreases) { ...@@ -369,8 +372,9 @@ TEST(ArrayBuffer_RetainedSizeDecreases) {
USE(ab); USE(ab);
} }
heap::GcAndSweep(heap, OLD_SPACE); heap::GcAndSweep(heap, OLD_SPACE);
const size_t retained_after = ArrayBufferTracker::RetainedInNewSpace(heap); const size_t backing_store_after =
CHECK_EQ(0, retained_after - retained_before); heap->new_space()->ExternalBackingStoreBytes();
CHECK_EQ(0, backing_store_after - backing_store_before);
} }
} // namespace heap } // namespace heap
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment