Commit aca33312 authored by Wez, committed by Commit Bot

[heap] Replace retained_size() with ExternalBackingStoreBytes().

ArrayBuffer memory allocated off-heap was previously tracked by a test-
only retained_size() field on each LocalArrayBufferTracker.

Changes in off-heap ArrayBuffer memory usage are now reported to the
Space with which the ArrayBuffer is associated, so that the value is
cheaply available to include in e.g. GC limit calculations, via a new
getter, ExternalBackingStoreBytes().

Changes to external ArrayBuffer backing-store allocations are tracked in
an AtomicNumber associated with each Space, to allow for ArrayBuffers
being concurrently moved or freed from multiple Pages in the same Space
during sweeps & compactions.

Bug: chromium:837583
Change-Id: I8b1b6addd5cd05533d8da55ca813e134bc36e181
Reviewed-on: https://chromium-review.googlesource.com/1052347
Commit-Queue: Wez <wez@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53192}
parent 3b59a3dd
......@@ -47,6 +47,7 @@ class ArrayBufferCollector::FreeingTask final : public CancelableTask {
};
void ArrayBufferCollector::FreeAllocationsOnBackgroundThread() {
// TODO(wez): Remove backing-store from external memory accounting.
heap_->account_external_memory_concurrently_freed();
if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) {
V8::GetCurrentPlatform()->CallOnWorkerThread(
......
......@@ -8,6 +8,7 @@
#include "src/conversions-inl.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/objects.h"
namespace v8 {
......@@ -28,6 +29,8 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
tracker->Add(buffer, length);
}
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap->isolate())
......@@ -45,33 +48,38 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
tracker->Remove(buffer, length);
}
// TODO(wez): Remove backing-store from external memory accounting.
heap->update_external_memory(-static_cast<intptr_t>(length));
}
template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) {
size_t new_retained_size = 0;
Isolate* isolate = heap_->isolate();
size_t freed_memory = 0;
Isolate* isolate = space_->heap()->isolate();
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
const size_t length = it->second;
if (should_free(buffer)) {
JSArrayBuffer::FreeBackingStore(
isolate, {buffer->backing_store(), length, buffer->backing_store(),
buffer->allocation_mode(), buffer->is_wasm_memory()});
it = array_buffers_.erase(it);
freed_memory += length;
} else {
new_retained_size += length;
++it;
}
}
const size_t freed_memory = retained_size_ - new_retained_size;
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
// Update the Space with any freed backing-store bytes.
space_->DecrementExternalBackingStoreBytes(freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
retained_size_ = new_retained_size;
}
template <typename MarkingState>
......@@ -88,8 +96,9 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
}
void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
DCHECK_GE(retained_size_ + length, retained_size_);
retained_size_ += length;
// Track the backing-store usage against the owning Space.
space_->IncrementExternalBackingStoreBytes(length);
auto ret = array_buffers_.insert({buffer, length});
USE(ret);
// Check that we indeed inserted a new value and did not overwrite an existing
......@@ -98,8 +107,9 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
}
void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
DCHECK_GE(retained_size_, retained_size_ - length);
retained_size_ -= length;
// Remove the backing-store accounting from the owning Space.
space_->DecrementExternalBackingStoreBytes(length);
TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed find a key to remove.
DCHECK(it != array_buffers_.end());
......
......@@ -25,14 +25,13 @@ void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
JSArrayBuffer* old_buffer = nullptr;
size_t new_retained_size = 0;
size_t moved_size = 0;
size_t freed_memory = 0;
size_t moved_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
old_buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
const CallbackResult result = callback(old_buffer, &new_buffer);
if (result == kKeepEntry) {
new_retained_size += NumberToSize(old_buffer->byte_length());
++it;
} else if (result == kUpdateEntry) {
DCHECK_NOT_NULL(new_buffer);
......@@ -46,11 +45,12 @@ void LocalArrayBufferTracker::Process(Callback callback) {
}
DCHECK_NOT_NULL(tracker);
const size_t size = NumberToSize(new_buffer->byte_length());
moved_size += size;
tracker->Add(new_buffer, size);
}
moved_memory += it->second;
it = array_buffers_.erase(it);
} else if (result == kRemoveEntry) {
freed_memory += it->second;
// We pass backing_store() and stored length to the collector for freeing
// the backing store. Wasm allocations will go through their own tracker
// based on the backing store.
......@@ -62,17 +62,19 @@ void LocalArrayBufferTracker::Process(Callback callback) {
UNREACHABLE();
}
}
const size_t freed_memory = retained_size_ - new_retained_size - moved_size;
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
if (moved_memory || freed_memory) {
// Update the Space with any moved or freed backing-store bytes.
space_->DecrementExternalBackingStoreBytes(freed_memory + moved_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
retained_size_ = new_retained_size;
// Pass the backing stores that need to be freed to the main thread for later
// distribution.
// ArrayBufferCollector takes ownership of this pointer.
heap_->array_buffer_collector()->AddGarbageAllocations(
space_->heap()->array_buffer_collector()->AddGarbageAllocations(
backing_stores_to_free);
}
......@@ -85,17 +87,6 @@ void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
}
}
size_t ArrayBufferTracker::RetainedInNewSpace(Heap* heap) {
size_t retained_size = 0;
for (Page* page : PageRange(heap->new_space()->ToSpaceStart(),
heap->new_space()->ToSpaceEnd())) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) continue;
retained_size += tracker->retained_size();
}
return retained_size;
}
void ArrayBufferTracker::FreeAll(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
......
......@@ -14,10 +14,10 @@
namespace v8 {
namespace internal {
class Heap;
class JSArrayBuffer;
class MarkingState;
class Page;
class Space;
class ArrayBufferTracker : public AllStatic {
public:
......@@ -38,9 +38,6 @@ class ArrayBufferTracker : public AllStatic {
// Does not take any locks and can only be called during Scavenge.
static void PrepareToFreeDeadInNewSpace(Heap* heap);
// Number of array buffer bytes retained from new space.
static size_t RetainedInNewSpace(Heap* heap);
// Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be
// taken by the caller.
......@@ -70,8 +67,7 @@ class LocalArrayBufferTracker {
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Heap* heap)
: heap_(heap), retained_size_(0) {}
explicit LocalArrayBufferTracker(Space* space) : space_(space) {}
~LocalArrayBufferTracker();
inline void Add(JSArrayBuffer* buffer, size_t length);
......@@ -101,8 +97,6 @@ class LocalArrayBufferTracker {
return array_buffers_.find(buffer) != array_buffers_.end();
}
size_t retained_size() const { return retained_size_; }
private:
class Hasher {
public:
......@@ -118,12 +112,10 @@ class LocalArrayBufferTracker {
// different memory pages, making it impossible to guarantee order of freeing.
typedef std::unordered_map<JSArrayBuffer*, size_t, Hasher> TrackingData;
Heap* heap_;
Space* space_;
// The set contains raw heap pointers which are removed by the GC upon
// processing the tracker through its owning page.
TrackingData array_buffers_;
// Retained size of array buffers for this tracker in bytes.
size_t retained_size_;
};
} // namespace internal
......
......@@ -1395,7 +1395,7 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object,
void MemoryChunk::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(heap());
local_tracker_ = new LocalArrayBufferTracker(owner());
}
void MemoryChunk::ReleaseLocalTracker() {
......
......@@ -907,7 +907,8 @@ class Space : public Malloced {
heap_(heap),
id_(id),
committed_(0),
max_committed_(0) {}
max_committed_(0),
external_backing_store_bytes_(0) {}
virtual ~Space() {}
......@@ -943,6 +944,11 @@ class Space : public Malloced {
// (e.g. see LargeObjectSpace).
virtual size_t SizeOfObjects() { return Size(); }
// Returns amount of off-heap memory in-use by objects in this Space.
virtual size_t ExternalBackingStoreBytes() const {
return external_backing_store_bytes_;
}
// Approximate amount of physical memory committed for this space.
virtual size_t CommittedPhysicalMemory() = 0;
......@@ -972,6 +978,13 @@ class Space : public Malloced {
committed_ -= bytes;
}
void IncrementExternalBackingStoreBytes(size_t amount) {
external_backing_store_bytes_ += amount;
}
void DecrementExternalBackingStoreBytes(size_t amount) {
external_backing_store_bytes_ -= amount;
}
V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
#ifdef DEBUG
......@@ -995,6 +1008,9 @@ class Space : public Malloced {
size_t committed_;
size_t max_committed_;
// Tracks off-heap memory used by this space.
std::atomic<size_t> external_backing_store_bytes_;
DISALLOW_COPY_AND_ASSIGN(Space);
};
......@@ -2620,6 +2636,11 @@ class NewSpace : public SpaceWithLinearArea {
return Capacity() - Size();
}
size_t ExternalBackingStoreBytes() const override {
DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes());
return to_space_.ExternalBackingStoreBytes();
}
size_t AllocatedSinceLastGC() {
const Address age_mark = to_space_.age_mark();
DCHECK_NE(age_mark, kNullAddress);
......@@ -2968,7 +2989,6 @@ class LargeObjectSpace : public Space {
inline size_t Available() override;
size_t Size() override { return size_; }
size_t SizeOfObjects() override { return objects_size_; }
// Approximate amount of physical memory committed for this space.
......@@ -3029,12 +3049,14 @@ class LargeObjectSpace : public Space {
private:
// The head of the linked list of large object chunks.
LargePage* first_page_;
size_t size_; // allocated bytes
int page_count_; // number of chunks
size_t objects_size_; // size of objects
size_t size_; // allocated bytes
int page_count_; // number of chunks
size_t objects_size_; // size of objects
// The chunk_map_mutex_ has to be used when the chunk map is accessed
// concurrently.
base::Mutex chunk_map_mutex_;
// Page-aligned addresses to their corresponding LargePage.
std::unordered_map<Address, LargePage*> chunk_map_;
......
......@@ -338,30 +338,33 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
isolate->Dispose();
}
TEST(ArrayBuffer_RetainedSizeIncreases) {
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
const size_t retained_before = ArrayBufferTracker::RetainedInNewSpace(heap);
const size_t backing_store_before =
heap->new_space()->ExternalBackingStoreBytes();
{
const size_t kArraybufferSize = 117;
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
USE(ab);
const size_t retained_after = ArrayBufferTracker::RetainedInNewSpace(heap);
CHECK_EQ(kArraybufferSize, retained_after - retained_before);
const size_t backing_store_after =
heap->new_space()->ExternalBackingStoreBytes();
CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
}
}
TEST(ArrayBuffer_RetainedSizeDecreases) {
TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
const size_t retained_before = ArrayBufferTracker::RetainedInNewSpace(heap);
const size_t backing_store_before =
heap->new_space()->ExternalBackingStoreBytes();
{
const size_t kArraybufferSize = 117;
v8::HandleScope handle_scope(isolate);
......@@ -369,8 +372,9 @@ TEST(ArrayBuffer_RetainedSizeDecreases) {
USE(ab);
}
heap::GcAndSweep(heap, OLD_SPACE);
const size_t retained_after = ArrayBufferTracker::RetainedInNewSpace(heap);
CHECK_EQ(0, retained_after - retained_before);
const size_t backing_store_after =
heap->new_space()->ExternalBackingStoreBytes();
CHECK_EQ(0, backing_store_after - backing_store_before);
}
} // namespace heap
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment