Commit 83dfd058 authored by Dominik Inführ, committed by Commit Bot

[heap] Improve accounting with ArrayBufferExtensions

Update external memory counters when using ArrayBufferExtensions. In
case the array buffers are swept concurrently, the counters are updated
at the beginning of the next minor/full GC. A subsequent GC is going
to update counters faster.

ArrayBufferExtension now stores the accounting_length such that
the sweeper always knows how much memory to deduct from the external
memory on destruction.

ArrayBufferList now also tracks the size of all ArrayBuffers in it.

Bug: v8:10064
Change-Id: I50a8b1180aa837b6932f834df1610255bd2bd9fd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2041441
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66195}
parent 98129efc
......@@ -21,6 +21,7 @@ void ArrayBufferList::Append(ArrayBufferExtension* extension) {
tail_ = extension;
}
bytes_ += extension->accounting_length();
extension->set_next(nullptr);
}
......@@ -37,6 +38,7 @@ void ArrayBufferList::Append(ArrayBufferList* list) {
DCHECK_NULL(list->tail_);
}
bytes_ += list->Bytes();
list->Reset();
}
......@@ -51,6 +53,18 @@ bool ArrayBufferList::Contains(ArrayBufferExtension* extension) {
return false;
}
size_t ArrayBufferList::BytesSlow() {
ArrayBufferExtension* current = head_;
size_t sum = 0;
while (current) {
sum += current->accounting_length();
current = current->next();
}
return sum;
}
void ArrayBufferSweeper::EnsureFinished() {
if (!sweeping_in_progress_) return;
......@@ -86,9 +100,16 @@ void ArrayBufferSweeper::EnsureFinished() {
UNREACHABLE();
}
DecrementExternalMemoryCounters();
sweeping_in_progress_ = false;
}
// Deducts the bytes freed by the just-finished sweeping job from both
// external-memory counters kept on the heap: the per-type backing-store
// bytes and the embedder-visible external memory total.
void ArrayBufferSweeper::DecrementExternalMemoryCounters() {
heap_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, job_.freed_bytes);
// Negative delta: subtract the freed bytes from the external memory total.
heap_->update_external_memory(-static_cast<int64_t>(job_.freed_bytes));
}
// Convenience entry point: requests sweeping of the young-generation
// array-buffer list only.
void ArrayBufferSweeper::RequestSweepYoung() {
RequestSweep(SweepingScope::Young);
}
......@@ -104,7 +125,7 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
return;
if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() &&
FLAG_concurrent_array_buffer_sweeping) {
!heap_->is_current_gc_forced() && FLAG_concurrent_array_buffer_sweeping) {
Prepare(scope);
auto task = MakeCancelableTask(heap_->isolate(), [this] {
......@@ -122,6 +143,7 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
Prepare(scope);
job_.Sweep();
Merge();
DecrementExternalMemoryCounters();
}
}
......@@ -172,6 +194,16 @@ void ArrayBufferSweeper::Append(JSArrayBuffer object,
} else {
old_.Append(extension);
}
size_t bytes = extension->accounting_length();
IncrementExternalMemoryCounters(bytes);
}
// Adds `bytes` to both external-memory counters when a new array buffer
// (extension) is appended to the sweeper's lists.
void ArrayBufferSweeper::IncrementExternalMemoryCounters(size_t bytes) {
heap_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, bytes);
// Go through the public v8::Isolate API so the embedder-visible external
// memory total (and its GC-triggering heuristics) is updated as well.
reinterpret_cast<v8::Isolate*>(heap_->isolate())
->AdjustAmountOfExternalAllocatedMemory(static_cast<int64_t>(bytes));
}
ArrayBufferSweeper::SweepingJob::SweepingJob()
......@@ -185,6 +217,7 @@ ArrayBufferSweeper::SweepingJob ArrayBufferSweeper::SweepingJob::Prepare(
job.scope = scope;
job.id = 0;
job.state = SweepingState::Prepared;
job.freed_bytes = 0;
return job;
}
......@@ -212,23 +245,28 @@ void ArrayBufferSweeper::SweepingJob::SweepFull() {
ArrayBufferList ArrayBufferSweeper::SweepingJob::SweepListFull(
ArrayBufferList* list) {
ArrayBufferExtension* current = list->head_;
ArrayBufferList survived;
ArrayBufferList survivor_list;
size_t freed_bytes = 0;
while (current) {
ArrayBufferExtension* next = current->next();
if (!current->IsMarked()) {
freed_bytes += current->accounting_length();
delete current;
} else {
current->Unmark();
survived.Append(current);
survivor_list.Append(current);
}
current = next;
}
this->freed_bytes += freed_bytes;
list->Reset();
return survived;
return survivor_list;
}
void ArrayBufferSweeper::SweepingJob::SweepYoung() {
......@@ -238,10 +276,13 @@ void ArrayBufferSweeper::SweepingJob::SweepYoung() {
ArrayBufferList new_young;
ArrayBufferList new_old;
size_t freed_bytes = 0;
while (current) {
ArrayBufferExtension* next = current->next();
if (!current->IsYoungMarked()) {
freed_bytes += current->accounting_length();
delete current;
} else if (current->IsYoungPromoted()) {
current->YoungUnmark();
......@@ -254,6 +295,7 @@ void ArrayBufferSweeper::SweepingJob::SweepYoung() {
current = next;
}
this->freed_bytes += freed_bytes;
old = new_old;
young = new_young;
}
......
......@@ -18,17 +18,24 @@ class Heap;
// Singly linked-list of ArrayBufferExtensions that stores head and tail of the
// list to allow for concatenation of lists.
struct ArrayBufferList {
ArrayBufferList() : head_(nullptr), tail_(nullptr) {}
ArrayBufferList() : head_(nullptr), tail_(nullptr), bytes_(0) {}
ArrayBufferExtension* head_;
ArrayBufferExtension* tail_;
size_t bytes_;
bool IsEmpty() {
DCHECK_IMPLIES(head_, tail_);
return head_ == nullptr;
}
void Reset() { head_ = tail_ = nullptr; }
size_t Bytes() { return bytes_; }
size_t BytesSlow();
void Reset() {
head_ = tail_ = nullptr;
bytes_ = 0;
}
void Append(ArrayBufferExtension* extension);
void Append(ArrayBufferList* list);
......@@ -64,6 +71,7 @@ class ArrayBufferSweeper {
ArrayBufferList young;
ArrayBufferList old;
SweepingScope scope;
size_t freed_bytes;
SweepingJob();
......@@ -77,6 +85,9 @@ class ArrayBufferSweeper {
void Merge();
void DecrementExternalMemoryCounters();
void IncrementExternalMemoryCounters(size_t bytes);
void RequestSweep(SweepingScope sweeping_task);
void Prepare(SweepingScope sweeping_task);
......
......@@ -20,14 +20,6 @@
namespace v8 {
namespace internal {
// Number of bytes this buffer contributes to per-isolate external-memory
// accounting.
// TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems with
// accounting for per-isolate external memory. In particular, sharing the same
// array buffer or memory multiple times, which happens in stress tests, can
// cause overcounting, leading to GC thrashing. Fix with global accounting?
inline size_t PerIsolateAccountingLength(JSArrayBuffer buffer) {
// Shared buffers are deliberately not charged to any single isolate.
if (buffer.is_shared()) return 0;
return buffer.byte_length();
}
void ArrayBufferTracker::RegisterNew(
Heap* heap, JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
......@@ -57,7 +49,7 @@ void ArrayBufferTracker::RegisterNew(
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
const size_t length = PerIsolateAccountingLength(buffer);
const size_t length = buffer.PerIsolateAccountingLength();
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
......@@ -66,7 +58,7 @@ std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
Heap* heap, JSArrayBuffer buffer) {
std::shared_ptr<BackingStore> backing_store;
const size_t length = PerIsolateAccountingLength(buffer);
const size_t length = buffer.PerIsolateAccountingLength();
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
......@@ -98,7 +90,7 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
it != array_buffers_.end();) {
// Unchecked cast because the map might already be dead at this point.
JSArrayBuffer buffer = JSArrayBuffer::unchecked_cast(it->first);
const size_t length = PerIsolateAccountingLength(buffer);
const size_t length = buffer.PerIsolateAccountingLength();
if (should_free(buffer)) {
// Destroy the shared pointer, (perhaps) freeing the backing store.
......@@ -135,7 +127,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
void LocalArrayBufferTracker::Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
auto length = PerIsolateAccountingLength(buffer);
auto length = buffer.PerIsolateAccountingLength();
page_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
......@@ -169,7 +161,7 @@ std::shared_ptr<BackingStore> LocalArrayBufferTracker::Remove(
array_buffers_.erase(it);
// Update accounting.
auto length = PerIsolateAccountingLength(buffer);
auto length = buffer.PerIsolateAccountingLength();
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
......
......@@ -50,7 +50,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
const size_t length = PerIsolateAccountingLength(old_buffer);
const size_t length = old_buffer.PerIsolateAccountingLength();
// We should decrement before adding to avoid potential overflows in
// the external memory counters.
tracker->AddInternal(new_buffer, std::move(it->second));
......@@ -60,7 +60,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
static_cast<MemoryChunk*>(target_page), length);
}
} else if (result == kRemoveEntry) {
freed_memory += PerIsolateAccountingLength(old_buffer);
freed_memory += old_buffer.PerIsolateAccountingLength();
auto backing_store = std::move(it->second);
TRACE_BS("ABT:queue bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
......
......@@ -4087,6 +4087,7 @@ void Heap::Verify() {
// We have to wait here for the sweeper threads to have an iterable heap.
mark_compact_collector()->EnsureSweepingCompleted();
array_buffer_sweeper()->EnsureFinished();
VerifyPointersVisitor visitor(this);
IterateRoots(&visitor, VISIT_ONLY_STRONG);
......@@ -5996,6 +5997,16 @@ void Heap::RememberUnmappedPage(Address page, bool compacted) {
remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}
// Total accounting length of all array buffers currently tracked in the
// sweeper's young-generation list. Only meaningful when the
// ArrayBufferExtension mechanism is enabled.
size_t Heap::YoungArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->young().Bytes();
}
// Total accounting length of all array buffers currently tracked in the
// sweeper's old-generation list. Only meaningful when the
// ArrayBufferExtension mechanism is enabled.
size_t Heap::OldArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->old().Bytes();
}
void Heap::RegisterStrongRoots(FullObjectSlot start, FullObjectSlot end) {
StrongRootsList* list = new StrongRootsList();
list->next = strong_roots_list_;
......
......@@ -644,6 +644,9 @@ class Heap {
V8_INLINE void update_external_memory_concurrently_freed(uintptr_t freed);
V8_INLINE void account_external_memory_concurrently_freed();
V8_EXPORT_PRIVATE size_t YoungArrayBufferBytes();
V8_EXPORT_PRIVATE size_t OldArrayBufferBytes();
size_t backing_store_bytes() const { return backing_store_bytes_; }
void CompactWeakArrayLists(AllocationType allocation);
......
......@@ -14,6 +14,7 @@
#include "src/base/platform/semaphore.h"
#include "src/common/globals.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
......@@ -2199,7 +2200,7 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = PerIsolateAccountingLength(array_buffer);
size_t size = array_buffer.PerIsolateAccountingLength();
external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
......@@ -2211,10 +2212,20 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
}
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
CHECK(allocation_pointer_found_in_space);
if (identity() == OLD_SPACE && V8_ARRAY_BUFFER_EXTENSION_BOOL) {
size_t bytes = heap()->array_buffer_sweeper()->old().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));
}
#ifdef DEBUG
VerifyCountersAfterSweeping(isolate->heap());
#endif
......@@ -2687,7 +2698,7 @@ void NewSpace::Verify(Isolate* isolate) {
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = PerIsolateAccountingLength(array_buffer);
size_t size = array_buffer.PerIsolateAccountingLength();
external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
......@@ -2701,10 +2712,19 @@ void NewSpace::Verify(Isolate* isolate) {
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
size_t bytes = heap()->array_buffer_sweeper()->young().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));
}
// Check semi-spaces.
CHECK_EQ(from_space_.id(), kFromSpace);
CHECK_EQ(to_space_.id(), kToSpace);
......
......@@ -2869,6 +2869,9 @@ class V8_EXPORT_PRIVATE NewSpace
}
// External backing-store bytes attributed to new space. With
// ArrayBufferExtensions enabled, array-buffer bytes are tracked by the
// sweeper's young list rather than per-chunk counters.
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
return heap()->YoungArrayBufferBytes();
// From-space holds no external bytes; only to-space is counted.
DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes(type));
return to_space_.ExternalBackingStoreBytes(type);
}
......@@ -3133,6 +3136,13 @@ class OldSpace : public PagedSpace {
return static_cast<intptr_t>(addr & kPageAlignmentMask) ==
MemoryChunkLayout::ObjectStartOffsetInDataPage();
}
// External backing-store bytes attributed to old space. With
// ArrayBufferExtensions enabled, array-buffer bytes come from the sweeper's
// old list instead of the per-space counter array.
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
return heap()->OldArrayBufferBytes();
return external_backing_store_bytes_[type];
}
};
// -----------------------------------------------------------------------------
......
......@@ -108,6 +108,14 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
JSArrayBuffer::IsSharedBit)
// Number of bytes this buffer contributes to per-isolate external-memory
// accounting: zero for shared buffers, otherwise the full byte length.
size_t JSArrayBuffer::PerIsolateAccountingLength() {
// TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems with
// accounting for per-isolate external memory. In particular, sharing the same
// array buffer or memory multiple times, which happens in stress tests, can
// cause overcounting, leading to GC thrashing. Fix with global accounting?
return is_shared() ? 0 : byte_length();
}
size_t JSArrayBufferView::byte_offset() const {
return ReadField<size_t>(kByteOffsetOffset);
}
......
......@@ -65,8 +65,11 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
if (!backing_store->free_on_destruct()) set_is_external(true);
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
Heap* heap = GetIsolate()->heap();
EnsureExtension(heap);
EnsureExtension();
extension()->set_backing_store(std::move(backing_store));
size_t bytes = PerIsolateAccountingLength();
extension()->set_accounting_length(bytes);
heap->AppendArrayBufferExtension(*this, extension());
} else {
GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
}
......@@ -113,14 +116,13 @@ std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
}
}
ArrayBufferExtension* JSArrayBuffer::EnsureExtension(Heap* heap) {
ArrayBufferExtension* JSArrayBuffer::EnsureExtension() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
if (extension() != nullptr) return extension();
ArrayBufferExtension* extension =
new ArrayBufferExtension(std::shared_ptr<BackingStore>());
set_extension(extension);
heap->AppendArrayBufferExtension(*this, extension);
return extension;
}
......
......@@ -107,7 +107,7 @@ class JSArrayBuffer : public JSObject {
// Allocates an ArrayBufferExtension for this array buffer, unless it is
// already associated with an extension.
ArrayBufferExtension* EnsureExtension(Heap* heap);
ArrayBufferExtension* EnsureExtension();
// Frees the associated ArrayBufferExtension and returns its backing store.
std::shared_ptr<BackingStore> RemoveExtension();
......@@ -117,6 +117,8 @@ class JSArrayBuffer : public JSObject {
void YoungMarkExtension();
void YoungMarkExtensionPromoted();
inline size_t PerIsolateAccountingLength();
// Dispatched behavior.
DECL_PRINTER(JSArrayBuffer)
DECL_VERIFIER(JSArrayBuffer)
......@@ -163,6 +165,7 @@ class ArrayBufferExtension : public Malloced {
std::atomic<GcState> young_gc_state_;
std::shared_ptr<BackingStore> backing_store_;
ArrayBufferExtension* next_;
std::size_t accounting_length_;
GcState young_gc_state() {
return young_gc_state_.load(std::memory_order_relaxed);
......@@ -177,12 +180,14 @@ class ArrayBufferExtension : public Malloced {
: marked_(false),
young_gc_state_(GcState::Dead),
backing_store_(std::shared_ptr<BackingStore>()),
next_(nullptr) {}
next_(nullptr),
accounting_length_(0) {}
explicit ArrayBufferExtension(std::shared_ptr<BackingStore> backing_store)
: marked_(false),
young_gc_state_(GcState::Dead),
backing_store_(backing_store),
next_(nullptr) {}
next_(nullptr),
accounting_length_(0) {}
void Mark() { marked_.store(true, std::memory_order_relaxed); }
void Unmark() { marked_.store(false, std::memory_order_relaxed); }
......@@ -198,6 +203,12 @@ class ArrayBufferExtension : public Malloced {
std::shared_ptr<BackingStore> backing_store() { return backing_store_; }
BackingStore* backing_store_raw() { return backing_store_.get(); }
size_t accounting_length() { return accounting_length_; }
void set_accounting_length(size_t accounting_length) {
accounting_length_ = accounting_length;
}
std::shared_ptr<BackingStore> RemoveBackingStore() {
return std::move(backing_store_);
}
......
......@@ -42,6 +42,11 @@ bool IsTracked(i::Heap* heap, i::ArrayBufferExtension* extension) {
return in_young || in_old;
}
// Dispatches to the extension-based or tracker-based IsTracked overload,
// depending on whether ArrayBufferExtensions are compiled in.
bool IsTracked(i::Heap* heap, i::JSArrayBuffer buffer) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
return IsTracked(heap, buffer.extension());
}
return IsTracked(buffer);
}
} // namespace
namespace v8 {
......@@ -525,9 +530,10 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
}
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (FLAG_never_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_concurrent_array_buffer_sweeping = false;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
......@@ -544,13 +550,13 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
Local<v8::ArrayBuffer> ab1 =
v8::ArrayBuffer::New(isolate, kArraybufferSize);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
CHECK(IsTracked(*buf1));
CHECK(IsTracked(heap, *buf1));
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
Page* page_before_gc = Page::FromHeapObject(*buf1);
heap::ForceEvacuationCandidate(page_before_gc);
CHECK(IsTracked(*buf1));
CHECK(IsTracked(heap, *buf1));
CcTest::CollectAllGarbage();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment