Commit 55d00c95 authored by Michael Lippautz, committed by Commit Bot

[heap] Fix ArrayBufferTracker accessing already swept byte length

The tracker needs to maintain the byte length itself because there is no ordering guarantee
when sweeping pages, and the byte length may be a HeapNumber that is stored on a
different page.
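
For illustration, a minimal self-contained sketch of the idea (not the V8 classes;
FakeBuffer and SketchTracker are made-up names): the length is captured once at
registration, and only that copy is consulted when freeing, so sweeping never
dereferences a possibly already swept HeapNumber.

  #include <cstddef>
  #include <cstdio>
  #include <functional>
  #include <unordered_map>

  // Stand-in for JSArrayBuffer; in V8 the byte length may live in a separate
  // HeapNumber object on another page.
  struct FakeBuffer {};

  class SketchTracker {
   public:
    // Capture the length at registration time.
    void Add(FakeBuffer* buffer, size_t length) {
      array_buffers_.insert({buffer, length});
    }

    // Free dead buffers using only the tracked length; the (possibly dead)
    // object is never asked for its length again.
    void Free(const std::function<bool(FakeBuffer*)>& should_free) {
      for (auto it = array_buffers_.begin(); it != array_buffers_.end();) {
        if (should_free(it->first)) {
          std::printf("freeing %zu bytes\n", it->second);
          it = array_buffers_.erase(it);
        } else {
          ++it;
        }
      }
    }

   private:
    std::unordered_map<FakeBuffer*, size_t> array_buffers_;
  };

  int main() {
    FakeBuffer buffer;
    SketchTracker tracker;
    tracker.Add(&buffer, 1024);
    tracker.Free([](FakeBuffer*) { return true; });  // prints "freeing 1024 bytes"
  }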

The abstraction for ArrayBuffers is left untouched. We distinguish between the
following cases:
1. Regular AB (backing_store and byte_length should be used)
2. AB allocated using kReservation but not part of wasm
3. AB allocated using kReservation and part of wasm

In practice, 2. does not exist, but we still maintain "allocation_base" and
"allocation_length", which fall back to backing_store and byte_length in this
case. The problematic part is that they look like innocent getters on the
object but actually refer to different data structures or to on-heap objects.

Since 2. does not exist, and 3. looks up the bounds in its own tracker, it is
fine for ArrayBufferTracker to pass backing_store and tracked byte_length.
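
To make the fallback concrete, a small illustrative sketch (not the real
JSArrayBuffer; the real accessors and the wasm-side WasmMemoryTracker lookup
live in V8 itself):

  #include <cstddef>

  // Illustrative only: the real accessors live on JSArrayBuffer, and the wasm
  // case resolves its reservation through the WasmMemoryTracker instead.
  struct BufferSketch {
    void* backing_store = nullptr;
    size_t byte_length = 0;
    bool is_wasm_memory = false;

    // Cases 1 and 2 above: the "allocation" view is just an alias for the
    // backing store and byte length, so handing the collector backing_store
    // plus the tracked byte_length is equivalent.
    void* allocation_base() const {
      // Case 3 (wasm) would consult its own tracker here; elided in this sketch.
      return backing_store;
    }
    size_t allocation_length() const { return byte_length; }
  };

  int main() {
    BufferSketch b;
    b.byte_length = 64;
    return (b.allocation_length() == 64 && b.allocation_base() == nullptr) ? 0 : 1;
  }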

Bug: v8:7701
Change-Id: Ib89d5fe94fce5cef8e5d8343a5415a3b9ad0deba
Reviewed-on: https://chromium-review.googlesource.com/1039385
Reviewed-by: Ben Titzer <titzer@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52923}
parent a3770c73
@@ -51,12 +51,15 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
 template <typename Callback>
 void LocalArrayBufferTracker::Free(Callback should_free) {
   size_t new_retained_size = 0;
+  Isolate* isolate = heap_->isolate();
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
-    JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(*it);
-    const size_t length = buffer->byte_length()->Number();
+    JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
+    const size_t length = it->second;
     if (should_free(buffer)) {
-      buffer->FreeBackingStore();
+      JSArrayBuffer::FreeBackingStore(
+          isolate, {buffer->backing_store(), length, buffer->backing_store(),
+                    buffer->allocation_mode(), buffer->is_wasm_memory()});
       it = array_buffers_.erase(it);
     } else {
       new_retained_size += length;
@@ -87,7 +90,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
 void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
   DCHECK_GE(retained_size_ + length, retained_size_);
   retained_size_ += length;
-  auto ret = array_buffers_.insert(buffer);
+  auto ret = array_buffers_.insert({buffer, length});
   USE(ret);
   // Check that we indeed inserted a new value and did not overwrite an existing
   // one (which would be a bug).
@@ -100,6 +103,7 @@ void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
   TrackingData::iterator it = array_buffers_.find(buffer);
   // Check that we indeed find a key to remove.
   DCHECK(it != array_buffers_.end());
+  DCHECK_EQ(length, it->second);
   array_buffers_.erase(it);
 }
...
@@ -29,7 +29,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
   size_t moved_size = 0;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
-    old_buffer = reinterpret_cast<JSArrayBuffer*>(*it);
+    old_buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
     const CallbackResult result = callback(old_buffer, &new_buffer);
     if (result == kKeepEntry) {
       new_retained_size += NumberToSize(old_buffer->byte_length());
@@ -51,14 +51,12 @@ void LocalArrayBufferTracker::Process(Callback callback) {
       }
       it = array_buffers_.erase(it);
     } else if (result == kRemoveEntry) {
-      // Size of freed memory is computed to avoid looking at dead objects.
-      void* allocation_base = old_buffer->allocation_base();
-      DCHECK_NOT_NULL(allocation_base);
+      // We pass backing_store() and stored length to the collector for freeing
+      // the backing store. Wasm allocations will go through their own tracker
+      // based on the backing store.
       backing_stores_to_free->emplace_back(
-          allocation_base, old_buffer->allocation_length(),
-          old_buffer->backing_store(), old_buffer->allocation_mode(),
-          old_buffer->is_wasm_memory());
+          old_buffer->backing_store(), it->second, old_buffer->backing_store(),
+          old_buffer->allocation_mode(), old_buffer->is_wasm_memory());
       it = array_buffers_.erase(it);
     } else {
       UNREACHABLE();
...
@@ -5,7 +5,7 @@
 #ifndef V8_HEAP_ARRAY_BUFFER_TRACKER_H_
 #define V8_HEAP_ARRAY_BUFFER_TRACKER_H_
 
-#include <unordered_set>
+#include <unordered_map>
 
 #include "src/allocation.h"
 #include "src/base/platform/mutex.h"
@@ -111,7 +111,12 @@ class LocalArrayBufferTracker {
     }
   };
 
-  typedef std::unordered_set<JSArrayBuffer*, Hasher> TrackingData;
+  // Keep track of the backing store and the corresponding length at time of
+  // registering. The length is accessed from JavaScript and can be a
+  // HeapNumber. The reason for tracking the length is that in the case of
+  // length being a HeapNumber, the buffer and its length may be stored on
+  // different memory pages, making it impossible to guarantee order of freeing.
+  typedef std::unordered_map<JSArrayBuffer*, size_t, Hasher> TrackingData;
 
   Heap* heap_;
   // The set contains raw heap pointers which are removed by the GC upon
...
@@ -19192,7 +19192,7 @@ void JSArrayBuffer::Neuter() {
   }
 }
 
-void JSArrayBuffer::FreeBackingStore() {
+void JSArrayBuffer::FreeBackingStoreFromMainThread() {
   if (allocation_base() == nullptr) {
     return;
   }
...
@@ -140,6 +140,8 @@ class JSArrayBuffer : public JSObject {
   // [backing_store]: backing memory for this array
   DECL_ACCESSORS(backing_store, void)
 
+  // For non-wasm, allocation_length and allocation_base are byte_length and
+  // backing_store, respectively.
   inline size_t allocation_length() const;
   inline void* allocation_base() const;
 
@@ -194,7 +196,7 @@ class JSArrayBuffer : public JSObject {
   // Sets whether the buffer is tracked by the WasmMemoryTracker.
   void set_is_wasm_memory(bool is_wasm_memory);
 
-  void FreeBackingStore();
+  void FreeBackingStoreFromMainThread();
   static void FreeBackingStore(Isolate* isolate, Allocation allocation);
 
   V8_EXPORT_PRIVATE static void Setup(
...
@@ -322,7 +322,7 @@ void DetachMemoryBuffer(Isolate* isolate, Handle<JSArrayBuffer> buffer,
     // by Neuter. This means there is a dangling pointer until we neuter the
     // buffer. Since there is no way for the user to directly call
     // FreeBackingStore, we can ensure this is safe.
-    buffer->FreeBackingStore();
+    buffer->FreeBackingStoreFromMainThread();
   }
 }
...
@@ -1024,7 +1024,7 @@ struct ManuallyExternalizedBuffer {
   }
   ~ManuallyExternalizedBuffer() {
     if (should_free_) {
-      buffer_->FreeBackingStore();
+      buffer_->FreeBackingStoreFromMainThread();
    }
   }
 };
...