Commit 69fda08a authored by Dominik Inführ, committed by Commit Bot

[objects] Add ArrayBufferExtension class

This CL adds the ArrayBufferExtension class, which is used to track
JSArrayBuffers in a linked list. The ArrayBufferExtension will
eventually replace the ArrayBufferTracker but is currently behind the
v8_enable_array_buffer_extension feature flag.
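
As a sketch, a local build could opt in via GN args (the output
directory below is arbitrary; only the flag name comes from this CL):

  gn gen out/x64.release --args='v8_enable_array_buffer_extension = true'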

When enabled, each JSArrayBuffer has a corresponding ArrayBufferExtension
object allocated on the native heap. All extensions are currently
tracked in a single linked list. During marking, the GC marks not only
the JSArrayBuffer but also its extension object. At the end of
mark-compact, the GC iterates over all extensions and frees the
unmarked ones.
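
For intuition, the following is a minimal standalone sketch of that
mark-and-sweep scheme; the names are simplified stand-ins, not the
actual V8 classes added by this CL:

  #include <atomic>

  class Extension {
   public:
    void Mark() { marked_.store(true, std::memory_order_relaxed); }
    void Unmark() { marked_.store(false, std::memory_order_relaxed); }
    bool IsMarked() const { return marked_.load(std::memory_order_relaxed); }

    Extension* next() const { return next_; }
    void set_next(Extension* next) { next_ = next; }

   private:
    std::atomic<bool> marked_{false};  // Set by the marker, cleared by sweep.
    Extension* next_ = nullptr;        // Intrusive singly linked list.
  };

  // Sweep: delete unmarked extensions, relink and unmark the survivors.
  // The surviving list comes out reversed, which is harmless since the
  // list order carries no meaning.
  Extension* SweepExtensions(Extension* head) {
    Extension* surviving = nullptr;
    while (head != nullptr) {
      Extension* next = head->next();
      if (!head->IsMarked()) {
        delete head;
      } else {
        head->Unmark();
        head->set_next(surviving);
        surviving = head;
      }
      head = next;
    }
    return surviving;  // New head of the list of live extensions.
  }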

Change-Id: I88298be255944d5ae1327c91b0d7f0fdbcd486d5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1969791
Reviewed-by: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65724}
parent e83a4611
@@ -128,7 +128,7 @@ declare_args() {
   v8_enable_concurrent_marking = true
 
   # Sets -dV8_ARRAY_BUFFER_EXTENSION
-  v8_enable_array_buffer_extension = true
+  v8_enable_array_buffer_extension = false
 
   # Enables various testing features.
   v8_enable_test_features = ""
......
@@ -51,6 +51,7 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
   // - Set IsExternal and IsDetachable bits of BitFieldSlot.
   // - Set the byte_length field to byte_length.
   // - Set backing_store to null/Smi(0).
+  // - Set extension to null.
   // - Set all embedder fields to Smi(0).
   if (FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset));

@@ -70,6 +71,11 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
   StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBackingStoreOffset,
                                  IntPtrConstant(0),
                                  MachineType::PointerRepresentation());
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+    StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
+                                   IntPtrConstant(0),
+                                   MachineType::PointerRepresentation());
+  }
   for (int offset = JSArrayBuffer::kHeaderSize;
        offset < JSArrayBuffer::kSizeWithEmbedderFields; offset += kTaggedSize) {
     StoreObjectFieldNoWriteBarrier(buffer, offset, SmiConstant(0));
......
@@ -29,6 +29,10 @@ V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
 V8_EXPORT_PRIVATE void Heap_MarkingBarrierSlow(HeapObject object, Address slot,
                                                HeapObject value);
 V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);
+V8_EXPORT_PRIVATE void Heap_MarkingBarrierForArrayBufferExtensionSlow(
+    HeapObject object, ArrayBufferExtension* extension);
 V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
                                                            RelocInfo* rinfo,
                                                            HeapObject object);

@@ -144,6 +148,14 @@ inline void WriteBarrierForCode(Code host) {
   Heap_WriteBarrierForCodeSlow(host);
 }
 
+inline void MarkingBarrierForArrayBufferExtension(
+    HeapObject object, ArrayBufferExtension* extension) {
+  heap_internals::MemoryChunk* object_chunk =
+      heap_internals::MemoryChunk::FromHeapObject(object);
+  if (!extension || !object_chunk->IsMarking()) return;
+  Heap_MarkingBarrierForArrayBufferExtensionSlow(object, extension);
+}
+
 inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
                                 Object value) {
   if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
......
@@ -11,6 +11,7 @@
 namespace v8 {
 namespace internal {
 
+class ArrayBufferExtension;
 class Code;
 class FixedArray;
 class Heap;

@@ -40,6 +41,9 @@ void MarkingBarrier(HeapObject object, ObjectSlot slot, HeapObject value);
 void MarkingBarrier(HeapObject object, MaybeObjectSlot slot, MaybeObject value);
 void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
+void MarkingBarrierForArrayBufferExtension(HeapObject object,
+                                           ArrayBufferExtension* extension);
 void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
                                       HeapObject descriptor_array,
                                       int number_of_own_descriptors);
......
@@ -108,6 +108,11 @@ void Heap_WriteBarrierForCodeSlow(Code host) {
   Heap::WriteBarrierForCodeSlow(host);
 }
 
+void Heap_MarkingBarrierForArrayBufferExtensionSlow(
+    HeapObject object, ArrayBufferExtension* extension) {
+  Heap::MarkingBarrierForArrayBufferExtensionSlow(object, extension);
+}
+
 void Heap_GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
                                          HeapObject object) {
   Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);

@@ -3779,6 +3784,18 @@ void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
   UNREACHABLE();
 }
 
+void Heap::ReleaseAllArrayBufferExtensions() {
+  ArrayBufferExtension* current = array_buffer_extensions_;
+
+  while (current) {
+    ArrayBufferExtension* next = current->next();
+    delete current;
+    current = next;
+  }
+
+  array_buffer_extensions_ = nullptr;
+}
+
 void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
   initial_max_old_generation_size_threshold_ =
       initial_max_old_generation_size_ * threshold_percent;

@@ -5246,6 +5263,8 @@ void Heap::TearDown() {
   // It's too late for Heap::Verify() here, as parts of the Isolate are
   // already gone by the time this is called.
 
+  ReleaseAllArrayBufferExtensions();
+
   UpdateMaximumCommitted();
 
   if (FLAG_verify_predictable || FLAG_fuzzer_gc_analysis) {

@@ -6234,6 +6253,16 @@ void Heap::WriteBarrierForCodeSlow(Code code) {
   }
 }
 
+void Heap::MarkingBarrierForArrayBufferExtensionSlow(
+    HeapObject object, ArrayBufferExtension* extension) {
+  if (V8_CONCURRENT_MARKING_BOOL || GetIsolateFromWritableObject(object)
+                                        ->heap()
+                                        ->incremental_marking()
+                                        ->marking_state()
+                                        ->IsBlack(object))
+    extension->Mark();
+}
+
 void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
                                    HeapObject value) {
   MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
......
@@ -24,6 +24,7 @@
 #include "src/objects/allocation-site.h"
 #include "src/objects/fixed-array.h"
 #include "src/objects/heap-object.h"
+#include "src/objects/js-array-buffer.h"
 #include "src/objects/objects.h"
 #include "src/objects/smi.h"
 #include "src/objects/string-table.h"

@@ -385,6 +386,10 @@ class Heap {
                                               TSlot end);
 
   V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
+
+  V8_EXPORT_PRIVATE static void MarkingBarrierForArrayBufferExtensionSlow(
+      HeapObject object, ArrayBufferExtension* extension);
+
   V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
                                                         Address slot,
                                                         HeapObject value);

@@ -400,6 +405,10 @@ class Heap {
   V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
                                                           RelocInfo* rinfo,
                                                           HeapObject value);
+
+  static void MarkingBarrierForArrayBufferExtension(
+      JSArrayBuffer object, ArrayBufferExtension* extension);
+
   V8_EXPORT_PRIVATE static void MarkingBarrierForDescriptorArraySlow(
       Heap* heap, HeapObject host, HeapObject descriptor_array,
       int number_of_own_descriptors);

@@ -575,6 +584,21 @@ class Heap {
   V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
       double threshold_percent);
 
+  ArrayBufferExtension* array_buffer_extensions() {
+    return array_buffer_extensions_;
+  }
+
+  void set_array_buffer_extensions(ArrayBufferExtension* head) {
+    array_buffer_extensions_ = head;
+  }
+
+  void AppendArrayBufferExtension(ArrayBufferExtension* extension) {
+    extension->set_next(array_buffer_extensions_);
+    array_buffer_extensions_ = extension;
+  }
+
+  void ReleaseAllArrayBufferExtensions();
+
   V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs();
 
   void RecordStats(HeapStats* stats, bool take_snapshot = false);

@@ -1896,6 +1920,9 @@ class Heap {
   // Map from the space id to the space.
   Space* space_[LAST_SPACE + 1];
 
+  // List for tracking ArrayBufferExtensions.
+  ArrayBufferExtension* array_buffer_extensions_ = nullptr;
+
   // Determines whether code space is write-protected. This is essentially a
   // race-free copy of the {FLAG_write_protect_code_memory} flag.
   bool write_protect_code_memory_ = false;
......
@@ -885,6 +885,8 @@ void MarkCompactCollector::VerifyMarking() {
 void MarkCompactCollector::Finish() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
 
+  SweepArrayBufferExtensions();
+
 #ifdef DEBUG
   heap()->VerifyCountersBeforeConcurrentSweeping();
 #endif

@@ -925,6 +927,28 @@ void MarkCompactCollector::Finish() {
   }
 }
 
+void MarkCompactCollector::SweepArrayBufferExtensions() {
+  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
+  ArrayBufferExtension* current = heap_->array_buffer_extensions();
+  ArrayBufferExtension* last = nullptr;
+
+  while (current) {
+    ArrayBufferExtension* next = current->next();
+
+    if (!current->IsMarked()) {
+      delete current;
+    } else {
+      current->Unmark();
+      current->set_next(last);
+      last = current;
+    }
+
+    current = next;
+  }
+
+  heap_->set_array_buffer_extensions(last);
+}
+
 class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
  public:
   explicit RootMarkingVisitor(MarkCompactCollector* collector)
......
@@ -608,6 +608,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Finishes GC, performs heap verification if enabled.
   void Finish();
 
+  // Frees unmarked ArrayBufferExtensions.
+  void SweepArrayBufferExtensions();
+
   void MarkLiveObjects() override;
 
   // Marks the object black and adds it to the marking work list.
......
@@ -234,6 +234,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSApiObject(
 template <typename ConcreteVisitor, typename MarkingState>
 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSArrayBuffer(
     Map map, JSArrayBuffer object) {
+  object.MarkExtension();
   return VisitEmbedderTracingSubclass(map, object);
 }
......
@@ -44,17 +44,23 @@ void JSArrayBuffer::set_backing_store(void* value) {
   WriteField<Address>(kBackingStoreOffset, reinterpret_cast<Address>(value));
 }
 
-void* JSArrayBuffer::extension() const {
+ArrayBufferExtension* JSArrayBuffer::extension() const {
   if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
-    return reinterpret_cast<void*>(ReadField<Address>(kExtensionOffset));
+    return base::AsAtomicPointer::Acquire_Load(extension_location());
   } else {
     return nullptr;
   }
 }
 
-void JSArrayBuffer::set_extension(void* value) {
+ArrayBufferExtension** JSArrayBuffer::extension_location() const {
+  Address location = field_address(kExtensionOffset);
+  return reinterpret_cast<ArrayBufferExtension**>(location);
+}
+
+void JSArrayBuffer::set_extension(ArrayBufferExtension* value) {
   if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
-    WriteField<Address>(kExtensionOffset, reinterpret_cast<Address>(value));
+    base::AsAtomicPointer::Release_Store(extension_location(), value);
+    MarkingBarrierForArrayBufferExtension(*this, value);
   } else {
     CHECK_EQ(value, nullptr);
   }
......
@@ -63,7 +63,13 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
   set_byte_length(backing_store->byte_length());
   if (backing_store->is_wasm_memory()) set_is_detachable(false);
   if (!backing_store->free_on_destruct()) set_is_external(true);
-  GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+    Heap* heap = GetIsolate()->heap();
+    EnsureExtension(heap);
+    extension()->set_backing_store(std::move(backing_store));
+  } else {
+    GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
+  }
 }
 
 void JSArrayBuffer::Detach(bool force_for_wasm_memory) {

@@ -78,7 +84,12 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
   Isolate* const isolate = GetIsolate();
 
   if (backing_store()) {
-    auto backing_store = isolate->heap()->UnregisterBackingStore(*this);
+    std::shared_ptr<BackingStore> backing_store;
+    if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+      backing_store = RemoveExtension();
+    } else {
+      backing_store = isolate->heap()->UnregisterBackingStore(*this);
+    }
     CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
   }

@@ -94,7 +105,40 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
 }
 
 std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
-  return GetIsolate()->heap()->LookupBackingStore(*this);
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+    if (!extension()) return nullptr;
+    return extension()->backing_store();
+  } else {
+    return GetIsolate()->heap()->LookupBackingStore(*this);
+  }
+}
+
+ArrayBufferExtension* JSArrayBuffer::EnsureExtension(Heap* heap) {
+  DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
+  if (extension() != nullptr) return extension();
+
+  ArrayBufferExtension* extension =
+      new ArrayBufferExtension(std::shared_ptr<BackingStore>());
+  set_extension(extension);
+  heap->AppendArrayBufferExtension(extension);
+  return extension;
+}
+
+std::shared_ptr<BackingStore> JSArrayBuffer::RemoveExtension() {
+  ArrayBufferExtension* extension = this->extension();
+  DCHECK_NOT_NULL(extension);
+  auto result = extension->RemoveBackingStore();
+  // Remove the pointer to the extension such that the next GC will free it
+  // automatically.
+  set_extension(nullptr);
+  return result;
+}
+
+void JSArrayBuffer::MarkExtension() {
+  ArrayBufferExtension* extension = this->extension();
+  if (extension) {
+    extension->Mark();
+  }
 }
 
 Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
......
@@ -15,6 +15,8 @@
 namespace v8 {
 namespace internal {
 
+class ArrayBufferExtension;
+
 class JSArrayBuffer : public JSObject {
  public:
   // The maximum length for JSArrayBuffer's supported by V8.

@@ -34,7 +36,7 @@ class JSArrayBuffer : public JSObject {
   DECL_PRIMITIVE_ACCESSORS(backing_store, void*)
 
   // [extension]: extension object used for GC
-  DECL_PRIMITIVE_ACCESSORS(extension, void*)
+  DECL_PRIMITIVE_ACCESSORS(extension, ArrayBufferExtension*)
 
   // For non-wasm, allocation_length and allocation_base are byte_length and
   // backing_store, respectively.

@@ -103,6 +105,16 @@ class JSArrayBuffer : public JSObject {
   // or a zero-length array buffer).
   std::shared_ptr<BackingStore> GetBackingStore();
 
+  // Allocates an ArrayBufferExtension for this array buffer, unless it is
+  // already associated with an extension.
+  ArrayBufferExtension* EnsureExtension(Heap* heap);
+
+  // Frees the associated ArrayBufferExtension and returns its backing store.
+  std::shared_ptr<BackingStore> RemoveExtension();
+
+  // Marks the ArrayBufferExtension of this buffer, if one exists.
+  void MarkExtension();
+
   // Dispatched behavior.
   DECL_PRINTER(JSArrayBuffer)
   DECL_VERIFIER(JSArrayBuffer)

@@ -131,6 +143,49 @@ class JSArrayBuffer : public JSObject {
   class BodyDescriptor;
 
   OBJECT_CONSTRUCTORS(JSArrayBuffer, JSObject);
+
+ private:
+  inline ArrayBufferExtension** extension_location() const;
 };
 
+// Each JSArrayBuffer (with a backing store) has a corresponding native-heap
+// allocated ArrayBufferExtension, which is used for GC purposes and for
+// storing the backing store. When marking a JSArrayBuffer, the GC also marks
+// the native extension object. The GC periodically iterates all extensions
+// concurrently and frees unmarked ones.
+// https://docs.google.com/document/d/1-ZrLdlFX1nXT3z-FAgLbKal1gI8Auiaya_My-a0UJ28/edit
+class ArrayBufferExtension : public Malloced {
+  std::atomic<bool> marked_;
+  std::shared_ptr<BackingStore> backing_store_;
+  ArrayBufferExtension* next_;
+
+ public:
+  ArrayBufferExtension()
+      : marked_(false),
+        backing_store_(std::shared_ptr<BackingStore>()),
+        next_(nullptr) {}
+  explicit ArrayBufferExtension(std::shared_ptr<BackingStore> backing_store)
+      : marked_(false), backing_store_(backing_store), next_(nullptr) {}
+
+  void Mark() { marked_.store(true, std::memory_order_relaxed); }
+  void Unmark() { marked_.store(false, std::memory_order_relaxed); }
+  bool IsMarked() { return marked_.load(std::memory_order_relaxed); }
+
+  std::shared_ptr<BackingStore> backing_store() { return backing_store_; }
+  BackingStore* backing_store_raw() { return backing_store_.get(); }
+
+  std::shared_ptr<BackingStore> RemoveBackingStore() {
+    return std::move(backing_store_);
+  }
+
+  void set_backing_store(std::shared_ptr<BackingStore> backing_store) {
+    backing_store_ = std::move(backing_store);
+  }
+
+  void reset_backing_store() { backing_store_.reset(); }
+
+  ArrayBufferExtension* next() { return next_; }
+  void set_next(ArrayBufferExtension* extension) { next_ = extension; }
+};
 
 class JSArrayBufferView : public JSObject {
......
@@ -301,6 +301,7 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
     }
   } else if (obj.IsJSArrayBuffer()) {
     JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
+    buffer.set_extension(nullptr);
     // Only fixup for the off-heap case. This may trigger GC.
     if (buffer.backing_store() != nullptr) {
       new_off_heap_array_buffers_.push_back(handle(buffer, isolate_));
......
@@ -30,6 +30,7 @@ namespace heap {
 // moving the objects through various spaces during GC phases.
 
 TEST(ArrayBuffer_OnlyMC) {
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   LocalContext env;

@@ -58,6 +59,7 @@ TEST(ArrayBuffer_OnlyMC) {
 }
 
 TEST(ArrayBuffer_OnlyScavenge) {
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   LocalContext env;

@@ -88,6 +90,7 @@ TEST(ArrayBuffer_OnlyScavenge) {
 }
 
 TEST(ArrayBuffer_ScavengeAndMC) {
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   LocalContext env;

@@ -120,7 +123,7 @@ TEST(ArrayBuffer_ScavengeAndMC) {
 }
 
 TEST(ArrayBuffer_Compaction) {
-  if (FLAG_never_compact) return;
+  if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   FLAG_manual_evacuation_candidates_selection = true;
   CcTest::InitializeVM();

@@ -149,6 +152,7 @@ TEST(ArrayBuffer_Compaction) {
 }
 
 TEST(ArrayBuffer_UnregisterDuringSweep) {
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   // Regular pages in old space (without compaction) are processed concurrently
   // in the sweeper. If we happen to unregister a buffer (either explicitly, or
   // implicitly through e.g. |Detach|) we need to sync with the sweeper

@@ -197,7 +201,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
 }
 
 TEST(ArrayBuffer_NonLivePromotion) {
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   // The test verifies that the marking state is preserved when promoting
   // a buffer to old space.

@@ -234,7 +238,7 @@ TEST(ArrayBuffer_NonLivePromotion) {
 }
 
 TEST(ArrayBuffer_LivePromotion) {
-  if (!FLAG_incremental_marking) return;
+  if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   // The test verifies that the marking state is preserved when promoting
   // a buffer to old space.

@@ -270,7 +274,7 @@ TEST(ArrayBuffer_LivePromotion) {
 }
 
 TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
-  if (!i::FLAG_incremental_marking) return;
+  if (!i::FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   // The test verifies that the marking state is preserved across semispace
   // copy.

@@ -337,6 +341,7 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
 }
 
 TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
+  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   CcTest::InitializeVM();
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();

@@ -378,7 +383,7 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
 }
 
 TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
-  if (FLAG_never_compact) return;
+  if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
   ManualGCScope manual_gc_scope;
   FLAG_manual_evacuation_candidates_selection = true;
   CcTest::InitializeVM();
......
@@ -167,7 +167,8 @@ UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
     CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
     CHECK(to_be_promoted_page->Contains(first_object->address()));
     CHECK(to_be_promoted_page->Contains(buffer->address()));
-    CHECK(ArrayBufferTracker::IsTracked(*buffer));
+    if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
+      CHECK(ArrayBufferTracker::IsTracked(*buffer));
   }
   isolate->Dispose();
 }

@@ -212,7 +213,8 @@ UNINITIALIZED_TEST(PagePromotion_NewToOldJSArrayBuffer) {
     CHECK(heap->old_space()->ContainsSlow(buffer->address()));
     CHECK(to_be_promoted_page->Contains(first_object->address()));
     CHECK(to_be_promoted_page->Contains(buffer->address()));
-    CHECK(ArrayBufferTracker::IsTracked(*buffer));
+    if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
+      CHECK(ArrayBufferTracker::IsTracked(*buffer));
   }
   isolate->Dispose();
 }
......