Commit ecc1508b authored by Dominik Inführ, committed by Commit Bot

[objects] Update JSArrayBuffer::extension-field in two steps

The JSArrayBuffer::extension-field might not be aligned with pointer
compression enabled. However on AArch64 pointers need to be aligned if
you perform atomic operations on them. Therefore split extension into
two 32-bit words that each get updated atomically. There is no ABA
problem here since the extension field only transitions from
NULL --> value --> NULL. After Detach(), Attach() isn't invoked anymore.

Bug: v8:10064
Change-Id: I20c1a37ac35d1749a94bfd277a4f91d531015bc5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2078586
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66515}
parent af3d9ee9
......@@ -46,25 +46,73 @@ void JSArrayBuffer::set_backing_store(void* value) {
// Reader side of the extension-field protocol. Returns the buffer's
// ArrayBufferExtension, or nullptr when none is published (or when the
// extension mechanism is compiled out).
ArrayBufferExtension* JSArrayBuffer::extension() const {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
    // With pointer compression the extension-field might not be
    // pointer-aligned. However on ARM64 this field needs to be aligned to
    // perform atomic operations on it. Therefore we split the pointer into two
    // 32-bit words that we update atomically. We don't have an ABA problem here
    // since there can never be an Attach() after Detach() (transitions only
    // from NULL --> some ptr --> NULL).
    // Synchronize with publishing release store of non-null extension
    uint32_t lo = base::AsAtomic32::Acquire_Load(extension_lo());
    // Bit 0 (kUninitializedTagMask) set means no extension is published yet.
    if (lo & kUninitializedTagMask) return nullptr;
    // Synchronize with release store of null extension
    uint32_t hi = base::AsAtomic32::Acquire_Load(extension_hi());
    // Re-read the low word: if it changed between the two loads, a concurrent
    // update to the field raced with us; conservatively report "no extension".
    uint32_t verify_lo = base::AsAtomic32::Relaxed_Load(extension_lo());
    if (lo != verify_lo) return nullptr;
    // Reassemble the full 64-bit pointer from its two 32-bit halves.
    uintptr_t address = static_cast<uintptr_t>(lo);
    address |= static_cast<uintptr_t>(hi) << 32;
    return reinterpret_cast<ArrayBufferExtension*>(address);
#else
    // Without pointer compression the field is pointer-aligned, so a single
    // atomic acquire load suffices.
    return base::AsAtomicPointer::Acquire_Load(extension_location());
#endif
  } else {
    return nullptr;
  }
}
// Writer side of the extension-field protocol. Publishes |extension| (or
// clears the field when |extension| is null) in two 32-bit atomic steps;
// see extension() for the matching reader ordering.
void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
    if (extension != nullptr) {
      // Publish: store the high half first (relaxed), then release-store the
      // untagged low half, so a reader that acquires the low word also
      // observes the matching high word.
      uintptr_t address = reinterpret_cast<uintptr_t>(extension);
      base::AsAtomic32::Relaxed_Store(extension_hi(),
                                      static_cast<uint32_t>(address >> 32));
      base::AsAtomic32::Release_Store(extension_lo(),
                                      static_cast<uint32_t>(address));
    } else {
      // Clear: tag the low half as uninitialized first so readers bail out
      // early, then release-store zero to the high half.
      base::AsAtomic32::Relaxed_Store(extension_lo(),
                                      0 | kUninitializedTagMask);
      base::AsAtomic32::Release_Store(extension_hi(), 0);
    }
#else
    // Pointer-aligned field: one atomic release store.
    base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
    // Inform the GC's incremental-marking barrier about the updated extension.
    MarkingBarrierForArrayBufferExtension(*this, extension);
  } else {
    // Extensions are compiled out; only null is ever legal here.
    CHECK_EQ(extension, nullptr);
  }
}
// Address of the extension field, viewed as a full pointer-sized slot
// (used when pointer compression is off).
ArrayBufferExtension** JSArrayBuffer::extension_location() const {
  return reinterpret_cast<ArrayBufferExtension**>(
      field_address(kExtensionOffset));
}
// NOTE(review): this is the pre-change, single-pointer-store version of
// set_extension() that the commit replaces; the page extraction kept both
// the removed and the added copy. Its closing brace was lost at the hunk
// boundary — do not treat this fragment as compilable on its own.
void JSArrayBuffer::set_extension(ArrayBufferExtension* value) {
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
    // Single atomic release store of the whole pointer.
    base::AsAtomicPointer::Release_Store(extension_location(), value);
    MarkingBarrierForArrayBufferExtension(*this, value);
  } else {
    CHECK_EQ(value, nullptr);
  }
#if V8_COMPRESS_POINTERS
// Low 32 bits of the extension field (first word at kExtensionOffset);
// also carries the kUninitializedTagMask bit.
uint32_t* JSArrayBuffer::extension_lo() const {
  return reinterpret_cast<uint32_t*>(field_address(kExtensionOffset));
}

// High 32 bits of the extension field, stored 4 bytes after the low word.
uint32_t* JSArrayBuffer::extension_hi() const {
  Address location = field_address(kExtensionOffset) + sizeof(uint32_t);
  return reinterpret_cast<uint32_t*>(location);
}
#endif
size_t JSArrayBuffer::allocation_length() const {
if (backing_store() == nullptr) {
......
......@@ -59,17 +59,18 @@ void JSArrayBuffer::Setup(SharedFlag shared,
// Attaches |backing_store| to this JSArrayBuffer and registers it for
// memory accounting. NOTE(review): the page extraction interleaved the
// commit's removed lines (repeated extension() accessor calls) with the
// added lines (a cached local |extension|); both variants appear below and
// the function's closing brace was lost at the hunk boundary.
void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
  DCHECK_NOT_NULL(backing_store);
  DCHECK_EQ(is_shared(), backing_store->is_shared());
  DCHECK(!was_detached());
  set_backing_store(backing_store->buffer_start());
  set_byte_length(backing_store->byte_length());
  // Wasm memories are never detachable; stores not freed on destruct are
  // treated as external.
  if (backing_store->is_wasm_memory()) set_is_detachable(false);
  if (!backing_store->free_on_destruct()) set_is_external(true);
  if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
    Heap* heap = GetIsolate()->heap();
    // Removed (old) lines: re-load the extension via the accessor per use.
    EnsureExtension();
    extension()->set_backing_store(std::move(backing_store));
    // Added (new) lines: bind EnsureExtension()'s result to a local once.
    ArrayBufferExtension* extension = EnsureExtension();
    extension->set_backing_store(std::move(backing_store));
    size_t bytes = PerIsolateAccountingLength();
    extension()->set_accounting_length(bytes);
    heap->AppendArrayBufferExtension(*this, extension());
    extension->set_accounting_length(bytes);
    heap->AppendArrayBufferExtension(*this, extension);
  } else {
    // Extension mechanism compiled out: the heap tracks the store directly.
    GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
  }
......@@ -118,10 +119,10 @@ std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
// Returns this buffer's ArrayBufferExtension, allocating and publishing a
// fresh one (holding an empty BackingStore) on first use. Only callable
// when the extension mechanism is compiled in.
//
// Fix: the extracted diff retained both the removed lines (early return via
// the extension() accessor, plus a second declaration of |extension|) and
// the added lines, which redefines the local and duplicates the allocation
// logic. Keep only the post-change version: load the field once, then
// allocate and publish if absent.
ArrayBufferExtension* JSArrayBuffer::EnsureExtension() {
  DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
  // Fast path: already allocated — avoid a second atomic accessor load.
  ArrayBufferExtension* extension = this->extension();
  if (extension != nullptr) return extension;
  // Slow path: create an extension with no backing store yet and publish it
  // through the atomic setter.
  extension = new ArrayBufferExtension(std::shared_ptr<BackingStore>());
  set_extension(extension);
  return extension;
}
......
......@@ -131,6 +131,13 @@ class JSArrayBuffer : public JSObject {
 private:
  // Raw address of the extension field, viewed as a full pointer slot
  // (non-pointer-compression builds).
  inline ArrayBufferExtension** extension_location() const;
#if V8_COMPRESS_POINTERS
  // With pointer compression the extension field may be only 4-byte aligned,
  // so it is accessed as two 32-bit halves via extension_lo()/extension_hi().
  // Bit 0 of the low word tags a not-yet-published (null) extension.
  static const int kUninitializedTagMask = 1;
  inline uint32_t* extension_lo() const;
  inline uint32_t* extension_hi() const;
#endif
};
// Each JSArrayBuffer (with a backing store) has a corresponding native-heap
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.