Commit b5c917ee authored by Ulan Degenbaev, committed by Commit Bot

[api] New function for reallocating ArrayBuffer backing store

This patch adds a new BackingStore::Reallocate function that internally
uses a new ArrayBuffer::Allocator::Reallocate provided by the embedder.

The default implementation of the function simply copies the backing
store. The embedder can override the function and provide a more
efficient implementation e.g. using realloc.

Bug: v8:9908, v8:9380

Change-Id: I2179c80ba199c045b6900c620a813916150e7098
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2007274
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66044}
parent 6c5f6ea3
...@@ -4977,6 +4977,15 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase { ...@@ -4977,6 +4977,15 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
*/ */
bool IsShared() const; bool IsShared() const;
/**
* Wrapper around ArrayBuffer::Allocator::Reallocate that preserves IsShared.
* Assumes that the backing_store was allocated by the ArrayBuffer allocator
* of the given isolate.
*/
static std::unique_ptr<BackingStore> Reallocate(
v8::Isolate* isolate, std::unique_ptr<BackingStore> backing_store,
size_t byte_length);
private: private:
/** /**
* See [Shared]ArrayBuffer::GetBackingStore and * See [Shared]ArrayBuffer::GetBackingStore and
...@@ -5018,13 +5027,13 @@ class V8_EXPORT ArrayBuffer : public Object { ...@@ -5018,13 +5027,13 @@ class V8_EXPORT ArrayBuffer : public Object {
virtual ~Allocator() = default; virtual ~Allocator() = default;
/** /**
* Allocate |length| bytes. Return NULL if allocation is not successful. * Allocate |length| bytes. Return nullptr if allocation is not successful.
* Memory should be initialized to zeroes. * Memory should be initialized to zeroes.
*/ */
virtual void* Allocate(size_t length) = 0; virtual void* Allocate(size_t length) = 0;
/** /**
* Allocate |length| bytes. Return NULL if allocation is not successful. * Allocate |length| bytes. Return nullptr if allocation is not successful.
* Memory does not have to be initialized. * Memory does not have to be initialized.
*/ */
virtual void* AllocateUninitialized(size_t length) = 0; virtual void* AllocateUninitialized(size_t length) = 0;
...@@ -5035,6 +5044,20 @@ class V8_EXPORT ArrayBuffer : public Object { ...@@ -5035,6 +5044,20 @@ class V8_EXPORT ArrayBuffer : public Object {
*/ */
virtual void Free(void* data, size_t length) = 0; virtual void Free(void* data, size_t length) = 0;
/**
* Reallocate the memory block of size |old_length| to a memory block of
* size |new_length| by expanding, contracting, or copying the existing
* memory block. If |new_length| > |old_length|, then the new part of
* the memory must be initialized to zeros. Return nullptr if reallocation
* is not successful.
*
* The caller guarantees that the memory block was previously allocated
* using Allocate or AllocateUninitialized.
*
* The default implementation allocates a new block and copies data.
*/
virtual void* Reallocate(void* data, size_t old_length, size_t new_length);
/** /**
* ArrayBuffer allocation mode. kNormal is a malloc/free style allocation, * ArrayBuffer allocation mode. kNormal is a malloc/free style allocation,
* while kReservation is for larger allocations with the ability to set * while kReservation is for larger allocations with the ability to set
......
...@@ -542,6 +542,21 @@ class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator { ...@@ -542,6 +542,21 @@ class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
} }
void Free(void* data, size_t) override { free(data); } void Free(void* data, size_t) override { free(data); }
// Resizes |data| from |old_length| to |new_length| bytes via realloc and
// zero-fills any newly grown region, per the Allocator::Reallocate contract.
// Returns nullptr on allocation failure (the original block stays valid).
void* Reallocate(void* data, size_t old_length, size_t new_length) override {
#if V8_OS_AIX && _LINUX_SOURCE_COMPAT
  // Work around for GCC bug on AIX
  // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79839
  void* new_data = __linux_realloc(data, new_length);
#else
  void* new_data = realloc(data, new_length);
#endif
  // realloc returns nullptr on failure; bail out before the memset below
  // would dereference a null pointer (UB) when growing.
  if (new_data == nullptr) return nullptr;
  if (new_length > old_length) {
    // Grown tail must read as zeros, matching Allocate()'s zeroing contract.
    memset(reinterpret_cast<uint8_t*>(new_data) + old_length, 0,
           new_length - old_length);
  }
  return new_data;
}
}; };
struct SnapshotCreatorData { struct SnapshotCreatorData {
...@@ -3784,6 +3799,22 @@ bool v8::BackingStore::IsShared() const { ...@@ -3784,6 +3799,22 @@ bool v8::BackingStore::IsShared() const {
return reinterpret_cast<const i::BackingStore*>(this)->is_shared(); return reinterpret_cast<const i::BackingStore*>(this)->is_shared();
} }
// static
// Public API wrapper: resizes |backing_store| to |byte_length| through the
// internal BackingStore::Reallocate (which forwards to the isolate's
// ArrayBuffer::Allocator). On allocation failure this crashes the process
// rather than returning nullptr, so callers always receive a valid store.
std::unique_ptr<v8::BackingStore> v8::BackingStore::Reallocate(
    v8::Isolate* isolate, std::unique_ptr<v8::BackingStore> backing_store,
    size_t byte_length) {
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  LOG_API(i_isolate, ArrayBuffer, BackingStore_Reallocate);
  // Reject lengths a JSArrayBuffer could never hold.
  CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
  ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
  // The public v8::BackingStore wraps the internal i::BackingStore; the same
  // reinterpret_cast pattern is used elsewhere in this file (e.g. IsShared).
  i::BackingStore* i_backing_store =
      reinterpret_cast<i::BackingStore*>(backing_store.get());
  if (!i_backing_store->Reallocate(i_isolate, byte_length)) {
    i::FatalProcessOutOfMemory(i_isolate, "v8::BackingStore::Reallocate");
  }
  // Ownership round-trips back to the caller; the store was resized in place.
  return backing_store;
}
std::shared_ptr<v8::BackingStore> v8::ArrayBuffer::GetBackingStore() { std::shared_ptr<v8::BackingStore> v8::ArrayBuffer::GetBackingStore() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this); i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
std::shared_ptr<i::BackingStore> backing_store = self->GetBackingStore(); std::shared_ptr<i::BackingStore> backing_store = self->GetBackingStore();
...@@ -7262,6 +7293,21 @@ void WasmModuleObjectBuilderStreaming::Finish() {} ...@@ -7262,6 +7293,21 @@ void WasmModuleObjectBuilderStreaming::Finish() {}
void WasmModuleObjectBuilderStreaming::Abort(MaybeLocal<Value> exception) {} void WasmModuleObjectBuilderStreaming::Abort(MaybeLocal<Value> exception) {}
// Default Reallocate: emulates realloc on top of the embedder's
// AllocateUninitialized/Free pair. Preserves min(old, new) bytes, zero-fills
// any newly grown tail, and releases the old block only on success. Returns
// nullptr (leaving |data| valid) if the new allocation fails. Embedders may
// override this with something cheaper, e.g. a real realloc.
void* v8::ArrayBuffer::Allocator::Reallocate(void* data, size_t old_length,
                                             size_t new_length) {
  if (new_length == old_length) return data;  // Nothing to do.
  void* raw = AllocateUninitialized(new_length);
  if (raw == nullptr) return nullptr;  // Old block stays untouched on failure.
  uint8_t* fresh = reinterpret_cast<uint8_t*>(raw);
  const size_t preserved = new_length < old_length ? new_length : old_length;
  memcpy(fresh, data, preserved);
  if (preserved < new_length) {
    // Grown region must read as zeros, matching Allocate()'s contract.
    memset(fresh + preserved, 0, new_length - preserved);
  }
  Free(data, old_length);
  return fresh;
}
// static // static
v8::ArrayBuffer::Allocator* v8::ArrayBuffer::Allocator::NewDefaultAllocator() { v8::ArrayBuffer::Allocator* v8::ArrayBuffer::Allocator::NewDefaultAllocator() {
return new ArrayBufferAllocator(); return new ArrayBufferAllocator();
......
...@@ -735,6 +735,7 @@ class RuntimeCallTimer final { ...@@ -735,6 +735,7 @@ class RuntimeCallTimer final {
V(ArrayBuffer_Detach) \ V(ArrayBuffer_Detach) \
V(ArrayBuffer_New) \ V(ArrayBuffer_New) \
V(ArrayBuffer_NewBackingStore) \ V(ArrayBuffer_NewBackingStore) \
V(ArrayBuffer_BackingStore_Reallocate) \
V(Array_CloneElementAt) \ V(Array_CloneElementAt) \
V(Array_New) \ V(Array_New) \
V(BigInt64Array_New) \ V(BigInt64Array_New) \
......
...@@ -561,6 +561,21 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore( ...@@ -561,6 +561,21 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
return std::unique_ptr<BackingStore>(result); return std::unique_ptr<BackingStore>(result);
} }
// Resizes this backing store in place through the embedder-provided
// ArrayBuffer::Allocator::Reallocate. Returns false if the allocator failed;
// on failure the store (buffer_start_, lengths) is left unchanged.
bool BackingStore::Reallocate(Isolate* isolate, size_t new_byte_length) {
  // Only plain, allocator-owned stores can be reallocated: wasm memories,
  // custom-deleter stores, and globally registered stores manage their
  // memory through other mechanisms.
  CHECK(!is_wasm_memory_ && !custom_deleter_ && !globally_registered_ &&
        free_on_destruct_);
  auto allocator = get_v8_api_array_buffer_allocator();
  // The public API contract says the store was allocated by this isolate's
  // allocator; verify rather than trust the caller.
  CHECK_EQ(isolate->array_buffer_allocator(), allocator);
  // Only stores whose capacity equals their length are supported here.
  CHECK_EQ(byte_length_, byte_capacity_);
  void* new_start =
      allocator->Reallocate(buffer_start_, byte_length_, new_byte_length);
  if (!new_start) return false;
  // Success: adopt the (possibly moved) block and the new size.
  buffer_start_ = new_start;
  byte_capacity_ = new_byte_length;
  byte_length_ = new_byte_length;
  return true;
}
v8::ArrayBuffer::Allocator* BackingStore::get_v8_api_array_buffer_allocator() { v8::ArrayBuffer::Allocator* BackingStore::get_v8_api_array_buffer_allocator() {
CHECK(!is_wasm_memory_); CHECK(!is_wasm_memory_);
auto array_buffer_allocator = auto array_buffer_allocator =
......
...@@ -87,6 +87,9 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase { ...@@ -87,6 +87,9 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
bool GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages, bool GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages,
size_t max_pages); size_t max_pages);
// Wrapper around ArrayBuffer::Allocator::Reallocate.
bool Reallocate(Isolate* isolate, size_t new_byte_length);
// Allocate a new, larger, backing store for this Wasm memory and copy the // Allocate a new, larger, backing store for this Wasm memory and copy the
// contents of this backing store into it. // contents of this backing store into it.
std::unique_ptr<BackingStore> CopyWasmMemory(Isolate* isolate, std::unique_ptr<BackingStore> CopyWasmMemory(Isolate* isolate,
......
...@@ -761,3 +761,74 @@ TEST(BackingStore_HoldAllocatorAlive_AfterIsolateShutdown) { ...@@ -761,3 +761,74 @@ TEST(BackingStore_HoldAllocatorAlive_AfterIsolateShutdown) {
backing_store.reset(); backing_store.reset();
CHECK(allocator_weak.expired()); CHECK(allocator_weak.expired());
} }
// Growing a backing store must preserve the old contents and zero-fill the
// newly added tail.
TEST(BackingStore_ReallocateExpand) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  std::unique_ptr<v8::BackingStore> backing_store =
      v8::ArrayBuffer::NewBackingStore(isolate, 10);
  {
    // Fill the original store with a recognizable pattern.
    // Note: direct cast of Data(), consistent with the Shrink test below;
    // the former round-trip through uintptr_t was redundant.
    uint8_t* data = reinterpret_cast<uint8_t*>(backing_store->Data());
    for (uint8_t i = 0; i < 10; i++) {
      data[i] = i;
    }
  }
  std::unique_ptr<v8::BackingStore> new_backing_store =
      v8::BackingStore::Reallocate(isolate, std::move(backing_store), 20);
  CHECK_EQ(new_backing_store->ByteLength(), 20);
  CHECK(!new_backing_store->IsShared());
  {
    uint8_t* data = reinterpret_cast<uint8_t*>(new_backing_store->Data());
    // Old contents preserved...
    for (uint8_t i = 0; i < 10; i++) {
      CHECK_EQ(data[i], i);
    }
    // ...and the grown region reads as zeros.
    for (uint8_t i = 10; i < 20; i++) {
      CHECK_EQ(data[i], 0);
    }
  }
}
// Shrinking a backing store must keep the surviving prefix intact.
TEST(BackingStore_ReallocateShrink) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  std::unique_ptr<v8::BackingStore> original =
      v8::ArrayBuffer::NewBackingStore(isolate, 20);
  {
    // Seed every byte with its own index.
    uint8_t* bytes = reinterpret_cast<uint8_t*>(original->Data());
    for (uint8_t i = 0; i < 20; i++) bytes[i] = i;
  }
  std::unique_ptr<v8::BackingStore> shrunk =
      v8::BackingStore::Reallocate(isolate, std::move(original), 10);
  CHECK_EQ(shrunk->ByteLength(), 10);
  CHECK(!shrunk->IsShared());
  {
    // The first 10 bytes must have survived the shrink.
    const uint8_t* bytes = reinterpret_cast<const uint8_t*>(shrunk->Data());
    for (uint8_t i = 0; i < 10; i++) CHECK_EQ(bytes[i], i);
  }
}
// Reallocate must preserve the non-shared flag of an ArrayBuffer store.
TEST(BackingStore_ReallocateNotShared) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  auto store = v8::ArrayBuffer::NewBackingStore(isolate, 20);
  auto reallocated =
      v8::BackingStore::Reallocate(isolate, std::move(store), 10);
  CHECK(!reallocated->IsShared());
}
// Reallocate must preserve the shared flag of a SharedArrayBuffer store.
TEST(BackingStore_ReallocateShared) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  auto store = v8::SharedArrayBuffer::NewBackingStore(isolate, 20);
  auto reallocated =
      v8::BackingStore::Reallocate(isolate, std::move(store), 10);
  CHECK(reallocated->IsShared());
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment