Commit bba5f1f4 authored by Ulan Degenbaev, committed by Commit Bot

[api] Add API functions for constructing standalone BackingStores

These are useful for the cases when an embedder works with backing
stores without creating JS ArrayBuffer objects.

Bug: v8:9380
Change-Id: I452bd911e7b20fb38568f18f9d15ea1a7ffb5a57
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1825339
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64460}
parent 52a54bd1
......@@ -4872,6 +4872,14 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
BackingStore();
};
/**
 * A callback used to free the memory block of a standalone BackingStore when
 * the block was not allocated with an ArrayBuffer::Allocator (i.e. it was
 * supplied by the embedder together with this deleter). The destructor of the
 * BackingStore invokes the callback with the block's start address, its
 * length in bytes, and the opaque |deleter_data| given at creation time.
 */
using BackingStoreDeleterCallback = void (*)(void* data, size_t length,
                                             void* deleter_data);
/**
* An instance of the built-in ArrayBuffer constructor (ES6 draft 15.13.5).
*/
......@@ -5022,6 +5030,29 @@ class V8_EXPORT ArrayBuffer : public Object {
static Local<ArrayBuffer> New(Isolate* isolate,
std::shared_ptr<BackingStore> backing_store);
/**
* Returns a new standalone BackingStore that is allocated using the array
* buffer allocator of the isolate. The result can be later passed to
* ArrayBuffer::New.
*
* If the allocator returns nullptr, then the function may cause GCs in the
* given isolate and re-try the allocation. If GCs do not help, then the
* function will crash with an out-of-memory error.
*/
static std::unique_ptr<BackingStore> NewBackingStore(Isolate* isolate,
size_t byte_length);
/**
* Returns a new standalone BackingStore that takes over the ownership of
* the given buffer. The destructor of the BackingStore invokes the given
* deleter callback.
*
* The result can be later passed to ArrayBuffer::New. The raw pointer
* to the buffer must not be passed again to any V8 API function.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* deleter_data);
/**
* Returns true if ArrayBuffer is externalized, that is, does not
* own its memory block.
......@@ -5472,6 +5503,29 @@ class V8_EXPORT SharedArrayBuffer : public Object {
static Local<SharedArrayBuffer> New(
Isolate* isolate, std::shared_ptr<BackingStore> backing_store);
/**
* Returns a new standalone BackingStore that is allocated using the array
* buffer allocator of the isolate. The result can be later passed to
* SharedArrayBuffer::New.
*
* If the allocator returns nullptr, then the function may cause GCs in the
* given isolate and re-try the allocation. If GCs do not help, then the
* function will crash with an out-of-memory error.
*/
static std::unique_ptr<BackingStore> NewBackingStore(Isolate* isolate,
size_t byte_length);
/**
* Returns a new standalone BackingStore that takes over the ownership of
* the given buffer. The destructor of the BackingStore invokes the given
* deleter callback.
*
* The result can be later passed to SharedArrayBuffer::New. The raw pointer
* to the buffer must not be passed again to any V8 functions.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* deleter_data);
/**
* Create a new SharedArrayBuffer over an existing memory block. Propagate
* flags to indicate whether the underlying buffer can be grown.
......
......@@ -7458,6 +7458,32 @@ Local<ArrayBuffer> v8::ArrayBuffer::New(
return Utils::ToLocal(obj);
}
// Allocates a zero-initialized, non-shared standalone backing store of
// |byte_length| bytes through the isolate's array buffer allocator. If the
// allocation fails (even after the GC retries performed inside
// i::BackingStore::Allocate), the process is terminated with an OOM error.
std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
    Isolate* isolate, size_t byte_length) {
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  LOG_API(i_isolate, ArrayBuffer, NewBackingStore);
  ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
  std::unique_ptr<i::BackingStoreBase> result = i::BackingStore::Allocate(
      i_isolate, byte_length, i::SharedFlag::kNotShared,
      i::InitializedFlag::kZeroInitialized);
  if (result == nullptr) {
    i::FatalProcessOutOfMemory(i_isolate, "v8::ArrayBuffer::NewBackingStore");
  }
  return std::unique_ptr<v8::BackingStore>(
      static_cast<v8::BackingStore*>(result.release()));
}
std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* deleter_data) {
std::unique_ptr<i::BackingStoreBase> backing_store =
i::BackingStore::WrapAllocation(data, byte_length, deleter, deleter_data,
i::SharedFlag::kNotShared);
return std::unique_ptr<v8::BackingStore>(
static_cast<v8::BackingStore*>(backing_store.release()));
}
Local<ArrayBuffer> v8::ArrayBufferView::Buffer() {
i::Handle<i::JSArrayBufferView> obj = Utils::OpenHandle(this);
i::Handle<i::JSArrayBuffer> buffer;
......@@ -7759,6 +7785,32 @@ Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(
return Utils::ToLocalShared(buffer);
}
// Allocates a zero-initialized, shared standalone backing store of
// |byte_length| bytes through the isolate's array buffer allocator. If the
// allocation fails (even after the GC retries performed inside
// i::BackingStore::Allocate), the process is terminated with an OOM error.
std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
    Isolate* isolate, size_t byte_length) {
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  LOG_API(i_isolate, SharedArrayBuffer, NewBackingStore);
  ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate);
  std::unique_ptr<i::BackingStoreBase> result = i::BackingStore::Allocate(
      i_isolate, byte_length, i::SharedFlag::kShared,
      i::InitializedFlag::kZeroInitialized);
  if (result == nullptr) {
    i::FatalProcessOutOfMemory(i_isolate,
                               "v8::SharedArrayBuffer::NewBackingStore");
  }
  return std::unique_ptr<v8::BackingStore>(
      static_cast<v8::BackingStore*>(result.release()));
}
std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* deleter_data) {
std::unique_ptr<i::BackingStoreBase> backing_store =
i::BackingStore::WrapAllocation(data, byte_length, deleter, deleter_data,
i::SharedFlag::kShared);
return std::unique_ptr<v8::BackingStore>(
static_cast<v8::BackingStore*>(backing_store.release()));
}
Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
LOG_API(i_isolate, Symbol, New);
......
......@@ -733,6 +733,7 @@ class RuntimeCallTimer final {
V(ArrayBuffer_Cast) \
V(ArrayBuffer_Detach) \
V(ArrayBuffer_New) \
V(ArrayBuffer_NewBackingStore) \
V(Array_CloneElementAt) \
V(Array_New) \
V(BigInt64Array_New) \
......@@ -851,6 +852,7 @@ class RuntimeCallTimer final {
V(Set_Has) \
V(Set_New) \
V(SharedArrayBuffer_New) \
V(SharedArrayBuffer_NewBackingStore) \
V(String_Concat) \
V(String_NewExternalOneByte) \
V(String_NewExternalTwoByte) \
......
......@@ -124,6 +124,7 @@ BackingStore::~BackingStore() {
if (is_wasm_memory_) {
DCHECK(free_on_destruct_);
DCHECK(!custom_deleter_);
TRACE_BS("BSw:free bs=%p mem=%p (length=%zu, capacity=%zu)\n", this,
buffer_start_, byte_length(), byte_capacity_);
if (is_shared_) {
......@@ -149,6 +150,14 @@ BackingStore::~BackingStore() {
Clear();
return;
}
if (custom_deleter_) {
DCHECK(free_on_destruct_);
TRACE_BS("BS:custome deleter bs=%p mem=%p (length=%zu, capacity=%zu)\n",
this, buffer_start_, byte_length(), byte_capacity_);
type_specific_data_.deleter(buffer_start_, byte_length_, deleter_data_);
Clear();
return;
}
if (free_on_destruct_) {
// JSArrayBuffer backing store. Deallocate through the embedder's allocator.
auto allocator = reinterpret_cast<v8::ArrayBuffer::Allocator*>(
......@@ -210,7 +219,8 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
shared, // shared
false, // is_wasm_memory
true, // free_on_destruct
false); // has_guard_regions
false, // has_guard_regions
false); // custom_deleter
TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
result->buffer_start(), byte_length);
......@@ -321,7 +331,8 @@ std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
shared, // shared
true, // is_wasm_memory
true, // free_on_destruct
guards); // has_guard_regions
guards, // has_guard_regions
false); // custom_deleter
TRACE_BS("BSw:alloc bs=%p mem=%p (length=%zu, capacity=%zu)\n", result,
result->buffer_start(), byte_length, byte_capacity);
......@@ -451,9 +462,14 @@ void BackingStore::UpdateSharedWasmMemoryObjects(Isolate* isolate) {
std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
Isolate* isolate, void* allocation_base, size_t allocation_length,
SharedFlag shared, bool free_on_destruct) {
auto result =
new BackingStore(allocation_base, allocation_length, allocation_length,
shared, false, free_on_destruct, false);
auto result = new BackingStore(allocation_base, // start
allocation_length, // length
allocation_length, // capacity
shared, // shared
false, // is_wasm_memory
free_on_destruct, // free_on_destruct
false, // has_guard_regions
false); // custom_deleter
result->type_specific_data_.v8_api_array_buffer_allocator =
isolate->array_buffer_allocator();
TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
......@@ -461,6 +477,25 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
return std::unique_ptr<BackingStore>(result);
}
// Creates a standalone BackingStore around an embedder-provided memory block.
// The destructor frees the block by calling |deleter| (custom_deleter = true,
// which implies free_on_destruct = true and is DCHECKed in ~BackingStore).
std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
    void* allocation_base, size_t allocation_length,
    v8::BackingStoreDeleterCallback deleter, void* deleter_data,
    SharedFlag shared) {
  auto bs = new BackingStore(allocation_base,    // start
                             allocation_length,  // length
                             allocation_length,  // capacity
                             shared,             // shared
                             false,              // is_wasm_memory
                             true,               // free_on_destruct
                             false,              // has_guard_regions
                             true);              // custom_deleter
  // Record how to free the block: the deleter lives in the type-specific
  // union, its opaque context in a dedicated field.
  bs->deleter_data_ = deleter_data;
  bs->type_specific_data_.deleter = deleter;
  TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", bs, bs->buffer_start(),
           bs->byte_length());
  return std::unique_ptr<BackingStore>(bs);
}
std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
SharedFlag shared) {
auto result = new BackingStore(nullptr, // start
......@@ -469,7 +504,8 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
shared, // shared
false, // is_wasm_memory
false, // free_on_destruct
false); // has_guard_regions
false, // has_guard_regions
false); // custom_deleter
return std::unique_ptr<BackingStore>(result);
}
......@@ -512,6 +548,9 @@ void GlobalBackingStoreRegistry::Register(
// then we don't have to guarantee that there is single unique
// BackingStore per buffer_start() because the destructor of
// of the BackingStore will be a no-op in that case.
// All WASM memory has to be registered.
CHECK(!backing_store->is_wasm_memory());
return;
}
......
......@@ -8,6 +8,7 @@
#include <memory>
#include "include/v8-internal.h"
#include "include/v8.h"
#include "src/handles/handles.h"
namespace v8 {
......@@ -63,6 +64,11 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
SharedFlag shared,
bool free_on_destruct);
static std::unique_ptr<BackingStore> WrapAllocation(
void* allocation_base, size_t allocation_length,
v8::BackingStoreDeleterCallback deleter, void* deleter_data,
SharedFlag shared);
// Create an empty backing store.
static std::unique_ptr<BackingStore> EmptyBackingStore(SharedFlag shared);
......@@ -116,7 +122,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
BackingStore(void* buffer_start, size_t byte_length, size_t byte_capacity,
SharedFlag shared, bool is_wasm_memory, bool free_on_destruct,
bool has_guard_regions)
bool has_guard_regions, bool custom_deleter)
: buffer_start_(buffer_start),
byte_length_(byte_length),
byte_capacity_(byte_capacity),
......@@ -124,19 +130,15 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
is_wasm_memory_(is_wasm_memory),
free_on_destruct_(free_on_destruct),
has_guard_regions_(has_guard_regions),
globally_registered_(false) {
globally_registered_(false),
custom_deleter_(custom_deleter) {
type_specific_data_.v8_api_array_buffer_allocator = nullptr;
deleter_data_ = nullptr;
}
void* buffer_start_ = nullptr;
std::atomic<size_t> byte_length_{0};
size_t byte_capacity_ = 0;
bool is_shared_ : 1;
bool is_wasm_memory_ : 1;
bool free_on_destruct_ : 1;
bool has_guard_regions_ : 1;
bool globally_registered_ : 1;
union {
// If this backing store was allocated through the ArrayBufferAllocator API,
// this is a direct pointer to the API object for freeing the backing
......@@ -148,8 +150,21 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
// For shared Wasm memories, this is a list of all the attached memory
// objects, which is needed to grow shared backing stores.
SharedWasmMemoryData* shared_wasm_memory_data;
// Custom deleter for the backing stores that wrap memory blocks that are
// allocated with a custom allocator.
v8::BackingStoreDeleterCallback deleter;
} type_specific_data_;
void* deleter_data_;
bool is_shared_ : 1;
bool is_wasm_memory_ : 1;
bool free_on_destruct_ : 1;
bool has_guard_regions_ : 1;
bool globally_registered_ : 1;
bool custom_deleter_ : 1;
// Accessors for type-specific data.
void* get_v8_api_array_buffer_allocator();
SharedWasmMemoryData* get_shared_wasm_memory_data();
......
......@@ -543,3 +543,68 @@ THREADED_TEST(Regress1006600) {
CHECK_NULL(ab.As<v8::Object>()->GetAlignedPointerFromInternalField(i));
}
}
// Checks that a standalone BackingStore created via the API is the very
// object reported by a JS ArrayBuffer constructed from it.
THREADED_TEST(ArrayBuffer_NewBackingStore) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  v8::HandleScope handle_scope(isolate);
  std::shared_ptr<v8::BackingStore> store =
      v8::ArrayBuffer::NewBackingStore(isolate, 100);
  Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(isolate, store);
  CHECK_EQ(store.get(), buffer->GetBackingStore().get());
}
// Checks that a standalone shared BackingStore created via the API is the
// very object reported by a JS SharedArrayBuffer constructed from it.
THREADED_TEST(SharedArrayBuffer_NewBackingStore) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  v8::HandleScope handle_scope(isolate);
  std::shared_ptr<v8::BackingStore> store =
      v8::SharedArrayBuffer::NewBackingStore(isolate, 100);
  Local<v8::SharedArrayBuffer> buffer =
      v8::SharedArrayBuffer::New(isolate, store);
  CHECK_EQ(store.get(), buffer->GetBackingStore().get());
}
// State shared with BackingStoreCustomDeleter so the tests below can verify
// that the deleter runs exactly once with the registered arguments.
static void* backing_store_custom_data = nullptr;
static size_t backing_store_custom_length = 0;
static bool backing_store_custom_called = false;
const intptr_t backing_store_custom_deleter_data = 1234567;
// Deleter handed to NewBackingStore in the CustomDeleter tests. Verifies it
// is invoked exactly once with the block and deleter_data that were
// registered in the globals above, then frees the block.
static void BackingStoreCustomDeleter(void* data, size_t length,
                                      void* deleter_data) {
  CHECK(!backing_store_custom_called);
  CHECK_EQ(backing_store_custom_data, data);
  CHECK_EQ(backing_store_custom_length, length);
  CHECK_EQ(backing_store_custom_deleter_data,
           reinterpret_cast<intptr_t>(deleter_data));
  free(data);
  backing_store_custom_called = true;
}
// Wraps a malloc'ed block in a standalone BackingStore and checks that
// destroying the store runs the custom deleter exactly once.
TEST(ArrayBuffer_NewBackingStore_CustomDeleter) {
  {
    // Register the expected deleter arguments, then create a backing store.
    backing_store_custom_called = false;
    backing_store_custom_length = 100;
    backing_store_custom_data = malloc(100);
    // The returned unique_ptr is discarded, so the BackingStore — and with it
    // the wrapped block — is destroyed immediately.
    v8::ArrayBuffer::NewBackingStore(
        backing_store_custom_data, backing_store_custom_length,
        BackingStoreCustomDeleter,
        reinterpret_cast<void*>(backing_store_custom_deleter_data));
  }
  CHECK(backing_store_custom_called);
}
// Wraps a malloc'ed block in a standalone shared BackingStore and checks that
// destroying the store runs the custom deleter exactly once.
TEST(SharedArrayBuffer_NewBackingStore_CustomDeleter) {
  {
    // Register the expected deleter arguments, then create a backing store.
    backing_store_custom_called = false;
    backing_store_custom_length = 100;
    backing_store_custom_data = malloc(100);
    // The returned unique_ptr is discarded, so the BackingStore — and with it
    // the wrapped block — is destroyed immediately.
    v8::SharedArrayBuffer::NewBackingStore(
        backing_store_custom_data, backing_store_custom_length,
        BackingStoreCustomDeleter,
        reinterpret_cast<void*>(backing_store_custom_deleter_data));
  }
  CHECK(backing_store_custom_called);
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment