Commit 0cd7468b authored by Eric Holk, committed by Commit Bot

[wasm] Always enable guard regions on 64-bit platforms

This change always enables full 8 GiB guard regions on 64-bit
platforms.

Additionally, since all Wasm memory allocation paths now have some form
of guard region, this change removes or simplifies most of the logic
around whether to enable guard regions.

R=gdeepti@chromium.org

Change-Id: Idf3fbcc11ac70ea2ee7eb88c2173d6a1410395e1
Reviewed-on: https://chromium-review.googlesource.com/985142
Commit-Queue: Eric Holk <eholk@chromium.org>
Reviewed-by: Brad Nelson <bradnelson@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52310}
parent e1e870a3
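For readers unfamiliar with the mechanism: the guard-region approach reserves far more virtual address space than the Wasm memory actually needs, leaves everything beyond the accessible prefix unmapped, and lets the trap handler turn the resulting fault into a Wasm out-of-bounds error instead of relying on explicit bounds checks. Below is a minimal, self-contained sketch of that idea, assuming a 64-bit Linux host; the names and constants (kGuardRegionSize, AllocateWasmMemoryWithGuards) are illustrative and are not V8 symbols.

```cpp
// Minimal sketch of the guard-region idea (not V8 code); assumes a 64-bit
// Linux host. kGuardRegionSize and AllocateWasmMemoryWithGuards are
// illustrative names, not V8 symbols.
#include <sys/mman.h>

#include <cstddef>
#include <cstdio>

namespace {
constexpr size_t kWasmPageSize = 64 * 1024;      // Wasm pages are 64 KiB
constexpr size_t kGuardRegionSize = 8ull << 30;  // reserve 8 GiB of addresses
}  // namespace

// Reserve the full 8 GiB region with no access rights, then make only the
// first `accessible_size` bytes usable. Any access past that point faults.
void* AllocateWasmMemoryWithGuards(size_t accessible_size) {
  void* base = mmap(nullptr, kGuardRegionSize, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (base == MAP_FAILED) return nullptr;
  if (accessible_size > 0 &&
      mprotect(base, accessible_size, PROT_READ | PROT_WRITE) != 0) {
    munmap(base, kGuardRegionSize);
    return nullptr;
  }
  return base;
}

int main() {
  const size_t size = 16 * kWasmPageSize;  // 1 MiB of accessible Wasm memory
  void* mem = AllocateWasmMemoryWithGuards(size);
  std::printf("reserved %zu bytes at %p, %zu bytes accessible\n",
              kGuardRegionSize, mem, size);
  if (mem != nullptr) munmap(mem, kGuardRegionSize);
  return 0;
}
```

With the reservation strategy now chosen purely by target architecture, the diff below can drop the require_guard_regions / enable_guard_regions plumbing from every allocation path.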
@@ -1676,17 +1676,25 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
   WasmContext* wasm_context = instance->wasm_context()->get();
   uint32_t globals_size = module_->globals_size;
   if (globals_size > 0) {
-    constexpr bool enable_guard_regions = false;
-    Handle<JSArrayBuffer> global_buffer =
-        NewArrayBuffer(isolate_, globals_size, enable_guard_regions);
-    globals_ = global_buffer;
+    void* backing_store =
+        isolate_->array_buffer_allocator()->Allocate(globals_size);
+    if (backing_store == nullptr) {
+      thrower_->RangeError("Out of memory: wasm globals");
+      return {};
+    }
+    globals_ =
+        isolate_->factory()->NewJSArrayBuffer(SharedFlag::kNotShared, TENURED);
+    constexpr bool is_external = false;
+    constexpr bool is_wasm_memory = false;
+    JSArrayBuffer::Setup(globals_, isolate_, is_external, backing_store,
+                         globals_size, SharedFlag::kNotShared, is_wasm_memory);
     if (globals_.is_null()) {
       thrower_->RangeError("Out of memory: wasm globals");
       return {};
     }
     wasm_context->globals_start =
-        reinterpret_cast<byte*>(global_buffer->backing_store());
-    instance->set_globals_buffer(*global_buffer);
+        reinterpret_cast<byte*>(globals_->backing_store());
+    instance->set_globals_buffer(*globals_);
   }

   //--------------------------------------------------------------------------

@@ -2375,11 +2383,10 @@ Handle<JSArrayBuffer> InstanceBuilder::AllocateMemory(uint32_t num_pages) {
     thrower_->RangeError("Out of memory: wasm memory too large");
     return Handle<JSArrayBuffer>::null();
   }
-  const bool enable_guard_regions = use_trap_handler();
   const bool is_shared_memory =
       module_->has_shared_memory && i::FLAG_experimental_wasm_threads;
   Handle<JSArrayBuffer> mem_buffer = NewArrayBuffer(
-      isolate_, num_pages * kWasmPageSize, enable_guard_regions,
+      isolate_, num_pages * kWasmPageSize,
       is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared);
   if (mem_buffer.is_null()) {

@@ -654,10 +654,8 @@ void WebAssemblyMemory(const v8::FunctionCallbackInfo<v8::Value>& args) {
   size_t size = static_cast<size_t>(i::wasm::kWasmPageSize) *
                 static_cast<size_t>(initial);
-  const bool enable_guard_regions =
-      internal::trap_handler::IsTrapHandlerEnabled();
   i::Handle<i::JSArrayBuffer> buffer = i::wasm::NewArrayBuffer(
-      i_isolate, size, enable_guard_regions,
+      i_isolate, size,
       is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared);
   if (buffer.is_null()) {
     thrower.RangeError("could not allocate memory");

@@ -14,20 +14,18 @@ namespace wasm {
 namespace {

 void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
-                              size_t size, bool require_guard_regions,
-                              void** allocation_base,
+                              size_t size, void** allocation_base,
                               size_t* allocation_length) {
-#if V8_TARGET_ARCH_32_BIT
-  DCHECK(!require_guard_regions);
-#endif
   // We always allocate the largest possible offset into the heap, so the
   // addressable memory after the guard page can be made inaccessible.
+#if V8_TARGET_ARCH_64_BIT
+  *allocation_length = RoundUp(kWasmMaxHeapOffset, CommitPageSize());
+#else
   *allocation_length =
-      require_guard_regions
-          ? RoundUp(kWasmMaxHeapOffset, CommitPageSize())
-          : RoundUp(
-                base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)),
-                kWasmPageSize);
+      RoundUp(base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)),
+              kWasmPageSize);
+#endif

   DCHECK_GE(*allocation_length, size);
   DCHECK_GE(*allocation_length, kWasmPageSize);

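To make the two branches above concrete: on 64-bit targets the reservation is always the full heap offset range (assumed here to be 8 GiB, per the commit title), while on 32-bit targets only the requested size is rounded up. The sketch below reproduces that arithmetic with simplified stand-ins for V8's RoundUp and RoundUpToPowerOfTwo32 helpers; the constants and helper bodies are assumptions for illustration, not V8's actual definitions.

```cpp
// Standalone sketch of the allocation-length computation above; the helpers
// and constants are simplified stand-ins, not V8's base library.
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {
constexpr size_t kWasmPageSize = 64 * 1024;        // Wasm pages are 64 KiB
constexpr size_t kCommitPageSize = 4096;           // typical OS page size
constexpr size_t kWasmMaxHeapOffset = 8ull << 30;  // assumed 8 GiB

size_t RoundUp(size_t value, size_t alignment) {
  return (value + alignment - 1) / alignment * alignment;
}

uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
  if (value <= 1) return 1;
  value--;
  value |= value >> 1;
  value |= value >> 2;
  value |= value >> 4;
  value |= value >> 8;
  value |= value >> 16;
  return value + 1;
}
}  // namespace

int main() {
  const size_t size = 3 * kWasmPageSize;  // a 192 KiB Wasm memory
  // 64-bit branch: always reserve the whole addressable heap offset range.
  const size_t len64 = RoundUp(kWasmMaxHeapOffset, kCommitPageSize);
  // 32-bit branch: reserve only a power-of-two, page-aligned size.
  const size_t len32 = RoundUp(
      RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)), kWasmPageSize);
  std::printf("64-bit reservation: %zu bytes\n", len64);  // 8589934592
  std::printf("32-bit reservation: %zu bytes\n", len32);  // 262144
}
```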
@@ -169,12 +167,11 @@ void* WasmMemoryTracker::GetEmptyBackingStore(void** allocation_base,
                                               Heap* heap) {
   if (empty_backing_store_.allocation_base == nullptr) {
     constexpr size_t buffer_length = 0;
-    const bool require_guard_regions = trap_handler::IsTrapHandlerEnabled();
     void* local_allocation_base;
     size_t local_allocation_length;
-    void* buffer_start = TryAllocateBackingStore(
-        this, heap, buffer_length, require_guard_regions,
-        &local_allocation_base, &local_allocation_length);
+    void* buffer_start = TryAllocateBackingStore(this, heap, buffer_length,
+                                                 &local_allocation_base,
+                                                 &local_allocation_length);

     empty_backing_store_ =
         AllocationData(local_allocation_base, local_allocation_length,

@@ -220,7 +217,6 @@ Handle<JSArrayBuffer> SetupArrayBuffer(Isolate* isolate, void* backing_store,
 }

 Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
-                                     bool require_guard_regions,
                                      SharedFlag shared) {
   // Check against kMaxInt, since the byte length is stored as int in the
   // JSArrayBuffer. Note that wasm_max_mem_pages can be raised from the command
@@ -241,10 +237,8 @@ Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
           ? memory_tracker->GetEmptyBackingStore(
                 &allocation_base, &allocation_length, isolate->heap())
           : TryAllocateBackingStore(memory_tracker, isolate->heap(), size,
-                                    require_guard_regions, &allocation_base,
-                                    &allocation_length);
-  if (size > 0 && memory == nullptr) {
+                                    &allocation_base, &allocation_length);
+  if (memory == nullptr) {
     return Handle<JSArrayBuffer>::null();
   }

@@ -114,8 +114,7 @@ class WasmMemoryTracker {
 };

 Handle<JSArrayBuffer> NewArrayBuffer(
-    Isolate*, size_t size, bool require_guard_regions,
-    SharedFlag shared = SharedFlag::kNotShared);
+    Isolate*, size_t size, SharedFlag shared = SharedFlag::kNotShared);

 Handle<JSArrayBuffer> SetupArrayBuffer(
     Isolate*, void* backing_store, size_t size, bool is_external,

@@ -341,8 +341,7 @@ void WasmTableObject::ClearDispatchTables(Handle<WasmTableObject> table,
 namespace {

 Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
                                        Handle<JSArrayBuffer> old_buffer,
-                                       uint32_t pages, uint32_t maximum_pages,
-                                       bool use_trap_handler) {
+                                       uint32_t pages, uint32_t maximum_pages) {
   if (!old_buffer->is_growable()) return Handle<JSArrayBuffer>::null();
   Address old_mem_start = nullptr;
   uint32_t old_size = 0;

@@ -395,7 +394,7 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
   // We couldn't reuse the old backing store, so create a new one and copy the
   // old contents in.
   Handle<JSArrayBuffer> new_buffer;
-  new_buffer = wasm::NewArrayBuffer(isolate, new_size, use_trap_handler);
+  new_buffer = wasm::NewArrayBuffer(isolate, new_size);
   if (new_buffer.is_null() || old_size == 0) return new_buffer;
   Address new_mem_start = static_cast<Address>(new_buffer->backing_store());
   memcpy(new_mem_start, old_mem_start, old_size);

@@ -497,10 +496,7 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate,
     maximum_pages = Min(FLAG_wasm_max_mem_pages,
                         static_cast<uint32_t>(memory_object->maximum_pages()));
   }
-  // TODO(kschimpf): We need to fix this by adding a field to WasmMemoryObject
-  // that defines the style of memory being used.
-  new_buffer = GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages,
-                                trap_handler::IsTrapHandlerEnabled());
+  new_buffer = GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages);
   if (new_buffer.is_null()) return -1;

   if (memory_object->has_instances()) {

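When the old backing store cannot be reused, GrowMemoryBuffer above now simply requests a fresh buffer (always guarded on 64-bit after this change) and copies the old contents in. A rough standalone sketch of that grow-by-copy step, reusing the POSIX reservation idea from the earlier sketch; all names and constants are illustrative assumptions, not V8 code, and a 64-bit Linux host is assumed.

```cpp
// Sketch of grow-by-copy: reserve a new guarded region, commit the new size,
// copy the old contents, and release the old reservation. Assumes old_base
// was itself obtained from ReserveGuarded with the same kGuardRegionSize.
#include <sys/mman.h>

#include <cstddef>
#include <cstring>

namespace {
constexpr size_t kGuardRegionSize = 8ull << 30;  // assumed 8 GiB reservation

void* ReserveGuarded(size_t accessible_size) {
  void* base = mmap(nullptr, kGuardRegionSize, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (base == MAP_FAILED) return nullptr;
  if (accessible_size > 0 &&
      mprotect(base, accessible_size, PROT_READ | PROT_WRITE) != 0) {
    munmap(base, kGuardRegionSize);
    return nullptr;
  }
  return base;
}
}  // namespace

// Returns the new base on success, or nullptr and leaves the old buffer alone.
void* GrowByCopy(void* old_base, size_t old_size, size_t new_size) {
  void* new_base = ReserveGuarded(new_size);
  if (new_base == nullptr) return nullptr;
  std::memcpy(new_base, old_base, old_size);  // copy the old contents in
  munmap(old_base, kGuardRegionSize);         // drop the old reservation
  return new_base;
}
```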
@@ -1079,13 +1079,8 @@ TEST(Run_WasmModule_Buffer_Externalized_GrowMemMemSize) {
   {
     Isolate* isolate = CcTest::InitIsolateOnce();
     HandleScope scope(isolate);
-#if V8_TARGET_ARCH_64_BIT
-    constexpr bool require_guard_regions = true;
-#else
-    constexpr bool require_guard_regions = false;
-#endif
-    Handle<JSArrayBuffer> buffer = wasm::NewArrayBuffer(
-        isolate, 16 * kWasmPageSize, require_guard_regions);
+    Handle<JSArrayBuffer> buffer =
+        wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize);
     Handle<WasmMemoryObject> mem_obj =
         WasmMemoryObject::New(isolate, buffer, 100);
     auto const contents = v8::Utils::ToLocal(buffer)->Externalize();

@@ -1106,13 +1101,8 @@ TEST(Run_WasmModule_Buffer_Externalized_Detach) {
   // https://bugs.chromium.org/p/chromium/issues/detail?id=731046
   Isolate* isolate = CcTest::InitIsolateOnce();
   HandleScope scope(isolate);
-#if V8_TARGET_ARCH_64_BIT
-  constexpr bool require_guard_regions = true;
-#else
-  constexpr bool require_guard_regions = false;
-#endif
-  Handle<JSArrayBuffer> buffer = wasm::NewArrayBuffer(
-      isolate, 16 * kWasmPageSize, require_guard_regions);
+  Handle<JSArrayBuffer> buffer =
+      wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize);
   auto const contents = v8::Utils::ToLocal(buffer)->Externalize();
   wasm::DetachMemoryBuffer(isolate, buffer, true);
   constexpr bool is_wasm_memory = true;

@@ -1128,13 +1118,8 @@ TEST(Run_WasmModule_Buffer_Externalized_Regression_UseAfterFree) {
   // Regresion test for https://crbug.com/813876
   Isolate* isolate = CcTest::InitIsolateOnce();
   HandleScope scope(isolate);
-#if V8_TARGET_ARCH_64_BIT
-  const bool require_guard_regions = trap_handler::IsTrapHandlerEnabled();
-#else
-  constexpr bool require_guard_regions = false;
-#endif
   Handle<JSArrayBuffer> buffer =
-      wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize, require_guard_regions);
+      wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize);
   Handle<WasmMemoryObject> mem = WasmMemoryObject::New(isolate, buffer, 128);
   auto contents = v8::Utils::ToLocal(buffer)->Externalize();
   WasmMemoryObject::Grow(isolate, mem, 0);

@@ -1156,9 +1141,7 @@ TEST(Run_WasmModule_Reclaim_Memory) {
   Handle<JSArrayBuffer> buffer;
   for (int i = 0; i < 256; ++i) {
     HandleScope scope(isolate);
-    constexpr bool require_guard_regions = true;
-    buffer = NewArrayBuffer(isolate, kWasmPageSize, require_guard_regions,
-                            SharedFlag::kNotShared);
+    buffer = NewArrayBuffer(isolate, kWasmPageSize, SharedFlag::kNotShared);
     CHECK(!buffer.is_null());
   }
 }

@@ -40,12 +40,8 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) {
   CHECK_EQ(0, mem_size_);
   DCHECK(!instance_object_->has_memory_object());
   test_module_.has_memory = true;
-  const bool enable_guard_regions =
-      trap_handler::IsTrapHandlerEnabled() && test_module_.is_wasm();
-  uint32_t alloc_size =
-      enable_guard_regions ? RoundUp(size, CommitPageSize()) : size;
-  Handle<JSArrayBuffer> new_buffer =
-      wasm::NewArrayBuffer(isolate_, alloc_size, enable_guard_regions);
+  uint32_t alloc_size = RoundUp(size, kWasmPageSize);
+  Handle<JSArrayBuffer> new_buffer = wasm::NewArrayBuffer(isolate_, alloc_size);
   CHECK(!new_buffer.is_null());
   mem_start_ = reinterpret_cast<byte*>(new_buffer->backing_store());
   mem_size_ = size;