Commit ab572da2 authored by Michael Achenbach, committed by Commit Bot

Revert "[wasm] Always enable guard regions on 64-bit platforms"

This reverts commit ad221d14.

Reason for revert: Layout test failures:
https://build.chromium.org/p/client.v8.fyi/builders/V8-Blink%20Linux%2064/builds/22780

Original change's description:
> [wasm] Always enable guard regions on 64-bit platforms
> 
> This change makes full 8 GiB guard regions always enabled on 64-bit
> platforms.
> 
> Additionally, since all Wasm memory allocation paths have some form of
> guard regions, this removes and simplifies most of the logic around
> whether to enable guard regions.
> 
> This is a reland of https://crrev.com/c/985142.
> 
> Bug: v8:7619
> Change-Id: I8bf1f86d6f89fd0bb2144431c7628f15a6b00ba0
> Reviewed-on: https://chromium-review.googlesource.com/996466
> Reviewed-by: Brad Nelson <bradnelson@chromium.org>
> Commit-Queue: Eric Holk <eholk@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#52412}

TBR=bradnelson@chromium.org,eholk@chromium.org

Change-Id: Ic15d14c6fa69300bc0fdc036b9fee8ecf65fd397
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:7619
Reviewed-on: https://chromium-review.googlesource.com/999412
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52418}
parent 8be6842c
...@@ -1651,19 +1651,9 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { ...@@ -1651,19 +1651,9 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
WasmContext* wasm_context = instance->wasm_context()->get(); WasmContext* wasm_context = instance->wasm_context()->get();
uint32_t globals_size = module_->globals_size; uint32_t globals_size = module_->globals_size;
if (globals_size > 0) { if (globals_size > 0) {
void* backing_store = constexpr bool enable_guard_regions = false;
isolate_->array_buffer_allocator()->Allocate(globals_size); if (!NewArrayBuffer(isolate_, globals_size, enable_guard_regions)
if (backing_store == nullptr) { .ToHandle(&globals_)) {
thrower_->RangeError("Out of memory: wasm globals");
return {};
}
globals_ =
isolate_->factory()->NewJSArrayBuffer(SharedFlag::kNotShared, TENURED);
constexpr bool is_external = false;
constexpr bool is_wasm_memory = false;
JSArrayBuffer::Setup(globals_, isolate_, is_external, backing_store,
globals_size, SharedFlag::kNotShared, is_wasm_memory);
if (globals_.is_null()) {
thrower_->RangeError("Out of memory: wasm globals"); thrower_->RangeError("Out of memory: wasm globals");
return {}; return {};
} }
...@@ -2358,12 +2348,14 @@ Handle<JSArrayBuffer> InstanceBuilder::AllocateMemory(uint32_t num_pages) { ...@@ -2358,12 +2348,14 @@ Handle<JSArrayBuffer> InstanceBuilder::AllocateMemory(uint32_t num_pages) {
thrower_->RangeError("Out of memory: wasm memory too large"); thrower_->RangeError("Out of memory: wasm memory too large");
return Handle<JSArrayBuffer>::null(); return Handle<JSArrayBuffer>::null();
} }
const bool enable_guard_regions = use_trap_handler();
const bool is_shared_memory = const bool is_shared_memory =
module_->has_shared_memory && i::FLAG_experimental_wasm_threads; module_->has_shared_memory && i::FLAG_experimental_wasm_threads;
i::SharedFlag shared_flag = i::SharedFlag shared_flag =
is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared; is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared;
Handle<JSArrayBuffer> mem_buffer; Handle<JSArrayBuffer> mem_buffer;
if (!NewArrayBuffer(isolate_, num_pages * kWasmPageSize, shared_flag) if (!NewArrayBuffer(isolate_, num_pages * kWasmPageSize, enable_guard_regions,
shared_flag)
.ToHandle(&mem_buffer)) { .ToHandle(&mem_buffer)) {
thrower_->RangeError("Out of memory: wasm memory"); thrower_->RangeError("Out of memory: wasm memory");
} }
......
...@@ -651,12 +651,15 @@ void WebAssemblyMemory(const v8::FunctionCallbackInfo<v8::Value>& args) { ...@@ -651,12 +651,15 @@ void WebAssemblyMemory(const v8::FunctionCallbackInfo<v8::Value>& args) {
} }
} }
size_t size = static_cast<size_t>(i::wasm::kWasmPageSize) *
static_cast<size_t>(initial);
const bool enable_guard_regions =
internal::trap_handler::IsTrapHandlerEnabled();
i::SharedFlag shared_flag = i::SharedFlag shared_flag =
is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared; is_shared_memory ? i::SharedFlag::kShared : i::SharedFlag::kNotShared;
i::Handle<i::JSArrayBuffer> buffer; i::Handle<i::JSArrayBuffer> buffer;
size_t size = static_cast<size_t>(i::wasm::kWasmPageSize) * if (!i::wasm::NewArrayBuffer(i_isolate, size, enable_guard_regions,
static_cast<size_t>(initial); shared_flag)
if (!i::wasm::NewArrayBuffer(i_isolate, size, shared_flag)
.ToHandle(&buffer)) { .ToHandle(&buffer)) {
thrower.RangeError("could not allocate memory"); thrower.RangeError("could not allocate memory");
return; return;
......
...@@ -14,18 +14,20 @@ namespace wasm { ...@@ -14,18 +14,20 @@ namespace wasm {
namespace { namespace {
void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap, void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
size_t size, void** allocation_base, size_t size, bool require_guard_regions,
void** allocation_base,
size_t* allocation_length) { size_t* allocation_length) {
// We always allocate the largest possible offset into the heap, so the #if V8_TARGET_ARCH_32_BIT
// addressable memory after the guard page can be made inaccessible. DCHECK(!require_guard_regions);
#if V8_TARGET_ARCH_64_BIT
*allocation_length = RoundUp(kWasmMaxHeapOffset, CommitPageSize());
#else
*allocation_length =
RoundUp(base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)),
kWasmPageSize);
#endif #endif
// We always allocate the largest possible offset into the heap, so the
// addressable memory after the guard page can be made inaccessible.
*allocation_length =
require_guard_regions
? RoundUp(kWasmMaxHeapOffset, CommitPageSize())
: RoundUp(
base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)),
kWasmPageSize);
DCHECK_GE(*allocation_length, size); DCHECK_GE(*allocation_length, size);
DCHECK_GE(*allocation_length, kWasmPageSize); DCHECK_GE(*allocation_length, kWasmPageSize);
...@@ -179,11 +181,12 @@ void* WasmMemoryTracker::GetEmptyBackingStore(void** allocation_base, ...@@ -179,11 +181,12 @@ void* WasmMemoryTracker::GetEmptyBackingStore(void** allocation_base,
Heap* heap) { Heap* heap) {
if (empty_backing_store_.allocation_base == nullptr) { if (empty_backing_store_.allocation_base == nullptr) {
constexpr size_t buffer_length = 0; constexpr size_t buffer_length = 0;
const bool require_guard_regions = trap_handler::IsTrapHandlerEnabled();
void* local_allocation_base; void* local_allocation_base;
size_t local_allocation_length; size_t local_allocation_length;
void* buffer_start = TryAllocateBackingStore(this, heap, buffer_length, void* buffer_start = TryAllocateBackingStore(
&local_allocation_base, this, heap, buffer_length, require_guard_regions,
&local_allocation_length); &local_allocation_base, &local_allocation_length);
empty_backing_store_ = empty_backing_store_ =
AllocationData(local_allocation_base, local_allocation_length, AllocationData(local_allocation_base, local_allocation_length,
...@@ -229,6 +232,7 @@ Handle<JSArrayBuffer> SetupArrayBuffer(Isolate* isolate, void* backing_store, ...@@ -229,6 +232,7 @@ Handle<JSArrayBuffer> SetupArrayBuffer(Isolate* isolate, void* backing_store,
} }
MaybeHandle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size, MaybeHandle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
bool require_guard_regions,
SharedFlag shared) { SharedFlag shared) {
// Check against kMaxInt, since the byte length is stored as int in the // Check against kMaxInt, since the byte length is stored as int in the
// JSArrayBuffer. Note that wasm_max_mem_pages can be raised from the command // JSArrayBuffer. Note that wasm_max_mem_pages can be raised from the command
...@@ -249,10 +253,10 @@ MaybeHandle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size, ...@@ -249,10 +253,10 @@ MaybeHandle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
? memory_tracker->GetEmptyBackingStore( ? memory_tracker->GetEmptyBackingStore(
&allocation_base, &allocation_length, isolate->heap()) &allocation_base, &allocation_length, isolate->heap())
: TryAllocateBackingStore(memory_tracker, isolate->heap(), size, : TryAllocateBackingStore(memory_tracker, isolate->heap(), size,
&allocation_base, &allocation_length); require_guard_regions, &allocation_base,
if (memory == nullptr) { &allocation_length);
return {};
} if (size > 0 && memory == nullptr) return {};
#if DEBUG #if DEBUG
// Double check the API allocator actually zero-initialized the memory. // Double check the API allocator actually zero-initialized the memory.
......
...@@ -114,7 +114,8 @@ class WasmMemoryTracker { ...@@ -114,7 +114,8 @@ class WasmMemoryTracker {
}; };
MaybeHandle<JSArrayBuffer> NewArrayBuffer( MaybeHandle<JSArrayBuffer> NewArrayBuffer(
Isolate*, size_t size, SharedFlag shared = SharedFlag::kNotShared); Isolate*, size_t size, bool require_guard_regions,
SharedFlag shared = SharedFlag::kNotShared);
Handle<JSArrayBuffer> SetupArrayBuffer( Handle<JSArrayBuffer> SetupArrayBuffer(
Isolate*, void* backing_store, size_t size, bool is_external, Isolate*, void* backing_store, size_t size, bool is_external,
......
...@@ -345,7 +345,8 @@ namespace { ...@@ -345,7 +345,8 @@ namespace {
MaybeHandle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate, MaybeHandle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
Handle<JSArrayBuffer> old_buffer, Handle<JSArrayBuffer> old_buffer,
uint32_t pages, uint32_t pages,
uint32_t maximum_pages) { uint32_t maximum_pages,
bool use_trap_handler) {
if (!old_buffer->is_growable()) return {}; if (!old_buffer->is_growable()) return {};
Address old_mem_start = nullptr; Address old_mem_start = nullptr;
uint32_t old_size = 0; uint32_t old_size = 0;
...@@ -396,7 +397,8 @@ MaybeHandle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate, ...@@ -396,7 +397,8 @@ MaybeHandle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
// We couldn't reuse the old backing store, so create a new one and copy the // We couldn't reuse the old backing store, so create a new one and copy the
// old contents in. // old contents in.
Handle<JSArrayBuffer> new_buffer; Handle<JSArrayBuffer> new_buffer;
if (!wasm::NewArrayBuffer(isolate, new_size).ToHandle(&new_buffer)) { if (!wasm::NewArrayBuffer(isolate, new_size, use_trap_handler)
.ToHandle(&new_buffer)) {
return {}; return {};
} }
if (old_size == 0) return new_buffer; if (old_size == 0) return new_buffer;
...@@ -500,7 +502,10 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate, ...@@ -500,7 +502,10 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate,
maximum_pages = Min(FLAG_wasm_max_mem_pages, maximum_pages = Min(FLAG_wasm_max_mem_pages,
static_cast<uint32_t>(memory_object->maximum_pages())); static_cast<uint32_t>(memory_object->maximum_pages()));
} }
if (!GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages) // TODO(kschimpf): We need to fix this by adding a field to WasmMemoryObject
// that defines the style of memory being used.
if (!GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages,
trap_handler::IsTrapHandlerEnabled())
.ToHandle(&new_buffer)) { .ToHandle(&new_buffer)) {
return -1; return -1;
} }
......
...@@ -1079,8 +1079,15 @@ TEST(Run_WasmModule_Buffer_Externalized_GrowMemMemSize) { ...@@ -1079,8 +1079,15 @@ TEST(Run_WasmModule_Buffer_Externalized_GrowMemMemSize) {
{ {
Isolate* isolate = CcTest::InitIsolateOnce(); Isolate* isolate = CcTest::InitIsolateOnce();
HandleScope scope(isolate); HandleScope scope(isolate);
#if V8_TARGET_ARCH_64_BIT
constexpr bool require_guard_regions = true;
#else
constexpr bool require_guard_regions = false;
#endif
Handle<JSArrayBuffer> buffer; Handle<JSArrayBuffer> buffer;
CHECK(wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize).ToHandle(&buffer)); CHECK(
wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize, require_guard_regions)
.ToHandle(&buffer));
Handle<WasmMemoryObject> mem_obj = Handle<WasmMemoryObject> mem_obj =
WasmMemoryObject::New(isolate, buffer, 100); WasmMemoryObject::New(isolate, buffer, 100);
auto const contents = v8::Utils::ToLocal(buffer)->Externalize(); auto const contents = v8::Utils::ToLocal(buffer)->Externalize();
...@@ -1101,8 +1108,15 @@ TEST(Run_WasmModule_Buffer_Externalized_Detach) { ...@@ -1101,8 +1108,15 @@ TEST(Run_WasmModule_Buffer_Externalized_Detach) {
// https://bugs.chromium.org/p/chromium/issues/detail?id=731046 // https://bugs.chromium.org/p/chromium/issues/detail?id=731046
Isolate* isolate = CcTest::InitIsolateOnce(); Isolate* isolate = CcTest::InitIsolateOnce();
HandleScope scope(isolate); HandleScope scope(isolate);
#if V8_TARGET_ARCH_64_BIT
constexpr bool require_guard_regions = true;
#else
constexpr bool require_guard_regions = false;
#endif
Handle<JSArrayBuffer> buffer; Handle<JSArrayBuffer> buffer;
CHECK(wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize).ToHandle(&buffer)); CHECK(
wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize, require_guard_regions)
.ToHandle(&buffer));
auto const contents = v8::Utils::ToLocal(buffer)->Externalize(); auto const contents = v8::Utils::ToLocal(buffer)->Externalize();
wasm::DetachMemoryBuffer(isolate, buffer, true); wasm::DetachMemoryBuffer(isolate, buffer, true);
constexpr bool is_wasm_memory = true; constexpr bool is_wasm_memory = true;
...@@ -1118,8 +1132,14 @@ TEST(Run_WasmModule_Buffer_Externalized_Regression_UseAfterFree) { ...@@ -1118,8 +1132,14 @@ TEST(Run_WasmModule_Buffer_Externalized_Regression_UseAfterFree) {
// Regresion test for https://crbug.com/813876 // Regresion test for https://crbug.com/813876
Isolate* isolate = CcTest::InitIsolateOnce(); Isolate* isolate = CcTest::InitIsolateOnce();
HandleScope scope(isolate); HandleScope scope(isolate);
#if V8_TARGET_ARCH_64_BIT
const bool require_guard_regions = trap_handler::IsTrapHandlerEnabled();
#else
constexpr bool require_guard_regions = false;
#endif
Handle<JSArrayBuffer> buffer; Handle<JSArrayBuffer> buffer;
CHECK(wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize).ToHandle(&buffer)); CHECK(wasm::NewArrayBuffer(isolate, 16 * kWasmPageSize, require_guard_regions)
.ToHandle(&buffer));
Handle<WasmMemoryObject> mem = WasmMemoryObject::New(isolate, buffer, 128); Handle<WasmMemoryObject> mem = WasmMemoryObject::New(isolate, buffer, 128);
auto contents = v8::Utils::ToLocal(buffer)->Externalize(); auto contents = v8::Utils::ToLocal(buffer)->Externalize();
WasmMemoryObject::Grow(isolate, mem, 0); WasmMemoryObject::Grow(isolate, mem, 0);
...@@ -1141,7 +1161,9 @@ TEST(Run_WasmModule_Reclaim_Memory) { ...@@ -1141,7 +1161,9 @@ TEST(Run_WasmModule_Reclaim_Memory) {
Handle<JSArrayBuffer> buffer; Handle<JSArrayBuffer> buffer;
for (int i = 0; i < 256; ++i) { for (int i = 0; i < 256; ++i) {
HandleScope scope(isolate); HandleScope scope(isolate);
CHECK(NewArrayBuffer(isolate, kWasmPageSize, SharedFlag::kNotShared) constexpr bool require_guard_regions = true;
CHECK(NewArrayBuffer(isolate, kWasmPageSize, require_guard_regions,
SharedFlag::kNotShared)
.ToHandle(&buffer)); .ToHandle(&buffer));
} }
} }
......
...@@ -40,9 +40,13 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) { ...@@ -40,9 +40,13 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) {
CHECK_EQ(0, mem_size_); CHECK_EQ(0, mem_size_);
DCHECK(!instance_object_->has_memory_object()); DCHECK(!instance_object_->has_memory_object());
test_module_.has_memory = true; test_module_.has_memory = true;
uint32_t alloc_size = RoundUp(size, kWasmPageSize); const bool enable_guard_regions =
trap_handler::IsTrapHandlerEnabled() && test_module_.is_wasm();
uint32_t alloc_size =
enable_guard_regions ? RoundUp(size, CommitPageSize()) : size;
Handle<JSArrayBuffer> new_buffer; Handle<JSArrayBuffer> new_buffer;
CHECK(wasm::NewArrayBuffer(isolate_, alloc_size).ToHandle(&new_buffer)); CHECK(wasm::NewArrayBuffer(isolate_, alloc_size, enable_guard_regions)
.ToHandle(&new_buffer));
CHECK(!new_buffer.is_null()); CHECK(!new_buffer.is_null());
mem_start_ = reinterpret_cast<byte*>(new_buffer->backing_store()); mem_start_ = reinterpret_cast<byte*>(new_buffer->backing_store());
mem_size_ = size; mem_size_ = size;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment