Commit c2835df6 authored by Clemens Hammacher, committed by Commit Bot

[wasm] Remove trap handler fallback

The trap handler fallback is flaky, and was never enabled since it
never worked reliably. This CL removes
a) the --wasm-trap-handler-fallback flag,
b) the distinction between soft and hard address space limit,
c) methods to check whether memory has guard regions (it will always
  have them on 64 bit architectures),
d) associated runtime functions,
e) the trap handler fallback tests,
f) recompilation logic for the fallback.

R=titzer@chromium.org

Bug: v8:8746
Change-Id: I7f4682b8cd5470906dd8579ff1fdc9b1a3c0f0e7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1570023
Reviewed-by: Ben Titzer <titzer@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60904}
parent dd29683f
......@@ -669,8 +669,6 @@ DEFINE_IMPLICATION(future, wasm_shared_code)
DEFINE_BOOL(wasm_trap_handler, true,
"use signal handlers to catch out of bounds memory access in wasm"
" (currently Linux x86_64 only)")
DEFINE_BOOL(wasm_trap_handler_fallback, false,
"Use bounds checks if guarded memory is not available")
DEFINE_BOOL(wasm_fuzzer_gen_test, false,
"Generate a test case when running a wasm fuzzer")
DEFINE_IMPLICATION(wasm_fuzzer_gen_test, single_threaded)
......
......@@ -1250,13 +1250,5 @@ RUNTIME_FUNCTION(Runtime_FreezeWasmLazyCompilation) {
return ReadOnlyRoots(isolate).undefined_value();
}
RUNTIME_FUNCTION(Runtime_WasmMemoryHasFullGuardRegion) {
DCHECK_EQ(1, args.length());
DisallowHeapAllocation no_gc;
CONVERT_ARG_CHECKED(WasmMemoryObject, memory, 0);
return isolate->heap()->ToBoolean(memory->has_full_guard_region(isolate));
}
} // namespace internal
} // namespace v8
......@@ -516,7 +516,6 @@ namespace internal {
F(WasmGetNumberOfInstances, 1, 1) \
F(WasmNumInterpretedCalls, 1, 1) \
F(WasmTraceMemory, 1, 1) \
F(WasmMemoryHasFullGuardRegion, 1, 1) \
F(SetWasmThreadsEnabled, 1, 1)
#define FOR_EACH_INTRINSIC_TYPEDARRAY(F, I) \
......
......@@ -1054,14 +1054,6 @@ std::shared_ptr<NativeModule> CompileToNativeModule(
return native_module;
}
void CompileNativeModuleWithExplicitBoundsChecks(Isolate* isolate,
ErrorThrower* thrower,
const WasmModule* wasm_module,
NativeModule* native_module) {
native_module->DisableTrapHandler();
CompileNativeModule(isolate, thrower, wasm_module, native_module);
}
AsyncCompileJob::AsyncCompileJob(
Isolate* isolate, const WasmFeatures& enabled,
std::unique_ptr<byte[]> bytes_copy, size_t length, Handle<Context> context,
......
......@@ -42,11 +42,6 @@ std::shared_ptr<NativeModule> CompileToNativeModule(
std::shared_ptr<const WasmModule> module, const ModuleWireBytes& wire_bytes,
Handle<FixedArray>* export_wrappers_out);
void CompileNativeModuleWithExplicitBoundsChecks(Isolate* isolate,
ErrorThrower* thrower,
const WasmModule* wasm_module,
NativeModule* native_module);
V8_EXPORT_PRIVATE
void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module,
Handle<FixedArray> export_wrappers);
......
......@@ -85,11 +85,6 @@ class InstanceBuilder {
Handle<WasmExportedFunction> start_function_;
std::vector<SanitizedImport> sanitized_imports_;
UseTrapHandler use_trap_handler() const {
return module_object_->native_module()->use_trap_handler() ? kUseTrapHandler
: kNoTrapHandler;
}
// Helper routines to print out errors with imports.
#define ERROR_THROWER_WITH_MESSAGE(TYPE) \
void Report##TYPE(const char* error, uint32_t index, \
......@@ -246,12 +241,11 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
// Record build time into correct bucket, then build instance.
TimedHistogramScope wasm_instantiate_module_time_scope(SELECT_WASM_COUNTER(
isolate_->counters(), module_->origin, wasm_instantiate, module_time));
NativeModule* native_module = module_object_->native_module();
//--------------------------------------------------------------------------
// Allocate the memory array buffer.
//--------------------------------------------------------------------------
// We allocate the memory buffer before cloning or reusing the compiled module
// so we will know whether we need to recompile with bounds checks.
uint32_t initial_pages = module_->initial_pages;
auto initial_pages_counter = SELECT_WASM_COUNTER(
isolate_->counters(), module_->origin, wasm, min_mem_pages_count);
......@@ -272,10 +266,11 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
Handle<JSArrayBuffer> memory = memory_.ToHandleChecked();
memory->set_is_detachable(false);
DCHECK_IMPLIES(use_trap_handler(), module_->origin == kAsmJsOrigin ||
memory->is_wasm_memory() ||
memory->backing_store() == nullptr);
} else if (initial_pages > 0 || use_trap_handler()) {
DCHECK_IMPLIES(native_module->use_trap_handler(),
module_->origin == kAsmJsOrigin ||
memory->is_wasm_memory() ||
memory->backing_store() == nullptr);
} else if (initial_pages > 0 || native_module->use_trap_handler()) {
// We need to unconditionally create a guard region if using trap handlers,
// even when the size is zero to prevent null-dereference issues
// (e.g. https://crbug.com/769637).
......@@ -288,39 +283,9 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
}
}
//--------------------------------------------------------------------------
// Recompile module if using trap handlers but could not get guarded memory
//--------------------------------------------------------------------------
if (module_->origin == kWasmOrigin && use_trap_handler()) {
// Make sure the memory has suitable guard regions.
WasmMemoryTracker* const memory_tracker =
isolate_->wasm_engine()->memory_tracker();
if (!memory_tracker->HasFullGuardRegions(
memory_.ToHandleChecked()->backing_store())) {
if (!FLAG_wasm_trap_handler_fallback) {
thrower_->LinkError(
"Provided memory is lacking guard regions but fallback was "
"disabled.");
return {};
}
TRACE("Recompiling module without bounds checks\n");
ErrorThrower thrower(isolate_, "recompile");
auto native_module = module_object_->native_module();
CompileNativeModuleWithExplicitBoundsChecks(isolate_, &thrower, module_,
native_module);
if (thrower.error()) {
return {};
}
DCHECK(!native_module->use_trap_handler());
}
}
//--------------------------------------------------------------------------
// Create the WebAssembly.Instance object.
//--------------------------------------------------------------------------
NativeModule* native_module = module_object_->native_module();
TRACE("New module instantiation for %p\n", native_module);
Handle<WasmInstanceObject> instance =
WasmInstanceObject::New(isolate_, module_object_);
......
......@@ -1057,20 +1057,6 @@ uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
return module_->num_imported_functions + slot_idx;
}
void NativeModule::DisableTrapHandler() {
// Switch {use_trap_handler_} from true to false.
DCHECK(use_trap_handler_);
use_trap_handler_ = kNoTrapHandler;
// Clear the code table (just to increase the chances to hit an error if we
// forget to re-add all code).
uint32_t num_wasm_functions = module_->num_declared_functions;
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
// TODO(clemensh): Actually free the owned code, such that the memory can be
// recycled.
}
const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name) \
if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
......@@ -1148,10 +1134,7 @@ VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
DCHECK_GT(size, 0);
size = RoundUp(size, page_allocator->AllocatePageSize());
if (!memory_tracker_->ReserveAddressSpace(size,
WasmMemoryTracker::kHardLimit)) {
return {};
}
if (!memory_tracker_->ReserveAddressSpace(size)) return {};
if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();
VirtualMemory mem(page_allocator, size, hint,
......
......@@ -342,14 +342,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
return jump_table_->contains(address);
}
// Transition this module from code relying on trap handlers (i.e. without
// explicit memory bounds checks) to code that does not require trap handlers
// (i.e. code with explicit bounds checks).
// This method must only be called if {use_trap_handler()} is true (it will be
// false afterwards). All code in this {NativeModule} needs to be re-added
// after calling this method.
void DisableTrapHandler();
// Returns the target to call for the given function (returns a jump table
// slot within {jump_table_}).
Address GetCallTargetForFunction(uint32_t func_index) const;
......
......@@ -27,16 +27,6 @@ void AddAllocationStatusSample(Isolate* isolate,
static_cast<int>(status));
}
size_t GetAllocationLength(uint32_t size, bool require_full_guard_regions) {
if (require_full_guard_regions) {
return RoundUp(kWasmMaxHeapOffset + kNegativeGuardSize, CommitPageSize());
} else {
return RoundUp(
base::bits::RoundUpToPowerOfTwo32(static_cast<uint32_t>(size)),
kWasmPageSize);
}
}
bool RunWithGCAndRetry(const std::function<bool()>& fn, Heap* heap,
bool* did_retry) {
// Try up to three times; getting rid of dead JSArrayBuffer allocations might
......@@ -60,15 +50,13 @@ void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
size_t* allocation_length) {
using AllocationStatus = WasmMemoryTracker::AllocationStatus;
#if V8_TARGET_ARCH_64_BIT
bool require_full_guard_regions = true;
constexpr bool kRequireFullGuardRegions = true;
#else
bool require_full_guard_regions = false;
constexpr bool kRequireFullGuardRegions = false;
#endif
// Let the WasmMemoryTracker know we are going to reserve a bunch of
// address space.
// TODO(7881): do not use static_cast<uint32_t>() here
uint32_t reservation_size =
static_cast<uint32_t>((max_size > size) ? max_size : size);
size_t reservation_size = std::max(max_size, size);
bool did_retry = false;
auto reserve_memory_space = [&] {
......@@ -79,40 +67,32 @@ void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
// To protect against 32-bit integer overflow issues, we also
// protect the 2GiB before the valid part of the memory buffer.
*allocation_length =
GetAllocationLength(reservation_size, require_full_guard_regions);
kRequireFullGuardRegions
? RoundUp(kWasmMaxHeapOffset + kNegativeGuardSize, CommitPageSize())
: RoundUp(base::bits::RoundUpToPowerOfTwo(reservation_size),
kWasmPageSize);
DCHECK_GE(*allocation_length, size);
DCHECK_GE(*allocation_length, kWasmPageSize);
auto limit = require_full_guard_regions ? WasmMemoryTracker::kSoftLimit
: WasmMemoryTracker::kHardLimit;
return memory_tracker->ReserveAddressSpace(*allocation_length, limit);
return memory_tracker->ReserveAddressSpace(*allocation_length);
};
if (!RunWithGCAndRetry(reserve_memory_space, heap, &did_retry)) {
// Reset reservation_size to initial size so that at least the initial size
// can be allocated if maximum size reservation is not possible.
reservation_size = static_cast<uint32_t>(size);
// If we fail to allocate guard regions and the fallback is enabled, then
// retry without full guard regions.
bool fail = true;
if (require_full_guard_regions && FLAG_wasm_trap_handler_fallback) {
require_full_guard_regions = false;
fail = !RunWithGCAndRetry(reserve_memory_space, heap, &did_retry);
}
if (fail) {
// We are over the address space limit. Fail.
//
// When running under the correctness fuzzer (i.e.
// --abort-on-stack-or-string-length-overflow is preset), we crash
// instead so it is not incorrectly reported as a correctness
// violation. See https://crbug.com/828293#c4
if (FLAG_abort_on_stack_or_string_length_overflow) {
FATAL("could not allocate wasm memory");
}
AddAllocationStatusSample(
heap->isolate(), AllocationStatus::kAddressSpaceLimitReachedFailure);
return nullptr;
reservation_size = size;
// We are over the address space limit. Fail.
//
// When running under the correctness fuzzer (i.e.
// --abort-on-stack-or-string-length-overflow is preset), we crash
// instead so it is not incorrectly reported as a correctness
// violation. See https://crbug.com/828293#c4
if (FLAG_abort_on_stack_or_string_length_overflow) {
FATAL("could not allocate wasm memory");
}
AddAllocationStatusSample(
heap->isolate(), AllocationStatus::kAddressSpaceLimitReachedFailure);
return nullptr;
}
// The Reserve makes the whole region inaccessible by default.
......@@ -130,7 +110,7 @@ void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
}
byte* memory = reinterpret_cast<byte*>(*allocation_base);
if (require_full_guard_regions) {
if (kRequireFullGuardRegions) {
memory += kNegativeGuardSize;
}
......@@ -157,14 +137,11 @@ void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
#if V8_TARGET_ARCH_MIPS64
// MIPS64 has a user space of 2^40 bytes on most processors,
// address space limits needs to be smaller.
constexpr size_t kAddressSpaceSoftLimit = 0x2100000000L; // 132 GiB
constexpr size_t kAddressSpaceHardLimit = 0x4000000000L; // 256 GiB
constexpr size_t kAddressSpaceLimit = 0x4000000000L; // 256 GiB
#elif V8_TARGET_ARCH_64_BIT
constexpr size_t kAddressSpaceSoftLimit = 0x6000000000L; // 384 GiB
constexpr size_t kAddressSpaceHardLimit = 0x10100000000L; // 1 TiB + 4 GiB
constexpr size_t kAddressSpaceLimit = 0x10100000000L; // 1 TiB + 4 GiB
#else
constexpr size_t kAddressSpaceSoftLimit = 0x90000000; // 2 GiB + 256 MiB
constexpr size_t kAddressSpaceHardLimit = 0xC0000000; // 3 GiB
constexpr size_t kAddressSpaceLimit = 0xC0000000; // 3 GiB
#endif
} // namespace
......@@ -192,10 +169,8 @@ void WasmMemoryTracker::FreeBackingStoreForTesting(base::AddressRegion memory,
reinterpret_cast<void*>(memory.begin()), memory.size()));
}
bool WasmMemoryTracker::ReserveAddressSpace(size_t num_bytes,
ReservationLimit limit) {
size_t reservation_limit =
limit == kSoftLimit ? kAddressSpaceSoftLimit : kAddressSpaceHardLimit;
bool WasmMemoryTracker::ReserveAddressSpace(size_t num_bytes) {
size_t reservation_limit = kAddressSpaceLimit;
while (true) {
size_t old_count = reserved_address_space_.load();
if (old_count > reservation_limit) return false;
......@@ -269,21 +244,6 @@ bool WasmMemoryTracker::IsWasmSharedMemory(const void* buffer_start) {
return (result != allocations_.end() && result->second.is_shared);
}
bool WasmMemoryTracker::HasFullGuardRegions(const void* buffer_start) {
base::MutexGuard scope_lock(&mutex_);
const auto allocation = allocations_.find(buffer_start);
if (allocation == allocations_.end()) {
return false;
}
Address start = reinterpret_cast<Address>(buffer_start);
Address limit =
reinterpret_cast<Address>(allocation->second.allocation_base) +
allocation->second.allocation_length;
return start + kWasmMaxHeapOffset < limit;
}
void WasmMemoryTracker::MarkWasmMemoryNotGrowable(
Handle<JSArrayBuffer> buffer) {
base::MutexGuard scope_lock(&mutex_);
......
......@@ -31,10 +31,7 @@ class WasmMemoryTracker {
// ReserveAddressSpace attempts to increase the reserved address space counter
// by {num_bytes}. Returns true if successful (meaning it is okay to go ahead
// and reserve {num_bytes} bytes), false otherwise.
// Use {kSoftLimit} if you can implement a fallback which needs less reserved
// memory.
enum ReservationLimit { kSoftLimit, kHardLimit };
bool ReserveAddressSpace(size_t num_bytes, ReservationLimit limit);
bool ReserveAddressSpace(size_t num_bytes);
void RegisterAllocation(Isolate* isolate, void* allocation_base,
size_t allocation_length, void* buffer_start,
......@@ -105,10 +102,6 @@ class WasmMemoryTracker {
bool IsWasmSharedMemory(const void* buffer_start);
// Returns whether the given buffer is a Wasm memory with guard regions large
// enough to safely use trap handlers.
bool HasFullGuardRegions(const void* buffer_start);
// Returns a pointer to a Wasm buffer's allocation data, or nullptr if the
// buffer is not tracked.
V8_EXPORT_PRIVATE const AllocationData* FindAllocationData(
......
......@@ -1092,16 +1092,7 @@ MaybeHandle<JSArrayBuffer> MemoryGrowBuffer(Isolate* isolate,
if (!wasm::NewArrayBuffer(isolate, new_size).ToHandle(&new_buffer)) {
return {};
}
wasm::WasmMemoryTracker* const memory_tracker =
isolate->wasm_engine()->memory_tracker();
// If the old buffer had full guard regions, we can only safely use the new
// buffer if it also has full guard regions. Otherwise, we'd have to
// recompile all the instances using this memory to insert bounds checks.
void* old_mem_start = old_buffer->backing_store();
if (memory_tracker->HasFullGuardRegions(old_mem_start) &&
!memory_tracker->HasFullGuardRegions(new_buffer->backing_store())) {
return {};
}
size_t old_size = old_buffer->byte_length();
if (old_size == 0) return new_buffer;
memcpy(new_buffer->backing_store(), old_mem_start, old_size);
......@@ -1192,30 +1183,6 @@ MaybeHandle<WasmMemoryObject> WasmMemoryObject::New(Isolate* isolate,
return New(isolate, buffer, maximum);
}
bool WasmMemoryObject::has_full_guard_region(Isolate* isolate) {
const wasm::WasmMemoryTracker::AllocationData* allocation =
isolate->wasm_engine()->memory_tracker()->FindAllocationData(
array_buffer()->backing_store());
CHECK_NOT_NULL(allocation);
Address allocation_base =
reinterpret_cast<Address>(allocation->allocation_base);
Address buffer_start = reinterpret_cast<Address>(allocation->buffer_start);
// Return whether the allocation covers every possible Wasm heap index.
//
// We always have the following relationship:
// allocation_base <= buffer_start <= buffer_start + memory_size <=
// allocation_base + allocation_length
// (in other words, the buffer fits within the allocation)
//
// The space between buffer_start + memory_size and allocation_base +
// allocation_length is the guard region. Here we make sure the guard region
// is large enough for any Wasm heap offset.
return buffer_start + wasm::kWasmMaxHeapOffset <=
allocation_base + allocation->allocation_length;
}
void WasmMemoryObject::AddInstance(Isolate* isolate,
Handle<WasmMemoryObject> memory,
Handle<WasmInstanceObject> instance) {
......
......@@ -335,10 +335,6 @@ class WasmMemoryObject : public JSObject {
Handle<WasmInstanceObject> object);
inline bool has_maximum_pages();
// Return whether the underlying backing store has guard regions large enough
// to be used with trap handlers.
bool has_full_guard_region(Isolate* isolate);
V8_EXPORT_PRIVATE static Handle<WasmMemoryObject> New(
Isolate* isolate, MaybeHandle<JSArrayBuffer> buffer, uint32_t maximum);
......
......@@ -235,9 +235,6 @@
# BUG(v8:8169)
'external-backing-store-gc': [SKIP],
# BUG(v8:8746)
'wasm/trap-handler-fallback': [PASS, FAIL],
}], # ALWAYS
['novfp3 == True', {
......
......@@ -178,7 +178,7 @@ function testAddressSpaceLimit() {
// 1TiB + 4 GiB, see wasm-memory.h
const kMaxAddressSpace = 1 * 1024 * 1024 * 1024 * 1024
+ 4 * 1024 * 1024 * 1024;
const kAddressSpacePerMemory = 8 * 1024 * 1024 * 1024;
const kAddressSpacePerMemory = 10 * 1024 * 1024 * 1024;
let last_memory;
try {
......@@ -193,10 +193,7 @@ function testAddressSpaceLimit() {
assertTrue(e instanceof RangeError);
return;
}
// If we get here it's because our fallback behavior is working. We may not
// be using the fallback, in which case we would have thrown a RangeError in
// the previous block.
assertTrue(!%WasmMemoryHasFullGuardRegion(last_memory));
assertUnreachable("should have reached the address space limit");
}
if(%IsWasmTrapHandlerEnabled()) {
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --wasm-trap-handler-fallback
load("test/mjsunit/wasm/wasm-module-builder.js");
// Make sure we can get at least one guard region if the trap handler is enabled.
(function CanGetGuardRegionTest() {
print("CanGetGuardRegionTest()");
const memory = new WebAssembly.Memory({initial: 1});
if (%IsWasmTrapHandlerEnabled()) {
assertTrue(%WasmMemoryHasFullGuardRegion(memory));
}
})();
// This test verifies that when we have too many outstanding memories to get
// another fast memory, we fall back on bounds checking rather than failing.
(function TrapHandlerFallbackTest() {
print("TrapHandlerFallbackTest()");
let builder = new WasmModuleBuilder();
builder.addImportedMemory("mod", "imported_mem", 1);
builder.addFunction("load", kSig_i_i)
.addBody([kExprGetLocal, 0, kExprI32LoadMem, 0, 0])
.exportFunc();
let memory;
let instance;
let instances = [];
let fallback_occurred = false;
// Create 135 instances. V8 limits wasm to slightly more than 1 TiB of address
// space per isolate (see kAddressSpaceLimit in wasm-memory.cc), which allows
// up to 128 fast memories. As long as we create more than that, we should
// trigger the fallback behavior.
for (var i = 0; i < 135 && !fallback_occurred; i++) {
memory = new WebAssembly.Memory({initial: 1});
instance = builder.instantiate({mod: {imported_mem: memory}});
instances.push(instance);
assertTraps(kTrapMemOutOfBounds, () => instance.exports.load(1 << 20));
fallback_occurred = !%WasmMemoryHasFullGuardRegion(memory);
}
assertTrue(fallback_occurred);
})();
(function TrapHandlerFallbackTestZeroInitialMemory() {
print("TrapHandlerFallbackTestZeroInitialMemory()");
let builder = new WasmModuleBuilder();
builder.addImportedMemory("mod", "imported_mem", 0);
builder.addFunction("load", kSig_i_i)
.addBody([kExprGetLocal, 0, kExprI32LoadMem, 0, 0])
.exportFunc();
let memory;
let instance;
let instances = [];
let fallback_occurred = false;
// Create 135 instances. V8 limits wasm to slightly more than 1 TiB of address
// space per isolate (see kAddressSpaceLimit in wasm-memory.cc), which allows
// up to 128 fast memories. As long as we create more than that, we should
// trigger the fallback behavior.
for (var i = 0; i < 135 && !fallback_occurred; i++) {
memory = new WebAssembly.Memory({initial: 1});
instance = builder.instantiate({mod: {imported_mem: memory}});
instances.push(instance);
assertTraps(kTrapMemOutOfBounds, () => instance.exports.load(1 << 20));
fallback_occurred = !%WasmMemoryHasFullGuardRegion(memory);
}
assertTrue(fallback_occurred);
})();
(function TrapHandlerFallbackTestGrowFromZero() {
print("TrapHandlerFallbackTestGrowFromZero()");
// Create a zero-length memory to make sure the empty backing store is created.
const zero_memory = new WebAssembly.Memory({initial: 0});
// Create enough memories to overflow the address space limit
let memories = []
for (var i = 0; i < 135; i++) {
memories.push(new WebAssembly.Memory({initial: 1}));
}
// Create a memory for the module. We'll grow this later.
let memory = new WebAssembly.Memory({initial: 0});
let builder = new WasmModuleBuilder();
builder.addImportedMemory("mod", "imported_mem", 0);
builder.addFunction("load", kSig_i_i)
.addBody([kExprGetLocal, 0, kExprI32LoadMem, 0, 0])
.exportFunc();
instance = builder.instantiate({mod: {imported_mem: memory}});
assertTraps(kTrapMemOutOfBounds, () => instance.exports.load(1 << 20));
try {
memory.grow(1);
} catch(e) {
if (typeof e == typeof new RangeError) {
return;
}
throw e;
}
assertTraps(kTrapMemOutOfBounds, () => instance.exports.load(1 << 20));
})();
// Like TrapHandlerFallbackTest, but allows the module to be reused, so we only
// have to recompile once.
(function TrapHandlerFallbackTestReuseModule() {
print("TrapHandlerFallbackTestReuseModule()");
let builder = new WasmModuleBuilder();
builder.addImportedMemory("mod", "imported_mem", 1);
builder.addFunction("load", kSig_i_i)
.addBody([kExprGetLocal, 0, kExprI32LoadMem, 0, 0])
.exportFunc();
let memory;
let instance;
let instances = [];
let fallback_occurred = false;
// Create 135 instances. V8 limits wasm to slightly more than 1 TiB of address
// space per isolate (see kAddressSpaceLimit in wasm-memory.cc), which allows
// up to 128 fast memories. As long as we create more than that, we should
// trigger the fallback behavior.
const module = builder.toModule();
for (var i = 0; i < 135 && !fallback_occurred; i++) {
memory = new WebAssembly.Memory({initial: 1});
instance = new WebAssembly.Instance(module, {mod: {imported_mem: memory}});
instances.push(instance);
assertTraps(kTrapMemOutOfBounds, () => instance.exports.load(1 << 20));
fallback_occurred = !%WasmMemoryHasFullGuardRegion(memory);
}
assertTrue(fallback_occurred);
})();
// Make sure that a bounds checked instance still works when calling an
// imported unchecked function.
(function CallIndirectImportTest() {
print("CallIndirectImportTest()");
// Create an unchecked instance that calls a function through an indirect
// table.
const instance_a = (() => {
const builder = new WasmModuleBuilder();
builder.addMemory(1, 1, false);
builder.addFunction("read_mem", kSig_i_i)
.addBody([
kExprGetLocal, 0,
kExprI32LoadMem, 0, 0
]).exportAs("read_mem");
return builder.instantiate();
})();
// Create new memories until we get one that is unguarded
let memories = [];
let memory;
for (var i = 0; i < 135; i++) {
memory = new WebAssembly.Memory({initial: 1});
memories.push(memory);
if (!%WasmMemoryHasFullGuardRegion(memory)) {
break;
}
}
assertFalse(%WasmMemoryHasFullGuardRegion(memory));
// create a module that imports a function through a table
const instance_b = (() => {
const builder = new WasmModuleBuilder();
builder.addFunction("main", kSig_i_i)
.addBody([
kExprGetLocal, 0,
kExprI32Const, 0,
kExprCallIndirect, 0, kTableZero
]).exportAs("main");
builder.addImportedTable("env", "table", 1, 1);
const module = new WebAssembly.Module(builder.toBuffer());
const table = new WebAssembly.Table({
element: "anyfunc",
initial: 1, maximum: 1
});
// Hook the new instance's export into the old instance's table.
table.set(0, instance_a.exports.read_mem);
return new WebAssembly.Instance(module, {'env': { 'table': table }});
})();
// Make sure we get an out of bounds still.
assertTraps(kTrapMemOutOfBounds, () => instance_b.exports.main(100000));
})();
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment