Commit 5e76ff5a authored by Eric Holk (eholk), committed by Commit Bot

Reland "[wasm] always allocate memory when guard regions are needed"

This reverts commit 7cf29d8d.

Original change's description:
> [wasm] always allocate memory when guard regions are needed
>
> When using trap handlers, memory references do not get any checks inserted. This
> means there is no check for a null memory as happens when the memory size is
> 0. Normally this would be correctly caught as an out of bounds access, since the
> low memory addresses are not normally mapped. However, if they were mapped for
> some reason, we would not catch the out of bounds access.
>
> The fix is to ensure WebAssembly instances always have a guard region even if
> the memory is size 0.
>
> Bug: chromium:769637

Change-Id: I09fdaea92b7ccb3a6cc9e28392171ec098538a00
Reviewed-on: https://chromium-review.googlesource.com/695812
Commit-Queue: Eric Holk <eholk@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48293}
parent 56dc5925
...@@ -17,7 +17,7 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) { ...@@ -17,7 +17,7 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
void* data = buffer->backing_store(); void* data = buffer->backing_store();
if (!data) return; if (!data) return;
size_t length = buffer->allocation_length(); size_t length = static_cast<size_t>(buffer->byte_length()->Number());
Page* page = Page::FromAddress(buffer->address()); Page* page = Page::FromAddress(buffer->address());
{ {
base::LockGuard<base::RecursiveMutex> guard(page->mutex()); base::LockGuard<base::RecursiveMutex> guard(page->mutex());
...@@ -40,7 +40,7 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) { ...@@ -40,7 +40,7 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
if (!data) return; if (!data) return;
Page* page = Page::FromAddress(buffer->address()); Page* page = Page::FromAddress(buffer->address());
size_t length = buffer->allocation_length(); size_t length = static_cast<size_t>(buffer->byte_length()->Number());
{ {
base::LockGuard<base::RecursiveMutex> guard(page->mutex()); base::LockGuard<base::RecursiveMutex> guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker(); LocalArrayBufferTracker* tracker = page->local_tracker();
...@@ -50,6 +50,22 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) { ...@@ -50,6 +50,22 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
heap->update_external_memory(-static_cast<intptr_t>(length)); heap->update_external_memory(-static_cast<intptr_t>(length));
} }
// Records that |buffer|'s backing store grew in place by |delta| bytes.
// The buffer must already have a backing store and already be registered
// with the local tracker of its page. Bumps the page-local retained size
// under the page lock, then adjusts the heap's external-memory counter.
void ArrayBufferTracker::IncreaseArrayBufferSize(Heap* heap,
                                                 JSArrayBuffer* buffer,
                                                 size_t delta) {
  DCHECK_NOT_NULL(buffer->backing_store());
  Page* const page = Page::FromAddress(buffer->address());
  {
    // The recursive page mutex serializes access to the page-local tracker,
    // matching the locking discipline of RegisterNew/Unregister above.
    base::LockGuard<base::RecursiveMutex> guard(page->mutex());
    LocalArrayBufferTracker* tracker = page->local_tracker();
    DCHECK_NOT_NULL(tracker);
    DCHECK(tracker->IsTracked(buffer));
    tracker->IncreaseRetainedSize(delta);
  }
  // External memory accounting is updated outside the page lock.
  // NOTE(review): |delta| is size_t while Unregister passes a negated
  // intptr_t; presumably the implicit conversion here is intended — confirm.
  heap->update_external_memory(delta);
}
template <typename Callback> template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) { void LocalArrayBufferTracker::Free(Callback should_free) {
size_t freed_memory = 0; size_t freed_memory = 0;
...@@ -57,7 +73,7 @@ void LocalArrayBufferTracker::Free(Callback should_free) { ...@@ -57,7 +73,7 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
for (TrackingData::iterator it = array_buffers_.begin(); for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) { it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(*it); JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(*it);
const size_t length = buffer->allocation_length(); const size_t length = static_cast<size_t>(buffer->byte_length()->Number());
if (should_free(buffer)) { if (should_free(buffer)) {
freed_memory += length; freed_memory += length;
buffer->FreeBackingStore(); buffer->FreeBackingStore();
...@@ -106,6 +122,11 @@ void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) { ...@@ -106,6 +122,11 @@ void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
array_buffers_.erase(it); array_buffers_.erase(it);
} }
// Grows the tracked retained-byte count by |delta|.
// The addition must not wrap around size_t; this is enforced in debug builds.
void LocalArrayBufferTracker::IncreaseRetainedSize(size_t delta) {
  const size_t grown = retained_size_ + delta;
  DCHECK_GE(grown, retained_size_);  // Guard against size_t overflow.
  retained_size_ = grown;
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -33,6 +33,10 @@ class ArrayBufferTracker : public AllStatic { ...@@ -33,6 +33,10 @@ class ArrayBufferTracker : public AllStatic {
// access to the tracker by taking the page lock for the corresponding page. // access to the tracker by taking the page lock for the corresponding page.
inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer); inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
inline static void Unregister(Heap* heap, JSArrayBuffer* buffer); inline static void Unregister(Heap* heap, JSArrayBuffer* buffer);
// Tells the tracker that the array buffer has increased in size. The buffer
// must already be tracked.
inline static void IncreaseArrayBufferSize(Heap* heap, JSArrayBuffer* buffer,
size_t delta);
// Frees all backing store pointers for dead JSArrayBuffers in new space. // Frees all backing store pointers for dead JSArrayBuffers in new space.
// Does not take any locks and can only be called during Scavenge. // Does not take any locks and can only be called during Scavenge.
...@@ -73,6 +77,7 @@ class LocalArrayBufferTracker { ...@@ -73,6 +77,7 @@ class LocalArrayBufferTracker {
inline void Add(JSArrayBuffer* buffer, size_t length); inline void Add(JSArrayBuffer* buffer, size_t length);
inline void Remove(JSArrayBuffer* buffer, size_t length); inline void Remove(JSArrayBuffer* buffer, size_t length);
inline void IncreaseRetainedSize(size_t delta);
// Frees up array buffers. // Frees up array buffers.
// //
......
...@@ -2341,6 +2341,10 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) { ...@@ -2341,6 +2341,10 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
ArrayBufferTracker::Unregister(this, buffer); ArrayBufferTracker::Unregister(this, buffer);
} }
// Notifies the array buffer tracker that |buffer|'s backing store grew in
// place by |delta| bytes (used when wasm memory grows without reallocating).
// The buffer must already be tracked; see ArrayBufferTracker.
void Heap::TrackIncreasedArrayBufferSize(JSArrayBuffer* buffer, size_t delta) {
  ArrayBufferTracker::IncreaseArrayBufferSize(this, buffer, delta);
}
void Heap::ConfigureInitialOldGenerationSize() { void Heap::ConfigureInitialOldGenerationSize() {
if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) { if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
old_generation_allocation_limit_ = old_generation_allocation_limit_ =
......
...@@ -1474,6 +1474,7 @@ class Heap { ...@@ -1474,6 +1474,7 @@ class Heap {
// unregistered buffer, too, and the name is confusing. // unregistered buffer, too, and the name is confusing.
void RegisterNewArrayBuffer(JSArrayBuffer* buffer); void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
void UnregisterArrayBuffer(JSArrayBuffer* buffer); void UnregisterArrayBuffer(JSArrayBuffer* buffer);
void TrackIncreasedArrayBufferSize(JSArrayBuffer* buffer, size_t delta);
// =========================================================================== // ===========================================================================
// Allocation site tracking. ================================================= // Allocation site tracking. =================================================
......
...@@ -1807,7 +1807,10 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { ...@@ -1807,7 +1807,10 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
DCHECK_IMPLIES(EnableGuardRegions(), DCHECK_IMPLIES(EnableGuardRegions(),
module_->is_asm_js() || memory->has_guard_region()); module_->is_asm_js() || memory->has_guard_region());
} else if (initial_pages > 0) { } else if (initial_pages > 0 || EnableGuardRegions()) {
// We need to unconditionally create a guard region if using trap handlers,
// even when the size is zero to prevent null-dereference issues
// (e.g. https://crbug.com/769637).
memory_ = AllocateMemory(initial_pages); memory_ = AllocateMemory(initial_pages);
if (memory_.is_null()) return {}; // failed to allocate memory if (memory_.is_null()) return {}; // failed to allocate memory
} }
......
...@@ -88,11 +88,11 @@ Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size, ...@@ -88,11 +88,11 @@ Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
void* allocation_base = nullptr; // Set by TryAllocateBackingStore void* allocation_base = nullptr; // Set by TryAllocateBackingStore
size_t allocation_length = 0; // Set by TryAllocateBackingStore size_t allocation_length = 0; // Set by TryAllocateBackingStore
// Do not reserve memory till non zero memory is encountered. // Normally we would avoid calling TryAllocateBackingStore at all for
void* memory = // zero-sized memories. This is tricky with guard pages. Instead, this logic
(size == 0) ? nullptr // for when to allocate lives inside TryAllocateBackingStore.
: TryAllocateBackingStore(isolate, size, enable_guard_regions, void* memory = TryAllocateBackingStore(isolate, size, enable_guard_regions,
allocation_base, allocation_length); allocation_base, allocation_length);
if (size > 0 && memory == nullptr) { if (size > 0 && memory == nullptr) {
return Handle<JSArrayBuffer>::null(); return Handle<JSArrayBuffer>::null();
......
...@@ -331,8 +331,8 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate, ...@@ -331,8 +331,8 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
const bool enable_guard_regions = old_buffer.is_null() const bool enable_guard_regions = old_buffer.is_null()
? wasm::EnableGuardRegions() ? wasm::EnableGuardRegions()
: old_buffer->has_guard_region(); : old_buffer->has_guard_region();
size_t new_size = const uint32_t size_increase = pages * WasmModule::kPageSize;
static_cast<size_t>(old_pages + pages) * WasmModule::kPageSize; const uint32_t new_size = old_size + size_increase;
if (enable_guard_regions && old_size != 0) { if (enable_guard_regions && old_size != 0) {
DCHECK_NOT_NULL(old_buffer->backing_store()); DCHECK_NOT_NULL(old_buffer->backing_store());
if (new_size > FLAG_wasm_max_mem_pages * WasmModule::kPageSize || if (new_size > FLAG_wasm_max_mem_pages * WasmModule::kPageSize ||
...@@ -346,6 +346,10 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate, ...@@ -346,6 +346,10 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
->AdjustAmountOfExternalAllocatedMemory(pages * WasmModule::kPageSize); ->AdjustAmountOfExternalAllocatedMemory(pages * WasmModule::kPageSize);
Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(new_size); Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(new_size);
old_buffer->set_byte_length(*length_obj); old_buffer->set_byte_length(*length_obj);
if (!old_buffer->is_external()) {
isolate->heap()->TrackIncreasedArrayBufferSize(*old_buffer,
size_increase);
}
return old_buffer; return old_buffer;
} else { } else {
Handle<JSArrayBuffer> new_buffer; Handle<JSArrayBuffer> new_buffer;
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Regression test (see https://crbug.com/769637): with trap handlers enabled,
// a wasm instance must be backed by a guard region even when its memory has
// no pages, so an out-of-bounds store traps instead of touching low memory.
load('test/mjsunit/wasm/wasm-constants.js');
load('test/mjsunit/wasm/wasm-module-builder.js');
// addMemory() with no arguments — presumably declares a zero-page memory,
// which is exactly the regression scenario; confirm against the builder.
let builder = new WasmModuleBuilder();
builder
  .addMemory()
  .addFunction("main", kSig_v_v)
  // Store the value 8 at address 4 with static offset 16 (alignment 0):
  // any access is out of bounds for an empty memory.
  .addBody([kExprI32Const, 4,
            kExprI32Const, 8,
            kExprI32StoreMem, 0, 16])
  .exportAs("main");
let instance = builder.instantiate();
// The store must raise a wasm out-of-bounds trap, not succeed or crash.
assertTraps(kTrapMemOutOfBounds, instance.exports.main);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment