Commit 34c8443c authored by Ben Smith, committed by Commit Bot

[wasm][bulk-memory] Check segment bounds lazily

The bulk memory proposal changed the behavior of segment initialization
during instantiation. Previously, all segments were bounds-checked up
front, and only then initialized.

The bulk memory proposal removes the up-front check and always
initializes active segments in order, starting with element segments and
then continuing with data segments. Each active segment is initialized
as if it were written by the `memory.init` and `table.init` instructions,
so an out-of-bounds initialization may still partially modify the memory
or table.
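As a rough illustration of the data-segment path this implies, a
clamp-then-copy helper might look like the sketch below. The names mirror
the ClampToBounds/memory_copy_wrapper pair used in the change, but the
bodies and signatures here are assumptions for illustration, not V8's
actual implementation.

// Minimal sketch (illustrative only): copy the in-bounds prefix of a
// segment, then report the failure afterwards.
#include <cstdint>
#include <cstring>

// Clamps *size so that [offset, offset + *size) stays within [0, limit).
// Returns false if the range had to be clamped (i.e. it was out of bounds).
static bool ClampToBounds(uint32_t offset, uint32_t* size, uint32_t limit) {
  if (offset > limit) {
    *size = 0;
    return false;
  }
  uint32_t available = limit - offset;
  if (*size > available) {
    *size = available;
    return false;
  }
  return true;
}

// Lazily initializes one data segment: the in-bounds prefix is written even
// when the segment extends past the end of memory; the caller turns a false
// return value into a LinkError.
static bool InitDataSegment(uint8_t* memory, uint32_t memory_size,
                            const uint8_t* src, uint32_t src_size,
                            uint32_t dest_offset) {
  uint32_t size = src_size;
  bool ok = ClampToBounds(dest_offset, &size, memory_size);
  if (size > 0) std::memcpy(memory + dest_offset, src, size);
  return ok;
}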

Bug: v8:8892
Change-Id: I472fca2401e07d60b288f0cc745629a451b31088
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1565033
Commit-Queue: Ben Smith <binji@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60885}
parent 3f88ea39
@@ -11,6 +11,7 @@
#include "src/tracing/trace-event.h"
#include "src/utils.h"
#include "src/wasm/module-compiler.h"
#include "src/wasm/wasm-external-refs.h"
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
@@ -445,34 +446,39 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
}
}
//--------------------------------------------------------------------------
// Check that indirect function table segments are within bounds.
//--------------------------------------------------------------------------
for (const WasmElemSegment& elem_segment : module_->elem_segments) {
if (!elem_segment.active) continue;
DCHECK_LT(elem_segment.table_index, table_count);
uint32_t base = EvalUint32InitExpr(instance, elem_segment.offset);
// Because of imported tables, {table_size} has to come from the table
// object itself.
auto table_object = handle(WasmTableObject::cast(instance->tables()->get(
elem_segment.table_index)),
isolate_);
size_t table_size = table_object->elements()->length();
if (!IsInBounds(base, elem_segment.entries.size(), table_size)) {
thrower_->LinkError("table initializer is out of bounds");
return {};
// The bulk memory proposal changes the MVP behavior here; the segments are
// written as if `memory.init` and `table.init` are executed directly, and
// not bounds checked ahead of time.
if (!enabled_.bulk_memory) {
//--------------------------------------------------------------------------
// Check that indirect function table segments are within bounds.
//--------------------------------------------------------------------------
for (const WasmElemSegment& elem_segment : module_->elem_segments) {
if (!elem_segment.active) continue;
DCHECK_LT(elem_segment.table_index, table_count);
uint32_t base = EvalUint32InitExpr(instance, elem_segment.offset);
// Because of imported tables, {table_size} has to come from the table
// object itself.
auto table_object = handle(WasmTableObject::cast(instance->tables()->get(
elem_segment.table_index)),
isolate_);
size_t table_size = table_object->elements()->length();
if (!IsInBounds(base, elem_segment.entries.size(), table_size)) {
thrower_->LinkError("table initializer is out of bounds");
return {};
}
}
}
//--------------------------------------------------------------------------
// Check that memory segments are within bounds.
//--------------------------------------------------------------------------
for (const WasmDataSegment& seg : module_->data_segments) {
if (!seg.active) continue;
uint32_t base = EvalUint32InitExpr(instance, seg.dest_addr);
if (!IsInBounds(base, seg.source.length(), instance->memory_size())) {
thrower_->LinkError("data segment is out of bounds");
return {};
//--------------------------------------------------------------------------
// Check that memory segments are within bounds.
//--------------------------------------------------------------------------
for (const WasmDataSegment& seg : module_->data_segments) {
if (!seg.active) continue;
uint32_t base = EvalUint32InitExpr(instance, seg.dest_addr);
if (!IsInBounds(base, seg.source.length(), instance->memory_size())) {
thrower_->LinkError("data segment is out of bounds");
return {};
}
}
}
@@ -487,6 +493,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
//--------------------------------------------------------------------------
if (table_count > 0) {
LoadTableSegments(instance);
if (thrower_->error()) return {};
}
//--------------------------------------------------------------------------
@@ -494,6 +501,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
//--------------------------------------------------------------------------
if (module_->data_segments.size() > 0) {
LoadDataSegments(instance);
if (thrower_->error()) return {};
}
//--------------------------------------------------------------------------
@@ -617,16 +625,35 @@ void InstanceBuilder::LoadDataSegments(Handle<WasmInstanceObject> instance) {
Vector<const uint8_t> wire_bytes =
module_object_->native_module()->wire_bytes();
for (const WasmDataSegment& segment : module_->data_segments) {
uint32_t source_size = segment.source.length();
// Segments of size == 0 are just nops.
if (source_size == 0) continue;
// Passive segments are not copied during instantiation.
if (!segment.active) continue;
uint32_t dest_offset = EvalUint32InitExpr(instance, segment.dest_addr);
DCHECK(IsInBounds(dest_offset, source_size, instance->memory_size()));
byte* dest = instance->memory_start() + dest_offset;
const byte* src = wire_bytes.start() + segment.source.offset();
memcpy(dest, src, source_size);
uint32_t size = segment.source.length();
if (enabled_.bulk_memory) {
// Passive segments are not copied during instantiation.
if (!segment.active) continue;
uint32_t dest_offset = EvalUint32InitExpr(instance, segment.dest_addr);
bool ok = ClampToBounds(dest_offset, &size,
static_cast<uint32_t>(instance->memory_size()));
Address dest_addr =
reinterpret_cast<Address>(instance->memory_start()) + dest_offset;
Address src_addr = reinterpret_cast<Address>(wire_bytes.start()) +
segment.source.offset();
memory_copy_wrapper(dest_addr, src_addr, size);
if (!ok) {
thrower_->LinkError("data segment is out of bounds");
return;
}
} else {
DCHECK(segment.active);
// Segments of size == 0 are just nops.
if (size == 0) continue;
uint32_t dest_offset = EvalUint32InitExpr(instance, segment.dest_addr);
DCHECK(IsInBounds(dest_offset, size, instance->memory_size()));
byte* dest = instance->memory_start() + dest_offset;
const byte* src = wire_bytes.start() + segment.source.offset();
memcpy(dest, src, size);
}
}
}
@@ -1539,7 +1566,17 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
instance->tables()->get(elem_segment.table_index)),
isolate_),
elem_segment, dst, src, count);
CHECK(success);
if (enabled_.bulk_memory) {
if (!success) {
thrower_->LinkError("table initializer is out of bounds");
// Break out instead of returning; we don't want to continue to
// initialize any further element segments, but still need to add
// dispatch tables below.
break;
}
} else {
CHECK(success);
}
}
int table_count = static_cast<int>(module_->tables.size());
@@ -165,3 +165,74 @@ function getMemoryFill(mem) {
const instance = builder.instantiate();
assertTraps(kTrapElemSegmentDropped, () => instance.exports.drop());
})();
(function TestLazyDataSegmentBoundsCheck() {
const memory = new WebAssembly.Memory({initial: 1});
const view = new Uint8Array(memory.buffer);
const builder = new WasmModuleBuilder();
builder.addImportedMemory('m', 'memory', 1);
builder.addDataSegment(kPageSize - 1, [42, 42]);
builder.addDataSegment(0, [111, 111]);
assertEquals(0, view[kPageSize - 1]);
// Instantiation fails, but still modifies memory.
assertThrows(() => builder.instantiate({m: {memory}}), WebAssembly.LinkError);
assertEquals(42, view[kPageSize - 1]);
// The second segment is not initialized.
assertEquals(0, view[0]);
})();
(function TestLazyElementSegmentBoundsCheck() {
const table = new WebAssembly.Table({initial: 3, element: 'anyfunc'});
const builder = new WasmModuleBuilder();
builder.addImportedTable('m', 'table', 1);
const f = builder.addFunction('f', kSig_i_v).addBody([kExprI32Const, 42]);
const tableIndex = 0;
const isGlobal = false;
const isImport = true;
builder.addElementSegment(
tableIndex, 2, isGlobal, [f.index, f.index], isImport);
builder.addElementSegment(
tableIndex, 0, isGlobal, [f.index, f.index], isImport);
assertEquals(null, table.get(0));
assertEquals(null, table.get(1));
assertEquals(null, table.get(2));
// Instantiation fails, but still modifies the table.
assertThrows(() => builder.instantiate({m: {table}}), WebAssembly.LinkError);
// The second segment is not initialized.
assertEquals(null, table.get(0));
assertEquals(null, table.get(1));
assertEquals(42, table.get(2)());
})();
(function TestLazyDataAndElementSegments() {
const table = new WebAssembly.Table({initial: 1, element: 'anyfunc'});
const memory = new WebAssembly.Memory({initial: 1});
const view = new Uint8Array(memory.buffer);
const builder = new WasmModuleBuilder();
builder.addImportedMemory('m', 'memory', 1);
builder.addImportedTable('m', 'table', 1);
const f = builder.addFunction('f', kSig_i_v).addBody([kExprI32Const, 42]);
const tableIndex = 0;
const isGlobal = false;
const isImport = true;
builder.addElementSegment(
tableIndex, 0, isGlobal, [f.index, f.index], isImport);
builder.addDataSegment(0, [42]);
// Instantiation fails, but still modifies the table. The memory is not
// modified, since data segments are initialized after element segments.
assertThrows(
() => builder.instantiate({m: {memory, table}}), WebAssembly.LinkError);
assertEquals(42, table.get(0)());
assertEquals(0, view[0]);
})();
@@ -898,12 +898,18 @@ class WasmModuleBuilder {
}
this.element_segments.push({table: table, base: base, is_global: is_global,
array: array, is_active: true});
if (!is_global) {
// As a testing convenience, update the table length when adding an element
// segment. If the table is imported, we can't do this because we don't
// know how long the table actually is. If |is_global| is true, then the
// base is a global index, instead of an integer offset, so we can't update
// the table then either.
if (!(is_import || is_global)) {
var length = base + array.length;
if (!is_import && length > this.tables[0].initial_size) {
if (length > this.tables[0].initial_size) {
this.tables[0].initial_size = length;
}
if (!is_import && this.tables[0].has_max &&
if (this.tables[0].has_max &&
length > this.tables[0].max_size) {
this.tables[0].max_size = length;
}
@@ -6,6 +6,10 @@
[ALWAYS, {
#TODO(ahaas): Add additional stack checks on mips.
'tests/skip-stack-guard-page': [PASS, ['arch == mipsel or arch == mips64el or ((arch == ppc or arch == ppc64 or arch == s390 or arch == s390x) and simulator_run)', SKIP]],
# TODO(v8:9144): The MVP behavior when bounds-checking segments changed in
# the bulk-memory proposal. Since we've enabled bulk-memory by default, we
# need to update to use its testsuite.
'tests/linking': [FAIL],
}], # ALWAYS
['arch == mipsel or arch == mips64el or arch == mips or arch == mips64', {