Commit 4084185b authored by Ben Smith, committed by Commit Bot

[wasm] Implement memory.init and memory.drop

`memory.init` copies bytes from a passive data segment to linear memory.

`memory.drop` informs the wasm VM that the instance no longer needs access to
the passive data segment.

Information about the passive data segments, including their contents, lengths,
and whether they have been dropped, is stored in the `WasmInstanceObject` as
primitive arrays.
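
For illustration only (plain JavaScript, not V8 code; all names here are
invented), the per-instance bookkeeping amounts to three parallel primitive
arrays, one entry per declared data segment:

function makeSegmentState(segments) {
  // segments: [{bytes: Uint8Array, active: bool}, ...]
  return {
    starts: segments.map(s => s.bytes),                            // data_segment_starts
    sizes: new Uint32Array(segments.map(s => s.bytes.length)),     // data_segment_sizes
    dropped: new Uint8Array(segments.map(s => s.active ? 1 : 0)),  // dropped_data_segments
  };
}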

Bug: v8:7747
Change-Id: I1515c8868c9be227743456a539126c15280b5365
Reviewed-on: https://chromium-review.googlesource.com/c/1370691
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Ben Smith <binji@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58196}
parent 972c2902
@@ -1257,6 +1257,7 @@ namespace internal {
TFS(ThrowWasmTrapFloatUnrepresentable) \
TFS(ThrowWasmTrapFuncInvalid) \
TFS(ThrowWasmTrapFuncSigMismatch) \
TFS(ThrowWasmTrapDataSegmentDropped) \
TFC(BigIntToWasmI64, BigIntToWasmI64, 1) \
\
/* WeakMap */ \
......
@@ -3284,32 +3284,37 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
return index;
}
// Check that the range [start, start + size) is in the range [0, max).
void WasmGraphBuilder::BoundsCheckRange(Node* start, Node* size, Node* max,
                                        wasm::WasmCodePosition position) {
  // The accessed memory is [start, end), where {end} is {start + size}. We
  // want to check that {start + size <= max}, making sure that {start + size}
  // doesn't overflow. This can be expressed as {start <= max - size} as long
  // as {max - size} isn't negative, which is true if {size <= max}.
  auto m = mcgraph()->machine();
  Node* cond = graph()->NewNode(m->Uint32LessThanOrEqual(), size, max);
  TrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
  // This produces a positive number, since {size <= max}.
  Node* effective_size = graph()->NewNode(m->Int32Sub(), max, size);
  // Introduce the actual bounds check.
  Node* check =
      graph()->NewNode(m->Uint32LessThanOrEqual(), start, effective_size);
  TrapIfFalse(wasm::kTrapMemOutOfBounds, check, position);
  // TODO(binji): Does this need an additional untrusted_code_mitigations_
  // mask like BoundsCheckMem above?
}

Node* WasmGraphBuilder::BoundsCheckMemRange(Node* start, Node* size,
                                            wasm::WasmCodePosition position) {
  // TODO(binji): Support trap handler.
  if (!FLAG_wasm_no_bounds_checks) {
    BoundsCheckRange(start, size, instance_cache_->mem_size, position);
  }
  return graph()->NewNode(mcgraph()->machine()->IntAdd(), MemBuffer(0),
                          Uint32ToUintptr(start));
}
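
The overflow-free comparison above is the heart of the range check, so here is
a minimal JS model of it, assuming uint32 values as in wasm (the helper name is
illustrative):

function boundsCheckRange(start, size, max) {
  // Checks {start + size <= max} without computing the sum, which could
  // overflow 32 bits.
  if (size > max) throw new Error('memory access out of bounds');
  // {max - size} is non-negative here, because {size <= max}.
  if (start > max - size) throw new Error('memory access out of bounds');
}

boundsCheckRange(10, 20, 100);               // ok: [10, 30) lies in [0, 100)
// boundsCheckRange(1, 0xffffffff, 0x10000)  // traps instead of wrapping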
const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,
@@ -4232,6 +4237,81 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
#undef ATOMIC_LOAD_LIST
#undef ATOMIC_STORE_LIST
Node* WasmGraphBuilder::CheckDataSegmentIsPassiveAndNotDropped(
uint32_t data_segment_index, wasm::WasmCodePosition position) {
// The data segment index must be in bounds; validation guarantees this.
DCHECK_LT(data_segment_index, env_->module->num_declared_data_segments);
Node* dropped_data_segments =
LOAD_INSTANCE_FIELD(DroppedDataSegments, MachineType::Pointer());
Node* is_segment_dropped = SetEffect(graph()->NewNode(
mcgraph()->machine()->Load(MachineType::Uint8()), dropped_data_segments,
mcgraph()->IntPtrConstant(data_segment_index), Effect(), Control()));
TrapIfTrue(wasm::kTrapDataSegmentDropped, is_segment_dropped, position);
return dropped_data_segments;
}
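
Note the encoding this check relies on: a single byte per segment covers both
the "active" and the "dropped" state, because active segments are initialized
to 1 (see InitDataSegmentArrays below). An illustrative JS model:

function checkDataSegmentIsPassiveAndNotDropped(dropped, segIndex) {
  // {dropped} is a Uint8Array; active segments start out as 1, so one
  // comparison rejects active and dropped segments alike.
  if (dropped[segIndex] !== 0) throw new Error('data segment has been dropped');
}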
Node* WasmGraphBuilder::MemoryInit(uint32_t data_segment_index, Node* dst,
Node* src, Node* size,
wasm::WasmCodePosition position) {
CheckDataSegmentIsPassiveAndNotDropped(data_segment_index, position);
dst = BoundsCheckMemRange(dst, size, position);
MachineOperatorBuilder* m = mcgraph()->machine();
Node* seg_index = Uint32Constant(data_segment_index);
{
// Load segment size from WasmInstanceObject::data_segment_sizes.
Node* seg_size_array =
LOAD_INSTANCE_FIELD(DataSegmentSizes, MachineType::Pointer());
STATIC_ASSERT(wasm::kV8MaxWasmDataSegments <= kMaxUInt32 >> 2);
Node* scaled_index = Uint32ToUintptr(
graph()->NewNode(m->Word32Shl(), seg_index, Int32Constant(2)));
Node* seg_size = SetEffect(graph()->NewNode(m->Load(MachineType::Uint32()),
seg_size_array, scaled_index,
Effect(), Control()));
// Bounds check the src index against the segment size.
BoundsCheckRange(src, size, seg_size, position);
}
{
// Load segment's base pointer from WasmInstanceObject::data_segment_starts.
Node* seg_start_array =
LOAD_INSTANCE_FIELD(DataSegmentStarts, MachineType::Pointer());
STATIC_ASSERT(wasm::kV8MaxWasmDataSegments <= kMaxUInt32 >>
kPointerSizeLog2);
Node* scaled_index = Uint32ToUintptr(graph()->NewNode(
m->Word32Shl(), seg_index, Int32Constant(kPointerSizeLog2)));
Node* seg_start = SetEffect(
graph()->NewNode(m->Load(MachineType::Pointer()), seg_start_array,
scaled_index, Effect(), Control()));
// Convert src index to pointer.
src = graph()->NewNode(m->IntAdd(), seg_start, Uint32ToUintptr(src));
}
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
ExternalReference::wasm_memory_copy()));
MachineType sig_types[] = {MachineType::Pointer(), MachineType::Pointer(),
MachineType::Uint32()};
MachineSignature sig(0, 3, sig_types);
return BuildCCall(&sig, function, dst, src, size);
}
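
Putting the pieces together, a step-by-step JS model of the code generated for
memory.init (names are illustrative; boundsCheckRange and
checkDataSegmentIsPassiveAndNotDropped are the sketches shown earlier):

function memoryInitModel(state, mem, dst, src, size, segIndex) {
  checkDataSegmentIsPassiveAndNotDropped(state.dropped, segIndex);
  boundsCheckRange(dst, size, mem.length);             // dst range vs. memory size
  boundsCheckRange(src, size, state.sizes[segIndex]);  // src range vs. segment size
  // The wasm_memory_copy C call, modeled with TypedArray.prototype.set.
  mem.set(state.starts[segIndex].subarray(src, src + size), dst);
}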
Node* WasmGraphBuilder::MemoryDrop(uint32_t data_segment_index,
wasm::WasmCodePosition position) {
Node* dropped_data_segments =
CheckDataSegmentIsPassiveAndNotDropped(data_segment_index, position);
const Operator* store_op = mcgraph()->machine()->Store(
StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier));
return SetEffect(
graph()->NewNode(store_op, dropped_data_segments,
mcgraph()->IntPtrConstant(data_segment_index),
mcgraph()->Int32Constant(1), Effect(), Control()));
}
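
The corresponding model for memory.drop is just the passive/not-dropped check
followed by a one-byte store (again illustrative, not V8 code):

function memoryDropModel(state, segIndex) {
  checkDataSegmentIsPassiveAndNotDropped(state.dropped, segIndex);
  state.dropped[segIndex] = 1;  // plain byte store; no write barrier needed
}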
Node* WasmGraphBuilder::MemoryCopy(Node* dst, Node* src, Node* size,
wasm::WasmCodePosition position) {
dst = BoundsCheckMemRange(dst, size, position);
......
@@ -350,6 +350,14 @@ class WasmGraphBuilder {
uint32_t alignment, uint32_t offset,
wasm::WasmCodePosition position);
// Returns a pointer to the dropped_data_segments array. Traps if the data
// segment is active or has been dropped.
Node* CheckDataSegmentIsPassiveAndNotDropped(uint32_t data_segment_index,
wasm::WasmCodePosition position);
Node* MemoryInit(uint32_t data_segment_index, Node* dst, Node* src,
Node* size, wasm::WasmCodePosition position);
Node* MemoryDrop(uint32_t data_segment_index,
wasm::WasmCodePosition position);
Node* MemoryCopy(Node* dst, Node* src, Node* size,
wasm::WasmCodePosition position);
Node* MemoryFill(Node* dst, Node* fill, Node* size,
@@ -407,9 +415,12 @@ class WasmGraphBuilder {
// BoundsCheckMem receives a uint32 {index} node and returns a ptrsize index.
Node* BoundsCheckMem(uint8_t access_size, Node* index, uint32_t offset,
wasm::WasmCodePosition, EnforceBoundsCheck);
// Check that the range [start, start + size) is in the range [0, max).
void BoundsCheckRange(Node* start, Node* size, Node* max,
                      wasm::WasmCodePosition);
// BoundsCheckMemRange receives a uint32 {start} and {size} and returns
// a pointer into memory at that index, if it is in bounds.
Node* BoundsCheckMemRange(Node* start, Node* size, wasm::WasmCodePosition);
Node* CheckBoundsAndAlignment(uint8_t access_size, Node* index,
uint32_t offset, wasm::WasmCodePosition);
Node* Uint32ToUintptr(Node*);
......
@@ -1674,7 +1674,8 @@ enum class LoadSensitivity {
V(TrapRemByZero) \
V(TrapFloatUnrepresentable) \
V(TrapFuncInvalid) \
V(TrapFuncSigMismatch) \
V(TrapDataSegmentDropped)
enum KeyedAccessLoadMode {
STANDARD_LOAD,
......
@@ -510,6 +510,7 @@ namespace internal {
T(WasmTrapFuncInvalid, "invalid index into function table") \
T(WasmTrapFuncSigMismatch, "function signature mismatch") \
T(WasmTrapTypeError, "wasm function signature contains illegal type") \
T(WasmTrapDataSegmentDropped, "data segment has been dropped") \
T(WasmExceptionError, "wasm exception") \
/* Asm.js validation related */ \
T(AsmJsInvalid, "Invalid asm.js: %") \
......
@@ -1845,8 +1845,8 @@ class LiftoffCompiler {
unsupported(decoder, "atomicop");
}
void MemoryInit(FullDecoder* decoder,
                const MemoryInitImmediate<validate>& imm, const Value& dst,
                const Value& src, const Value& size) {
unsupported(decoder, "memory.init");
}
void MemoryDrop(FullDecoder* decoder,
......
@@ -730,7 +730,8 @@ struct ControlWithNamedConstructors : public ControlBase<Value> {
F(CatchAll, Control* block) \
F(AtomicOp, WasmOpcode opcode, Vector<Value> args, \
const MemoryAccessImmediate<validate>& imm, Value* result) \
F(MemoryInit, const MemoryInitImmediate<validate>& imm, const Value& dst, \
  const Value& src, const Value& size) \
F(MemoryDrop, const MemoryDropImmediate<validate>& imm) \
F(MemoryCopy, const MemoryIndexImmediate<validate>& imm, const Value& dst, \
const Value& src, const Value& size) \
@@ -2507,8 +2508,10 @@ class WasmFullDecoder : public WasmDecoder<validate> {
MemoryInitImmediate<validate> imm(this, this->pc_);
if (!this->Validate(imm)) break;
len += imm.length;
auto size = Pop(2, sig->GetParam(2));
auto src = Pop(1, sig->GetParam(1));
auto dst = Pop(0, sig->GetParam(0));
CALL_INTERFACE_IF_REACHABLE(MemoryInit, imm, dst, src, size);
break;
}
case kExprMemoryDrop: {
......
@@ -507,13 +507,14 @@ class WasmGraphBuildingInterface {
}
void MemoryInit(FullDecoder* decoder,
                const MemoryInitImmediate<validate>& imm, const Value& dst,
                const Value& src, const Value& size) {
  BUILD(MemoryInit, imm.data_segment_index, dst.node, src.node, size.node,
        decoder->position());
}
void MemoryDrop(FullDecoder* decoder,
const MemoryDropImmediate<validate>& imm) {
BUILD(Unreachable, decoder->position());
BUILD(MemoryDrop, imm.index, decoder->position());
}
void MemoryCopy(FullDecoder* decoder,
const MemoryIndexImmediate<validate>& imm, const Value& dst,
......
@@ -183,6 +183,12 @@ PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_targets,
Address*, kIndirectFunctionTableTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, jump_table_start, Address,
kJumpTableStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, data_segment_starts, Address*,
kDataSegmentStartsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, data_segment_sizes, uint32_t*,
kDataSegmentSizesOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, dropped_data_segments, byte*,
kDroppedDataSegmentsOffset)
ACCESSORS2(WasmInstanceObject, module_object, WasmModuleObject,
kModuleObjectOffset)
......
@@ -65,13 +65,21 @@ class WasmInstanceNativeAllocations {
// Allocates initial native storage for a given instance.
WasmInstanceNativeAllocations(Handle<WasmInstanceObject> instance,
size_t num_imported_functions,
                              size_t num_imported_mutable_globals,
                              size_t num_data_segments) {
SET(instance, imported_function_targets,
reinterpret_cast<Address*>(
calloc(num_imported_functions, sizeof(Address))));
SET(instance, imported_mutable_globals,
reinterpret_cast<Address*>(
calloc(num_imported_mutable_globals, sizeof(Address))));
SET(instance, data_segment_starts,
reinterpret_cast<Address*>(calloc(num_data_segments, sizeof(Address))));
SET(instance, data_segment_sizes,
reinterpret_cast<uint32_t*>(
calloc(num_data_segments, sizeof(uint32_t))));
SET(instance, dropped_data_segments,
reinterpret_cast<uint8_t*>(calloc(num_data_segments, sizeof(uint8_t))));
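// Note: calloc zero-initializes these arrays, so every segment starts out
// not-dropped; InitDataSegmentArrays() later marks active segments dropped.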
}
~WasmInstanceNativeAllocations() {
::free(indirect_function_table_sig_ids_);
@@ -82,6 +90,12 @@ class WasmInstanceNativeAllocations {
imported_function_targets_ = nullptr;
::free(imported_mutable_globals_);
imported_mutable_globals_ = nullptr;
::free(data_segment_starts_);
data_segment_starts_ = nullptr;
::free(data_segment_sizes_);
data_segment_sizes_ = nullptr;
::free(dropped_data_segments_);
dropped_data_segments_ = nullptr;
}
// Resizes the indirect function table.
void resize_indirect_function_table(Isolate* isolate,
@@ -123,13 +137,18 @@ class WasmInstanceNativeAllocations {
Address* indirect_function_table_targets_ = nullptr;
Address* imported_function_targets_ = nullptr;
Address* imported_mutable_globals_ = nullptr;
Address* data_segment_starts_ = nullptr;
uint32_t* data_segment_sizes_ = nullptr;
uint8_t* dropped_data_segments_ = nullptr;
#undef SET
};
size_t EstimateNativeAllocationsSize(const WasmModule* module) {
size_t estimate = sizeof(WasmInstanceNativeAllocations) +
(1 * kPointerSize * module->num_imported_mutable_globals) +
(2 * kPointerSize * module->num_imported_functions) +
((kPointerSize + sizeof(uint32_t) + sizeof(uint8_t)) *
 module->num_declared_data_segments);
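// On a 64-bit target, for example, this adds 8 + 4 + 1 = 13 bytes per
// declared data segment to the estimate.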
for (auto& table : module->tables) {
estimate += 3 * kPointerSize * table.initial_size;
}
@@ -1268,10 +1287,11 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
auto module = module_object->module();
auto num_imported_functions = module->num_imported_functions;
auto num_imported_mutable_globals = module->num_imported_mutable_globals;
auto num_data_segments = module->num_declared_data_segments;
size_t native_allocations_size = EstimateNativeAllocationsSize(module);
auto native_allocations = Managed<WasmInstanceNativeAllocations>::Allocate(
isolate, native_allocations_size, instance, num_imported_functions,
num_imported_mutable_globals, num_data_segments);
instance->set_managed_native_allocations(*native_allocations);
Handle<FixedArray> imported_function_refs =
@@ -1306,9 +1326,39 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
isolate, weak_instance_list, MaybeObjectHandle::Weak(instance));
module_object->set_weak_instance_list(*weak_instance_list);
InitDataSegmentArrays(instance, module_object);
return instance;
}
// static
void WasmInstanceObject::InitDataSegmentArrays(
Handle<WasmInstanceObject> instance,
Handle<WasmModuleObject> module_object) {
auto module = module_object->module();
auto wire_bytes = module_object->native_module()->wire_bytes();
auto num_data_segments = module->num_declared_data_segments;
// The number of declared data segments will be zero if there is no DataCount
// section. In that case, these arrays are neither allocated nor initialized,
// since they cannot be used: the validator checks the number of declared
// data segments when validating the memory.init and memory.drop
// instructions.
DCHECK(num_data_segments == 0 ||
num_data_segments == module->data_segments.size());
for (size_t i = 0; i < num_data_segments; ++i) {
const wasm::WasmDataSegment& segment = module->data_segments[i];
// Mark active segments as already dropped, since memory.init on a dropped
// passive segment and on an active segment behave the same.
instance->dropped_data_segments()[i] = segment.active ? 1 : 0;
// Initialize the pointer and size of passive segments.
instance->data_segment_starts()[i] =
reinterpret_cast<Address>(&wire_bytes[segment.source.offset()]);
instance->data_segment_sizes()[i] = segment.source.length();
}
}
Address WasmInstanceObject::GetCallTarget(uint32_t func_index) {
wasm::NativeModule* native_module = module_object()->native_module();
if (func_index < native_module->num_imported_functions()) {
......
@@ -418,6 +418,9 @@ class WasmInstanceObject : public JSObject {
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_sig_ids, uint32_t*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_targets, Address*)
DECL_PRIMITIVE_ACCESSORS(jump_table_start, Address)
DECL_PRIMITIVE_ACCESSORS(data_segment_starts, Address*)
DECL_PRIMITIVE_ACCESSORS(data_segment_sizes, uint32_t*)
DECL_PRIMITIVE_ACCESSORS(dropped_data_segments, byte*)
// Dispatched behavior.
DECL_PRINTER(WasmInstanceObject)
@@ -453,6 +456,9 @@ class WasmInstanceObject : public JSObject {
V(kIndirectFunctionTableSigIdsOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableTargetsOffset, kPointerSize) /* untagged */ \
V(kJumpTableStartOffset, kPointerSize) /* untagged */ \
V(kDataSegmentStartsOffset, kPointerSize) /* untagged */ \
V(kDataSegmentSizesOffset, kPointerSize) /* untagged */ \
V(kDroppedDataSegmentsOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableSizeOffset, kUInt32Size) /* untagged */ \
V(k64BitArchPaddingOffset, kPointerSize - kUInt32Size) /* padding */ \
V(kSize, 0)
@@ -482,6 +488,10 @@ class WasmInstanceObject : public JSObject {
class BodyDescriptor;
OBJECT_CONSTRUCTORS(WasmInstanceObject, JSObject)
private:
static void InitDataSegmentArrays(Handle<WasmInstanceObject>,
Handle<WasmModuleObject>);
};
// Representation of WebAssembly.Exception JavaScript-level object.
......
@@ -37,6 +37,148 @@ function assertBufferContents(buf, expected) {
}
}
function getMemoryInit(mem, segment_data) {
const builder = new WasmModuleBuilder();
builder.addImportedMemory("", "mem", 0);
builder.addPassiveDataSegment(segment_data);
builder.addFunction('init', kSig_v_iii)
.addBody([
kExprGetLocal, 0, // Dest.
kExprGetLocal, 1, // Source.
kExprGetLocal, 2, // Size in bytes.
kNumericPrefix, kExprMemoryInit,
0, // Memory index.
0, // Data segment index.
])
.exportAs('init');
return builder.instantiate({'': {mem}}).exports.init;
}
(function TestMemoryInit() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryInit = getMemoryInit(mem, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
const u8a = new Uint8Array(mem.buffer);
// All zeroes.
assertBufferContents(u8a, []);
// Copy all bytes from data segment 0, to memory at [10, 20).
memoryInit(10, 0, 10);
assertBufferContents(u8a, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
// Copy bytes in range [5, 10) from data segment 0, to memory at [0, 5).
memoryInit(0, 5, 5);
assertBufferContents(u8a, [5, 6, 7, 8, 9, 0, 0, 0, 0, 0,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
})();
(function TestMemoryInitOutOfBounds() {
const mem = new WebAssembly.Memory({initial: 1});
// Create a data segment that has a length of kPageSize.
const memoryInit = getMemoryInit(mem, new Array(kPageSize));
// OK, copy the full data segment to memory.
memoryInit(0, 0, kPageSize);
// Source range must not be out of bounds.
assertTraps(kTrapMemOutOfBounds, () => memoryInit(0, 1, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryInit(0, 1000, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryInit(0, kPageSize, 1));
// Destination range must not be out of bounds.
assertTraps(kTrapMemOutOfBounds, () => memoryInit(1, 0, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryInit(1000, 0, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryInit(kPageSize, 0, 1));
// Make sure bounds aren't checked with 32-bit wrapping: size -1 becomes
// 0xffffffff as a uint32, so dst + size would wrap around to 0 if the sum
// were computed in 32 bits.
assertTraps(kTrapMemOutOfBounds, () => memoryInit(1, 1, -1));
mem.grow(1);
// Works properly after grow.
memoryInit(kPageSize, 0, 1000);
// Traps at new boundary.
assertTraps(
kTrapMemOutOfBounds, () => memoryInit(kPageSize + 1, 0, kPageSize));
})();
(function TestMemoryInitOnActiveSegment() {
const builder = new WasmModuleBuilder();
builder.addMemory(1);
builder.addPassiveDataSegment([1, 2, 3]);
builder.addDataSegment(0, [4, 5, 6]);
builder.addFunction('init', kSig_v_v)
.addBody([
kExprI32Const, 0, // Dest.
kExprI32Const, 0, // Source.
kExprI32Const, 0, // Size in bytes.
kNumericPrefix, kExprMemoryInit,
0, // Memory index.
1, // Data segment index.
])
.exportAs('init');
// Instantiation succeeds, because using memory.init with an active segment
// is a trap, not a validation error.
const instance = builder.instantiate();
assertTraps(kTrapDataSegmentDropped, () => instance.exports.init());
})();
(function TestMemoryInitOnDroppedSegment() {
const builder = new WasmModuleBuilder();
builder.addMemory(1);
builder.addPassiveDataSegment([1, 2, 3]);
builder.addFunction('init', kSig_v_v)
.addBody([
kExprI32Const, 0, // Dest.
kExprI32Const, 0, // Source.
kExprI32Const, 0, // Size in bytes.
kNumericPrefix, kExprMemoryInit,
0, // Memory index.
0, // Data segment index.
])
.exportAs('init');
builder.addFunction('drop', kSig_v_v)
.addBody([
kNumericPrefix, kExprMemoryDrop,
0, // Data segment index.
])
.exportAs('drop');
// Instantiation succeeds; memory.init and memory.drop on a dropped segment
// trap at runtime rather than failing validation.
const instance = builder.instantiate();
// OK, segment hasn't been dropped.
instance.exports.init();
instance.exports.drop();
// After segment has been dropped, memory.init and memory.drop fail.
assertTraps(kTrapDataSegmentDropped, () => instance.exports.init());
assertTraps(kTrapDataSegmentDropped, () => instance.exports.drop());
})();
(function TestMemoryDropOnActiveSegment() {
const builder = new WasmModuleBuilder();
builder.addMemory(1);
builder.addPassiveDataSegment([1, 2, 3]);
builder.addDataSegment(0, [4, 5, 6]);
builder.addFunction('drop', kSig_v_v)
.addBody([
kNumericPrefix, kExprMemoryDrop,
1, // Data segment index.
])
.exportAs('drop');
const instance = builder.instantiate();
assertTraps(kTrapDataSegmentDropped, () => instance.exports.drop());
})();
function getMemoryCopy(mem) {
const builder = new WasmModuleBuilder();
builder.addImportedMemory("", "mem", 0);
......
@@ -65,6 +65,7 @@ let kElementSectionCode = 9; // Elements section
let kCodeSectionCode = 10; // Function code
let kDataSectionCode = 11; // Data segments
let kExceptionSectionCode = 12; // Exception section (between Global & Export)
let kDataCountSectionCode = 13; // Number of data segments
// Name section types
let kModuleNameCode = 0;
@@ -444,6 +445,7 @@ let kTrapFuncInvalid = 6;
let kTrapFuncSigMismatch = 7;
let kTrapTypeError = 8;
let kTrapUnalignedAccess = 9;
let kTrapDataSegmentDropped = 10;
let kTrapMsgs = [
"unreachable",
@@ -455,7 +457,8 @@ let kTrapMsgs = [
"invalid index into function table",
"function signature mismatch",
"wasm function signature contains illegal type",
"operation does not support unaligned accesses"
"operation does not support unaligned accesses",
"data segment has been dropped"
];
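
A quick sanity check of the new entry (illustrative; assertEquals is the usual
mjsunit helper):

assertEquals("data segment has been dropped", kTrapMsgs[kTrapDataSegmentDropped]);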
function assertTraps(trap, code) {
......
@@ -579,6 +579,13 @@ class WasmModuleBuilder {
});
}
// If there are any passive data segments, add the DataCount section.
if (wasm.data_segments.some(seg => !seg.is_active)) {
binary.emit_section(kDataCountSectionCode, section => {
section.emit_u32v(wasm.data_segments.length);
});
}
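
For example (a hypothetical module built with the helpers used in the tests
above), one active and one passive segment yield a DataCount section with a
count of 2:

const builder = new WasmModuleBuilder();
builder.addMemory(1);
builder.addDataSegment(0, [1]);         // active segment
builder.addPassiveDataSegment([2, 3]);  // passive segment: DataCount emitted
builder.instantiate();                  // the emitted module has DataCount = 2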
// Add function bodies.
if (wasm.functions.length > 0) {
// emit function bodies
......