Commit e1514799 authored by Ben Smith, committed by Commit Bot

[wasm] Implement memory.copy and memory.fill

This implementation currently only supports the optimizing tier (TurboFan); Liftoff still bails out on these instructions.

Bug: v8:7747
Change-Id: Ia1af29b11a5d3e8a48b122f6cf3240c9f5948bfb
Reviewed-on: https://chromium-review.googlesource.com/c/1364710
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Ben Smith <binji@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58137}
parent 44b1b245
@@ -3284,6 +3284,34 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
return index;
}
Node* WasmGraphBuilder::BoundsCheckMemRange(Node* start, Node* size,
wasm::WasmCodePosition position) {
// TODO(binji): Support trap handler.
auto m = mcgraph()->machine();
if (!FLAG_wasm_no_bounds_checks) {
// The accessed memory is [start, end), where {end} is {start + size}.
// We want to check that {start + size <= mem_size}, making sure that
// {start + size} doesn't overflow. This can be expressed as
// {start <= mem_size - size} as long as {mem_size - size} isn't negative,
// which is true if {size <= mem_size}.
Node* mem_size = instance_cache_->mem_size;
Node* cond = graph()->NewNode(m->Uint32LessThanOrEqual(), size, mem_size);
TrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
// This produces a positive number, since {size <= mem_size}.
Node* effective_size = graph()->NewNode(m->Int32Sub(), mem_size, size);
// Introduce the actual bounds check.
Node* check =
graph()->NewNode(m->Uint32LessThanOrEqual(), start, effective_size);
TrapIfFalse(wasm::kTrapMemOutOfBounds, check, position);
// TODO(binji): Does this need additional untrusted_code_mitigations_ mask
// like BoundsCheckMem above?
}
return graph()->NewNode(m->IntAdd(), MemBuffer(0), Uint32ToUintptr(start));
}
const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,
wasm::ValueType type) {
int alignment = offset % (wasm::ValueTypes::ElementSizeInBytes(type));
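The overflow-safe range check described in the comment inside BoundsCheckMemRange above boils down to two comparisons. A minimal standalone sketch of the same idea, with illustrative names that are not part of this CL:

#include <cstdint>

// True iff [start, start + size) fits into a memory of mem_size bytes,
// without ever computing start + size (which could wrap around in 32 bits).
bool RangeInBounds(uint32_t start, uint32_t size, uint32_t mem_size) {
  if (size > mem_size) return false;  // ensures mem_size - size cannot underflow
  return start <= mem_size - size;    // equivalent to start + size <= mem_size
}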
@@ -4204,6 +4232,29 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
#undef ATOMIC_LOAD_LIST
#undef ATOMIC_STORE_LIST
Node* WasmGraphBuilder::MemoryCopy(Node* dst, Node* src, Node* size,
wasm::WasmCodePosition position) {
dst = BoundsCheckMemRange(dst, size, position);
src = BoundsCheckMemRange(src, size, position);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
ExternalReference::wasm_memory_copy()));
MachineType sig_types[] = {MachineType::Pointer(), MachineType::Pointer(),
MachineType::Uint32()};
MachineSignature sig(0, 3, sig_types);
return BuildCCall(&sig, function, dst, src, size);
}
Node* WasmGraphBuilder::MemoryFill(Node* dst, Node* value, Node* size,
wasm::WasmCodePosition position) {
dst = BoundsCheckMemRange(dst, size, position);
Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
ExternalReference::wasm_memory_fill()));
MachineType sig_types[] = {MachineType::Pointer(), MachineType::Uint32(),
MachineType::Uint32()};
MachineSignature sig(0, 3, sig_types);
return BuildCCall(&sig, function, dst, value, size);
}
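For orientation, here is a rough standalone sketch (illustrative names, not V8 code) of the call shape MemoryCopy produces: BoundsCheckMemRange validates each range and yields an absolute pointer (memory base plus offset), and those pointers together with the size are handed to a C function with no return value and three parameters, matching the MachineSignature(0, 3, sig_types) built above. MemoryFill is analogous, with a (pointer, uint32, uint32) signature.

#include <cstdint>
#include <cstring>

// Stand-in for wasm::memory_copy_wrapper; the real wrapper takes Address
// arguments and is reached through an ExternalReference.
void memory_copy_stub(void* dst, const void* src, uint32_t size) {
  std::memmove(dst, src, size);
}

// What the generated code conceptually does for memory.copy after both
// ranges have passed their bounds checks (checks elided here).
void LoweredMemoryCopy(uint8_t* mem_start, uint32_t dst_offset,
                       uint32_t src_offset, uint32_t size) {
  memory_copy_stub(mem_start + dst_offset, mem_start + src_offset, size);
}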
class WasmDecorator final : public GraphDecorator {
public:
explicit WasmDecorator(NodeOriginTable* origins, wasm::Decoder* decoder)
@@ -349,6 +349,11 @@ class WasmGraphBuilder {
uint32_t alignment, uint32_t offset,
wasm::WasmCodePosition position);
Node* MemoryCopy(Node* dst, Node* src, Node* size,
wasm::WasmCodePosition position);
Node* MemoryFill(Node* dst, Node* fill, Node* size,
wasm::WasmCodePosition position);
bool has_simd() const { return has_simd_; }
const wasm::WasmModule* module() { return env_ ? env_->module : nullptr; }
@@ -401,6 +406,9 @@ class WasmGraphBuilder {
// BoundsCheckMem receives a uint32 {index} node and returns a ptrsize index.
Node* BoundsCheckMem(uint8_t access_size, Node* index, uint32_t offset,
wasm::WasmCodePosition, EnforceBoundsCheck);
// BoundsCheckMemRange receives a uint32 {index} and {size} and returns
// a pointer into memory at that index, if it is in bounds.
Node* BoundsCheckMemRange(Node* index, Node* size, wasm::WasmCodePosition);
Node* CheckBoundsAndAlignment(uint8_t access_size, Node* index,
uint32_t offset, wasm::WasmCodePosition);
Node* Uint32ToUintptr(Node*);
@@ -307,6 +307,8 @@ FUNCTION_REFERENCE(wasm_word32_popcnt, wasm::word32_popcnt_wrapper)
FUNCTION_REFERENCE(wasm_word64_popcnt, wasm::word64_popcnt_wrapper)
FUNCTION_REFERENCE(wasm_word32_rol, wasm::word32_rol_wrapper)
FUNCTION_REFERENCE(wasm_word32_ror, wasm::word32_ror_wrapper)
FUNCTION_REFERENCE(wasm_memory_copy, wasm::memory_copy_wrapper)
FUNCTION_REFERENCE(wasm_memory_fill, wasm::memory_fill_wrapper)
static void f64_acos_wrapper(Address data) {
double input = ReadUnalignedValue<double>(data);
@@ -183,6 +183,8 @@ class StatsCounter;
V(wasm_word32_ror, "wasm::word32_ror") \
V(wasm_word64_ctz, "wasm::word64_ctz") \
V(wasm_word64_popcnt, "wasm::word64_popcnt") \
V(wasm_memory_copy, "wasm::memory_copy") \
V(wasm_memory_fill, "wasm::memory_fill") \
V(call_enqueue_microtask_function, "MicrotaskQueue::CallEnqueueMicrotask") \
V(atomic_pair_load_function, "atomic_pair_load_function") \
V(atomic_pair_store_function, "atomic_pair_store_function") \
@@ -1865,13 +1865,13 @@ class LiftoffCompiler {
unsupported(decoder, "memory.drop");
}
void MemoryCopy(FullDecoder* decoder,
const MemoryIndexImmediate<validate>& imm,
Vector<Value> args) {
const MemoryIndexImmediate<validate>& imm, const Value& dst,
const Value& src, const Value& size) {
unsupported(decoder, "memory.copy");
}
void MemoryFill(FullDecoder* decoder,
const MemoryIndexImmediate<validate>& imm,
Vector<Value> args) {
const MemoryIndexImmediate<validate>& imm, const Value& dst,
const Value& value, const Value& size) {
unsupported(decoder, "memory.fill");
}
void TableInit(FullDecoder* decoder, const TableInitImmediate<validate>& imm,
This diff is collapsed.
@@ -512,14 +512,14 @@ class WasmGraphBuildingInterface {
BUILD(Unreachable, decoder->position());
}
void MemoryCopy(FullDecoder* decoder,
const MemoryIndexImmediate<validate>& imm,
Vector<Value> args) {
BUILD(Unreachable, decoder->position());
const MemoryIndexImmediate<validate>& imm, const Value& dst,
const Value& src, const Value& size) {
BUILD(MemoryCopy, dst.node, src.node, size.node, decoder->position());
}
void MemoryFill(FullDecoder* decoder,
const MemoryIndexImmediate<validate>& imm,
Vector<Value> args) {
BUILD(Unreachable, decoder->position());
const MemoryIndexImmediate<validate>& imm, const Value& dst,
const Value& value, const Value& size) {
BUILD(MemoryFill, dst.node, value.node, size.node, decoder->position());
}
void TableInit(FullDecoder* decoder, const TableInitImmediate<validate>& imm,
Vector<Value> args) {
@@ -10,6 +10,7 @@
#include "include/v8config.h"
#include "src/base/bits.h"
#include "src/memcopy.h"
#include "src/utils.h"
#include "src/v8memory.h"
#include "src/wasm/wasm-external-refs.h"
@@ -247,6 +248,14 @@ void float64_pow_wrapper(Address data) {
WriteUnalignedValue<double>(data, Pow(x, y));
}
void memory_copy_wrapper(Address dst, Address src, uint32_t size) {
MemMove(reinterpret_cast<void*>(dst), reinterpret_cast<void*>(src), size);
}
void memory_fill_wrapper(Address dst, uint32_t value, uint32_t size) {
memset(reinterpret_cast<void*>(dst), value, size);
}
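These two wrappers pin down the observable semantics that the mjsunit tests further down exercise: MemMove, unlike memcpy, handles overlapping source and destination ranges, and memset stores only the low byte of the 32-bit fill value. A small self-contained illustration of both properties (an assumed example, not part of this CL):

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  // Overlapping copy, mirroring TestMemoryCopyOverlapping below: copying
  // [0, 3) to [2, 5) must not clobber the byte at index 2 before reading it.
  uint8_t buf[5] = {10, 20, 30, 0, 0};
  std::memmove(buf + 2, buf, 3);
  assert(buf[2] == 10 && buf[3] == 20 && buf[4] == 30);

  // Fill value truncation, mirroring TestMemoryFillValueWrapsToByte below:
  // memset stores only the low byte, so 1000 becomes 1000 & 255 == 232.
  uint8_t fill[3];
  std::memset(fill, 1000, sizeof(fill));
  assert(fill[0] == 232 && fill[2] == 232);
  return 0;
}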
static WasmTrapCallbackForTesting wasm_trap_callback_for_testing = nullptr;
void set_trap_callback_for_testing(WasmTrapCallbackForTesting callback) {
@@ -67,6 +67,10 @@ uint32_t word32_ror_wrapper(Address data);
void float64_pow_wrapper(Address data);
void memory_copy_wrapper(Address dst, Address src, uint32_t size);
void memory_fill_wrapper(Address dst, uint32_t value, uint32_t size);
typedef void (*WasmTrapCallbackForTesting)();
void set_trap_callback_for_testing(WasmTrapCallbackForTesting callback);
@@ -14,7 +14,7 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
builder.addPassiveDataSegment([3, 4]);
// Should not throw.
const module = builder.instantiate();
builder.instantiate();
})();
(function TestPassiveElementSegment() {
@@ -25,5 +25,156 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
builder.addPassiveElementSegment([0, 0]);
// Should not throw.
const module = builder.instantiate();
builder.instantiate();
})();
function assertBufferContents(buf, expected) {
for (let i = 0; i < expected.length; ++i) {
assertEquals(expected[i], buf[i]);
}
for (let i = expected.length; i < buf.length; ++i) {
assertEquals(0, buf[i]);
}
}
function getMemoryCopy(mem) {
const builder = new WasmModuleBuilder();
builder.addImportedMemory("", "mem", 0);
builder.addFunction("copy", kSig_v_iii).addBody([
kExprGetLocal, 0, // Dest.
kExprGetLocal, 1, // Source.
kExprGetLocal, 2, // Size in bytes.
kNumericPrefix, kExprMemoryCopy, 0,
]).exportAs("copy");
return builder.instantiate({'': {mem}}).exports.copy;
}
(function TestMemoryCopy() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryCopy = getMemoryCopy(mem);
const u8a = new Uint8Array(mem.buffer);
u8a.set([0, 11, 22, 33, 44, 55, 66, 77]);
memoryCopy(10, 1, 8);
assertBufferContents(u8a, [0, 11, 22, 33, 44, 55, 66, 77, 0, 0,
11, 22, 33, 44, 55, 66, 77]);
// Copying 0 bytes does nothing.
memoryCopy(10, 1, 0);
assertBufferContents(u8a, [0, 11, 22, 33, 44, 55, 66, 77, 0, 0,
11, 22, 33, 44, 55, 66, 77]);
})();
(function TestMemoryCopyOverlapping() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryCopy = getMemoryCopy(mem);
const u8a = new Uint8Array(mem.buffer);
u8a.set([10, 20, 30]);
// Copy from [0, 3] -> [2, 5]. The copy must not overwrite 30 before copying
// it (i.e. cannot copy forward in this case).
memoryCopy(2, 0, 3);
assertBufferContents(u8a, [10, 20, 10, 20, 30]);
// Copy from [2, 5] -> [0, 3]. The copy must not write the first 10 (i.e.
// cannot copy backward in this case).
memoryCopy(0, 2, 3);
assertBufferContents(u8a, [10, 20, 30, 20, 30]);
})();
(function TestMemoryCopyOutOfBounds() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryCopy = getMemoryCopy(mem);
memoryCopy(0, 0, kPageSize);
// Source range must not be out of bounds.
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(0, 1, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(0, 1000, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(0, kPageSize, 1));
// Destination range must not be out of bounds.
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(1, 0, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(1000, 0, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(kPageSize, 0, 1));
// Make sure bounds aren't checked with 32-bit wrapping.
assertTraps(kTrapMemOutOfBounds, () => memoryCopy(1, 1, -1));
mem.grow(1);
// Works properly after grow.
memoryCopy(0, kPageSize, 1000);
// Traps at new boundary.
assertTraps(
kTrapMemOutOfBounds, () => memoryCopy(0, kPageSize + 1, kPageSize));
})();
function getMemoryFill(mem) {
const builder = new WasmModuleBuilder();
builder.addImportedMemory("", "mem", 0);
builder.addFunction("fill", kSig_v_iii).addBody([
kExprGetLocal, 0, // Dest.
kExprGetLocal, 1, // Byte value.
kExprGetLocal, 2, // Size.
kNumericPrefix, kExprMemoryFill, 0,
]).exportAs("fill");
return builder.instantiate({'': {mem}}).exports.fill;
}
(function TestMemoryFill() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryFill = getMemoryFill(mem);
const u8a = new Uint8Array(mem.buffer);
memoryFill(1, 33, 5);
assertBufferContents(u8a, [0, 33, 33, 33, 33, 33]);
memoryFill(4, 66, 4);
assertBufferContents(u8a, [0, 33, 33, 33, 66, 66, 66, 66]);
// Filling 0 bytes does nothing.
memoryFill(4, 66, 0);
assertBufferContents(u8a, [0, 33, 33, 33, 66, 66, 66, 66]);
})();
(function TestMemoryFillValueWrapsToByte() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryFill = getMemoryFill(mem);
const u8a = new Uint8Array(mem.buffer);
memoryFill(0, 1000, 3);
const expected = 1000 & 255;
assertBufferContents(u8a, [expected, expected, expected]);
})();
(function TestMemoryFillOutOfBounds() {
const mem = new WebAssembly.Memory({initial: 1});
const memoryFill = getMemoryFill(mem);
const v = 123;
memoryFill(0, 0, kPageSize);
// Destination range must not be out of bounds.
assertTraps(kTrapMemOutOfBounds, () => memoryFill(1, v, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryFill(1000, v, kPageSize));
assertTraps(kTrapMemOutOfBounds, () => memoryFill(kPageSize, v, 1));
// Make sure bounds aren't checked with 32-bit wrapping.
assertTraps(kTrapMemOutOfBounds, () => memoryFill(1, v, -1));
mem.grow(1);
// Works properly after grow.
memoryFill(kPageSize, v, 1000);
// Traps at new boundary.
assertTraps(
kTrapMemOutOfBounds, () => memoryFill(kPageSize + 1, v, kPageSize));
})();
@@ -353,8 +353,19 @@ let kExprF32ReinterpretI32 = 0xbe;
let kExprF64ReinterpretI64 = 0xbf;
// Prefix opcodes
let kNumericPrefix = 0xfc;
let kAtomicPrefix = 0xfe;
// Numeric opcodes.
let kExprMemoryInit = 0x08;
let kExprMemoryDrop = 0x09;
let kExprMemoryCopy = 0x0a;
let kExprMemoryFill = 0x0b;
let kExprTableInit = 0x0c;
let kExprTableDrop = 0x0d;
let kExprTableCopy = 0x0e;
// Atomic opcodes.
let kExprAtomicWake = 0x00;
let kExprI32AtomicWait = 0x01;
let kExprI64AtomicWait = 0x02;