Commit 1ce7f0e8 authored by Andreas Haas, committed by Commit Bot

[cleanup][wasm] Set the return type of wasm stores to void

The return type of stores is void according to the WebAssembly spec.
In wasm-opcodes.h, however, the return type of stores was set to the
type of the stored value. This CL changes the signatures of stores
accordingly and fixes issues that arose because there now exist
opcodes with a return count other than 1.

R=titzer@chromium.org, clemensh@chromium.org

Bug: v8:7109
Change-Id: I94d1b049e6d0032868b2ce83f52e16b474de3dea
Reviewed-on: https://chromium-review.googlesource.com/803495
Reviewed-by: Ben Titzer <titzer@chromium.org>
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49836}
parent f119c097
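For context, a minimal standalone sketch (hypothetical ToySig/Ty types, not V8 code) of what the change means at the signature level: a store such as i32.store used to be declared as if it returned the stored i32 (signature i_ii) and is now declared void with two parameters (v_ii).

// Minimal sketch with hypothetical types (ToySig, Ty) -- not V8 code.
#include <cassert>
#include <vector>

enum class Ty { I32, I64, F32, F64 };

// A toy signature: the return types followed by the parameter types.
struct ToySig {
  std::vector<Ty> rets;
  std::vector<Ty> params;
};

int main() {
  // Before this CL: i32.store declared as i_ii, i.e. "returns" an i32.
  ToySig old_i32_store{{Ty::I32}, {Ty::I32, Ty::I32}};
  // After this CL: i32.store declared as v_ii, i.e. void(i32 addr, i32 value),
  // matching the WebAssembly spec.
  ToySig new_i32_store{{}, {Ty::I32, Ty::I32}};

  assert(old_i32_store.rets.size() == 1);
  assert(new_i32_store.rets.size() == 0);  // return count is no longer 1
  return 0;
}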
@@ -223,8 +223,8 @@ class WasmGraphBuildingInterface {
   void BinOp(Decoder* decoder, WasmOpcode opcode, FunctionSig* sig,
              const Value& lhs, const Value& rhs, Value* result) {
-    result->node =
-        BUILD(Binop, opcode, lhs.node, rhs.node, decoder->position());
+    auto node = BUILD(Binop, opcode, lhs.node, rhs.node, decoder->position());
+    if (result) result->node = node;
   }
   void I32Const(Decoder* decoder, Value* result, int32_t value) {
...
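The null check above is needed because, with void stores in the opcode table, an interface callback can now be invoked for an opcode that produces no value. A hedged sketch of the idea (hypothetical names, not the actual V8 decoder interface):

// Hypothetical sketch, not V8 code: the decoder only provides an output
// slot when the opcode's signature actually has a return value.
struct Node {};
struct Value { Node* node = nullptr; };

Node* BuildBinop() {
  static Node n;  // stand-in for BUILD(Binop, ...)
  return &n;
}

void BinOpCallback(const Value& lhs, const Value& rhs, Value* result) {
  Node* node = BuildBinop();
  if (result) result->node = node;  // result may be null for 0-return opcodes
}

void DecodeOneOpcode(bool opcode_produces_value) {
  Value lhs, rhs, out;
  // For a void-returning opcode there is nothing to push onto the value
  // stack, so no output Value is passed.
  BinOpCallback(lhs, rhs, opcode_produces_value ? &out : nullptr);
}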
@@ -340,11 +340,12 @@ enum WasmOpcodeSig : byte {
   FOREACH_SIGNATURE(DECLARE_SIG_ENUM)
 };
 #undef DECLARE_SIG_ENUM
-#define DECLARE_SIG(name, ...)                                                \
-  constexpr ValueType kTypes_##name[] = {__VA_ARGS__};                        \
-  constexpr FunctionSig kSig_##name(                                          \
-      1, static_cast<int>(arraysize(kTypes_##name)) - 1, kTypes_##name);
+#define DECLARE_SIG(name, ...)                                                \
+  constexpr ValueType kTypes_##name[] = {__VA_ARGS__};                        \
+  constexpr int kReturnsCount_##name = kTypes_##name[0] == kWasmStmt ? 0 : 1; \
+  constexpr FunctionSig kSig_##name(                                          \
+      kReturnsCount_##name, static_cast<int>(arraysize(kTypes_##name)) - 1,   \
+      kTypes_##name + (1 - kReturnsCount_##name));
 FOREACH_SIGNATURE(DECLARE_SIG)
 #undef DECLARE_SIG
...
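The macro above packs one array per signature, with the return type first; kWasmStmt in the first slot means "no return", in which case the return count is 0 and the parameter list starts one element later. Below is a self-contained model of the same trick, with hypothetical Sig/Ty names standing in for V8's FunctionSig and ValueType, assuming the same layout of return types followed by parameter types.

// Self-contained model of the DECLARE_SIG trick; Sig and Ty are hypothetical
// stand-ins for V8's FunctionSig and ValueType.
#include <cstddef>

enum Ty { kStmt, kI32, kI64, kF32, kF64 };

struct Sig {
  size_t return_count;
  size_t param_count;
  const Ty* reps;  // return types first, then parameter types
  constexpr Sig(size_t r, size_t p, const Ty* t)
      : return_count(r), param_count(p), reps(t) {}
};

#define DECLARE_SIG(name, ...)                                            \
  constexpr Ty kTypes_##name[] = {__VA_ARGS__};                           \
  constexpr int kReturnsCount_##name = kTypes_##name[0] == kStmt ? 0 : 1; \
  constexpr Sig kSig_##name(kReturnsCount_##name,                         \
                            sizeof(kTypes_##name) / sizeof(Ty) - 1,       \
                            kTypes_##name + (1 - kReturnsCount_##name));

DECLARE_SIG(i_ii, kI32, kI32, kI32)   // i32 binop: 1 return, 2 params
DECLARE_SIG(v_ii, kStmt, kI32, kI32)  // i32 store: 0 returns, 2 params
#undef DECLARE_SIG

static_assert(kSig_i_ii.return_count == 1 && kSig_i_ii.param_count == 2,
              "value-returning signature keeps one return");
static_assert(kSig_v_ii.return_count == 0 && kSig_v_ii.param_count == 2,
              "void signature skips the kStmt placeholder");

int main() { return 0; }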
@@ -102,15 +102,15 @@ constexpr WasmCodePosition kNoCodePosition = -1;
 // Store memory expressions.
 #define FOREACH_STORE_MEM_OPCODE(V) \
-  V(I32StoreMem, 0x36, i_ii)        \
-  V(I64StoreMem, 0x37, l_il)        \
-  V(F32StoreMem, 0x38, f_if)        \
-  V(F64StoreMem, 0x39, d_id)        \
-  V(I32StoreMem8, 0x3a, i_ii)       \
-  V(I32StoreMem16, 0x3b, i_ii)      \
-  V(I64StoreMem8, 0x3c, l_il)       \
-  V(I64StoreMem16, 0x3d, l_il)      \
-  V(I64StoreMem32, 0x3e, l_il)
+  V(I32StoreMem, 0x36, v_ii)        \
+  V(I64StoreMem, 0x37, v_il)        \
+  V(F32StoreMem, 0x38, v_if)        \
+  V(F64StoreMem, 0x39, v_id)        \
+  V(I32StoreMem8, 0x3a, v_ii)       \
+  V(I32StoreMem16, 0x3b, v_ii)      \
+  V(I64StoreMem8, 0x3c, v_il)       \
+  V(I64StoreMem16, 0x3d, v_il)      \
+  V(I64StoreMem32, 0x3e, v_il)

 // Miscellaneous memory expressions
 #define FOREACH_MISC_MEM_OPCODE(V) \
@@ -413,15 +413,15 @@ constexpr WasmCodePosition kNoCodePosition = -1;
 #define FOREACH_SIMD_MEM_OPCODE(V) \
   V(S128LoadMem, 0xfd80, s_i)      \
-  V(S128StoreMem, 0xfd81, s_is)
+  V(S128StoreMem, 0xfd81, v_is)

 #define FOREACH_ATOMIC_OPCODE(V)     \
   V(I32AtomicLoad, 0xfe10, i_i)      \
   V(I32AtomicLoad8U, 0xfe12, i_i)    \
   V(I32AtomicLoad16U, 0xfe13, i_i)   \
-  V(I32AtomicStore, 0xfe17, i_ii)    \
-  V(I32AtomicStore8U, 0xfe19, i_ii)  \
-  V(I32AtomicStore16U, 0xfe1a, i_ii) \
+  V(I32AtomicStore, 0xfe17, v_ii)    \
+  V(I32AtomicStore8U, 0xfe19, v_ii)  \
+  V(I32AtomicStore16U, 0xfe1a, v_ii) \
   V(I32AtomicAdd, 0xfe1e, i_ii)      \
   V(I32AtomicAdd8U, 0xfe20, i_ii)    \
   V(I32AtomicAdd16U, 0xfe21, i_ii)   \
@@ -460,35 +460,38 @@ constexpr WasmCodePosition kNoCodePosition = -1;
   FOREACH_ATOMIC_OPCODE(V)

 // All signatures.
 #define FOREACH_SIGNATURE(V)                       \
   FOREACH_SIMD_SIGNATURE(V)                        \
   V(i_ii, kWasmI32, kWasmI32, kWasmI32)            \
   V(i_i, kWasmI32, kWasmI32)                       \
   V(i_v, kWasmI32)                                 \
   V(i_ff, kWasmI32, kWasmF32, kWasmF32)            \
   V(i_f, kWasmI32, kWasmF32)                       \
   V(i_dd, kWasmI32, kWasmF64, kWasmF64)            \
   V(i_d, kWasmI32, kWasmF64)                       \
   V(i_l, kWasmI32, kWasmI64)                       \
   V(l_ll, kWasmI64, kWasmI64, kWasmI64)            \
   V(i_ll, kWasmI32, kWasmI64, kWasmI64)            \
   V(l_l, kWasmI64, kWasmI64)                       \
   V(l_i, kWasmI64, kWasmI32)                       \
   V(l_f, kWasmI64, kWasmF32)                       \
   V(l_d, kWasmI64, kWasmF64)                       \
   V(f_ff, kWasmF32, kWasmF32, kWasmF32)            \
   V(f_f, kWasmF32, kWasmF32)                       \
   V(f_d, kWasmF32, kWasmF64)                       \
   V(f_i, kWasmF32, kWasmI32)                       \
   V(f_l, kWasmF32, kWasmI64)                       \
   V(d_dd, kWasmF64, kWasmF64, kWasmF64)            \
   V(d_d, kWasmF64, kWasmF64)                       \
   V(d_f, kWasmF64, kWasmF32)                       \
   V(d_i, kWasmF64, kWasmI32)                       \
   V(d_l, kWasmF64, kWasmI64)                       \
-  V(d_id, kWasmF64, kWasmI32, kWasmF64)            \
-  V(f_if, kWasmF32, kWasmI32, kWasmF32)            \
-  V(l_il, kWasmI64, kWasmI32, kWasmI64)            \
+  V(v_ii, kWasmStmt, kWasmI32, kWasmI32)           \
+  V(v_id, kWasmStmt, kWasmI32, kWasmF64)           \
+  V(d_id, kWasmF64, kWasmI32, kWasmF64)            \
+  V(v_if, kWasmStmt, kWasmI32, kWasmF32)           \
+  V(f_if, kWasmF32, kWasmI32, kWasmF32)            \
+  V(v_il, kWasmStmt, kWasmI32, kWasmI64)           \
   V(i_iii, kWasmI32, kWasmI32, kWasmI32, kWasmI32)

 #define FOREACH_SIMD_SIGNATURE(V) \
...
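One consequence the commit message alludes to, sketched below as an assumption rather than the actual V8 fix: any code path that pushed exactly one result per opcode now has to consult the signature's return count, because void stores produce nothing.

// Hypothetical sketch, not the actual V8 decoder: applying an opcode's
// signature to a value stack without assuming exactly one result.
#include <cstddef>
#include <vector>

struct Value { int node = 0; };

struct SigShape {
  size_t return_count;
  size_t param_count;
};

void ApplySignature(std::vector<Value>* stack, const SigShape& sig) {
  for (size_t i = 0; i < sig.param_count; ++i) stack->pop_back();
  // Old assumption: always push one result. New reality: push 0..n results.
  for (size_t i = 0; i < sig.return_count; ++i) stack->push_back(Value{});
}

int main() {
  std::vector<Value> stack(2);             // address and value operands
  ApplySignature(&stack, SigShape{0, 2});  // v_ii, e.g. i32.store
  return static_cast<int>(stack.size());   // 0: nothing left on the stack
}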