Commit cab5304b authored by Aseem Garg, committed by Commit Bot

Revert "[wasm] add 64 bit atomic ops to interpreter"

This reverts commit 5301cdc3.

Reason for revert: Failing on mips

Original change's description:
> [wasm] add 64 bit atomic ops to interpreter
> 
> R=gdeepti@chromium.org
> BUG=v8:6532
> 
> Change-Id: I532bf67f1631c692e12f9b054b29601a57b76f05
> Reviewed-on: https://chromium-review.googlesource.com/1130635
> Reviewed-by: Deepti Gandluri <gdeepti@chromium.org>
> Commit-Queue: Aseem Garg <aseemgarg@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#54365}

TBR=gdeepti@chromium.org,aseemgarg@chromium.org

Change-Id: Id56d3bb1228b38b6e2ad29876ea78542658e8310
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:6532
Reviewed-on: https://chromium-review.googlesource.com/1132154
Reviewed-by: Aseem Garg <aseemgarg@chromium.org>
Commit-Queue: Aseem Garg <aseemgarg@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54366}
parent 5301cdc3
......@@ -1454,14 +1454,14 @@ class ThreadImpl {
return true;
}
template <typename type, typename op_type>
template <typename type>
bool ExtractAtomicOpParams(Decoder* decoder, InterpreterCode* code,
Address& address, pc_t pc, int& len,
type* val = nullptr, type* val2 = nullptr) {
MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc + 1),
sizeof(type));
if (val2) *val2 = static_cast<type>(Pop().to<op_type>());
if (val) *val = static_cast<type>(Pop().to<op_type>());
if (val2) *val2 = Pop().to<uint32_t>();
if (val) *val = Pop().to<uint32_t>();
uint32_t index = Pop().to<uint32_t>();
address = BoundsCheckMem<type>(imm.offset, index);
if (!address) {
......@@ -1511,77 +1511,47 @@ class ThreadImpl {
InterpreterCode* code, pc_t pc, int& len) {
WasmValue result;
switch (opcode) {
#define ATOMIC_BINOP_CASE(name, type, op_type, operation) \
#define ATOMIC_BINOP_CASE(name, type, operation) \
case kExpr##name: { \
type val; \
Address addr; \
if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
&val)) { \
if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val)) { \
return false; \
} \
static_assert(sizeof(std::atomic<type>) == sizeof(type), \
"Size mismatch for types std::atomic<" #type \
">, and " #type); \
result = WasmValue(static_cast<op_type>( \
std::operation(reinterpret_cast<std::atomic<type>*>(addr), val))); \
result = WasmValue( \
std::operation(reinterpret_cast<std::atomic<type>*>(addr), val)); \
Push(result); \
break; \
}
ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, uint32_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, uint32_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, uint32_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, uint32_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, uint32_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, uint32_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, uint32_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, uint32_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, uint32_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, uint32_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, uint32_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, uint32_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, uint32_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, uint32_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, uint32_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, uint32_t, atomic_exchange);
ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, uint32_t,
atomic_exchange);
ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, uint32_t,
atomic_exchange);
ATOMIC_BINOP_CASE(I64AtomicAdd, uint64_t, uint64_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I64AtomicAdd8U, uint8_t, uint64_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I64AtomicAdd16U, uint16_t, uint64_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I64AtomicAdd32U, uint32_t, uint64_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I64AtomicSub, uint64_t, uint64_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I64AtomicSub8U, uint8_t, uint64_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I64AtomicSub16U, uint16_t, uint64_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I64AtomicSub32U, uint32_t, uint64_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I64AtomicAnd, uint64_t, uint64_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I64AtomicAnd8U, uint8_t, uint64_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I64AtomicAnd16U, uint16_t, uint64_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I64AtomicAnd32U, uint32_t, uint64_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I64AtomicOr, uint64_t, uint64_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I64AtomicOr8U, uint8_t, uint64_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I64AtomicOr16U, uint16_t, uint64_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I64AtomicOr32U, uint32_t, uint64_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I64AtomicXor, uint64_t, uint64_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I64AtomicXor8U, uint8_t, uint64_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I64AtomicXor16U, uint16_t, uint64_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I64AtomicXor32U, uint32_t, uint64_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I64AtomicExchange, uint64_t, uint64_t, atomic_exchange);
ATOMIC_BINOP_CASE(I64AtomicExchange8U, uint8_t, uint64_t,
atomic_exchange);
ATOMIC_BINOP_CASE(I64AtomicExchange16U, uint16_t, uint64_t,
atomic_exchange);
ATOMIC_BINOP_CASE(I64AtomicExchange32U, uint32_t, uint64_t,
atomic_exchange);
ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, atomic_fetch_add);
ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, atomic_fetch_sub);
ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, atomic_fetch_and);
ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, atomic_fetch_or);
ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, atomic_fetch_xor);
ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, atomic_exchange);
ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, atomic_exchange);
ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, atomic_exchange);
#undef ATOMIC_BINOP_CASE
#define ATOMIC_COMPARE_EXCHANGE_CASE(name, type, op_type) \
#define ATOMIC_COMPARE_EXCHANGE_CASE(name, type) \
case kExpr##name: { \
type val; \
type val2; \
Address addr; \
if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
&val, &val2)) { \
if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val, \
&val2)) { \
return false; \
} \
static_assert(sizeof(std::atomic<type>) == sizeof(type), \
......@@ -1589,52 +1559,36 @@ class ThreadImpl {
">, and " #type); \
std::atomic_compare_exchange_strong( \
reinterpret_cast<std::atomic<type>*>(addr), &val, val2); \
Push(WasmValue(static_cast<op_type>(val))); \
Push(WasmValue(val)); \
break; \
}
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t,
uint32_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t,
uint32_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t,
uint32_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange, uint64_t,
uint64_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange8U, uint8_t,
uint64_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange16U, uint16_t,
uint64_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I64AtomicCompareExchange32U, uint32_t,
uint64_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t);
ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t);
#undef ATOMIC_COMPARE_EXCHANGE_CASE
#define ATOMIC_LOAD_CASE(name, type, op_type, operation) \
#define ATOMIC_LOAD_CASE(name, type, operation) \
case kExpr##name: { \
Address addr; \
if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len)) { \
if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len)) { \
return false; \
} \
static_assert(sizeof(std::atomic<type>) == sizeof(type), \
"Size mismatch for types std::atomic<" #type \
">, and " #type); \
result = WasmValue(static_cast<op_type>( \
std::operation(reinterpret_cast<std::atomic<type>*>(addr)))); \
result = \
WasmValue(std::operation(reinterpret_cast<std::atomic<type>*>(addr))); \
Push(result); \
break; \
}
ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, uint32_t, atomic_load);
ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, uint32_t, atomic_load);
ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, uint32_t, atomic_load);
ATOMIC_LOAD_CASE(I64AtomicLoad, uint64_t, uint64_t, atomic_load);
ATOMIC_LOAD_CASE(I64AtomicLoad8U, uint8_t, uint64_t, atomic_load);
ATOMIC_LOAD_CASE(I64AtomicLoad16U, uint16_t, uint64_t, atomic_load);
ATOMIC_LOAD_CASE(I64AtomicLoad32U, uint32_t, uint64_t, atomic_load);
ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, atomic_load);
ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, atomic_load);
ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, atomic_load);
#undef ATOMIC_LOAD_CASE
#define ATOMIC_STORE_CASE(name, type, op_type, operation) \
#define ATOMIC_STORE_CASE(name, type, operation) \
case kExpr##name: { \
type val; \
Address addr; \
if (!ExtractAtomicOpParams<type, op_type>(decoder, code, addr, pc, len, \
&val)) { \
if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val)) { \
return false; \
} \
static_assert(sizeof(std::atomic<type>) == sizeof(type), \
......@@ -1643,13 +1597,9 @@ class ThreadImpl {
std::operation(reinterpret_cast<std::atomic<type>*>(addr), val); \
break; \
}
ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, uint32_t, atomic_store);
ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, uint32_t, atomic_store);
ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, uint32_t, atomic_store);
ATOMIC_STORE_CASE(I64AtomicStore, uint64_t, uint64_t, atomic_store);
ATOMIC_STORE_CASE(I64AtomicStore8U, uint8_t, uint64_t, atomic_store);
ATOMIC_STORE_CASE(I64AtomicStore16U, uint16_t, uint64_t, atomic_store);
ATOMIC_STORE_CASE(I64AtomicStore32U, uint32_t, uint64_t, atomic_store);
ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, atomic_store);
ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, atomic_store);
ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, atomic_store);
#undef ATOMIC_STORE_CASE
default:
UNREACHABLE();
......
......@@ -32,22 +32,22 @@ void RunU64BinOp(WasmExecutionMode execution_mode, WasmOpcode wasm_op,
}
}
WASM_EXEC_TEST(I64AtomicAdd) {
WASM_COMPILED_EXEC_TEST(I64AtomicAdd) {
RunU64BinOp(execution_mode, kExprI64AtomicAdd, Add);
}
WASM_EXEC_TEST(I64AtomicSub) {
WASM_COMPILED_EXEC_TEST(I64AtomicSub) {
RunU64BinOp(execution_mode, kExprI64AtomicSub, Sub);
}
WASM_EXEC_TEST(I64AtomicAnd) {
WASM_COMPILED_EXEC_TEST(I64AtomicAnd) {
RunU64BinOp(execution_mode, kExprI64AtomicAnd, And);
}
WASM_EXEC_TEST(I64AtomicOr) {
WASM_COMPILED_EXEC_TEST(I64AtomicOr) {
RunU64BinOp(execution_mode, kExprI64AtomicOr, Or);
}
WASM_EXEC_TEST(I64AtomicXor) {
WASM_COMPILED_EXEC_TEST(I64AtomicXor) {
RunU64BinOp(execution_mode, kExprI64AtomicXor, Xor);
}
WASM_EXEC_TEST(I64AtomicExchange) {
WASM_COMPILED_EXEC_TEST(I64AtomicExchange) {
RunU64BinOp(execution_mode, kExprI64AtomicExchange, Exchange);
}
......@@ -73,22 +73,22 @@ void RunU32BinOp(WasmExecutionMode execution_mode, WasmOpcode wasm_op,
}
}
WASM_EXEC_TEST(I64AtomicAdd32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAdd32U) {
RunU32BinOp(execution_mode, kExprI64AtomicAdd32U, Add);
}
WASM_EXEC_TEST(I64AtomicSub32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicSub32U) {
RunU32BinOp(execution_mode, kExprI64AtomicSub32U, Sub);
}
WASM_EXEC_TEST(I64AtomicAnd32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAnd32U) {
RunU32BinOp(execution_mode, kExprI64AtomicAnd32U, And);
}
WASM_EXEC_TEST(I64AtomicOr32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicOr32U) {
RunU32BinOp(execution_mode, kExprI64AtomicOr32U, Or);
}
WASM_EXEC_TEST(I64AtomicXor32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicXor32U) {
RunU32BinOp(execution_mode, kExprI64AtomicXor32U, Xor);
}
WASM_EXEC_TEST(I64AtomicExchange32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicExchange32U) {
RunU32BinOp(execution_mode, kExprI64AtomicExchange32U, Exchange);
}
......@@ -114,22 +114,22 @@ void RunU16BinOp(WasmExecutionMode mode, WasmOpcode wasm_op,
}
}
WASM_EXEC_TEST(I64AtomicAdd16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAdd16U) {
RunU16BinOp(execution_mode, kExprI64AtomicAdd16U, Add);
}
WASM_EXEC_TEST(I64AtomicSub16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicSub16U) {
RunU16BinOp(execution_mode, kExprI64AtomicSub16U, Sub);
}
WASM_EXEC_TEST(I64AtomicAnd16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAnd16U) {
RunU16BinOp(execution_mode, kExprI64AtomicAnd16U, And);
}
WASM_EXEC_TEST(I64AtomicOr16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicOr16U) {
RunU16BinOp(execution_mode, kExprI64AtomicOr16U, Or);
}
WASM_EXEC_TEST(I64AtomicXor16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicXor16U) {
RunU16BinOp(execution_mode, kExprI64AtomicXor16U, Xor);
}
WASM_EXEC_TEST(I64AtomicExchange16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicExchange16U) {
RunU16BinOp(execution_mode, kExprI64AtomicExchange16U, Exchange);
}
......@@ -154,26 +154,26 @@ void RunU8BinOp(WasmExecutionMode execution_mode, WasmOpcode wasm_op,
}
}
WASM_EXEC_TEST(I64AtomicAdd8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAdd8U) {
RunU8BinOp(execution_mode, kExprI64AtomicAdd8U, Add);
}
WASM_EXEC_TEST(I64AtomicSub8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicSub8U) {
RunU8BinOp(execution_mode, kExprI64AtomicSub8U, Sub);
}
WASM_EXEC_TEST(I64AtomicAnd8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicAnd8U) {
RunU8BinOp(execution_mode, kExprI64AtomicAnd8U, And);
}
WASM_EXEC_TEST(I64AtomicOr8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicOr8U) {
RunU8BinOp(execution_mode, kExprI64AtomicOr8U, Or);
}
WASM_EXEC_TEST(I64AtomicXor8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicXor8U) {
RunU8BinOp(execution_mode, kExprI64AtomicXor8U, Xor);
}
WASM_EXEC_TEST(I64AtomicExchange8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicExchange8U) {
RunU8BinOp(execution_mode, kExprI64AtomicExchange8U, Exchange);
}
WASM_EXEC_TEST(I64AtomicCompareExchange) {
WASM_COMPILED_EXEC_TEST(I64AtomicCompareExchange) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -194,7 +194,7 @@ WASM_EXEC_TEST(I64AtomicCompareExchange) {
}
}
WASM_EXEC_TEST(I64AtomicCompareExchange32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicCompareExchange32U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -216,7 +216,7 @@ WASM_EXEC_TEST(I64AtomicCompareExchange32U) {
}
}
WASM_EXEC_TEST(I64AtomicCompareExchange16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicCompareExchange16U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -238,7 +238,7 @@ WASM_EXEC_TEST(I64AtomicCompareExchange16U) {
}
}
WASM_EXEC_TEST(I32AtomicCompareExchange8U) {
WASM_COMPILED_EXEC_TEST(I32AtomicCompareExchange8U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -258,7 +258,7 @@ WASM_EXEC_TEST(I32AtomicCompareExchange8U) {
}
}
WASM_EXEC_TEST(I64AtomicLoad) {
WASM_COMPILED_EXEC_TEST(I64AtomicLoad) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -274,7 +274,7 @@ WASM_EXEC_TEST(I64AtomicLoad) {
}
}
WASM_EXEC_TEST(I64AtomicLoad32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicLoad32U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -290,7 +290,7 @@ WASM_EXEC_TEST(I64AtomicLoad32U) {
}
}
WASM_EXEC_TEST(I64AtomicLoad16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicLoad16U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -306,7 +306,7 @@ WASM_EXEC_TEST(I64AtomicLoad16U) {
}
}
WASM_EXEC_TEST(I64AtomicLoad8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicLoad8U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -321,7 +321,7 @@ WASM_EXEC_TEST(I64AtomicLoad8U) {
}
}
WASM_EXEC_TEST(I64AtomicStoreLoad) {
WASM_COMPILED_EXEC_TEST(I64AtomicStoreLoad) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -341,7 +341,7 @@ WASM_EXEC_TEST(I64AtomicStoreLoad) {
}
}
WASM_EXEC_TEST(I64AtomicStoreLoad32U) {
WASM_COMPILED_EXEC_TEST(I64AtomicStoreLoad32U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -362,7 +362,7 @@ WASM_EXEC_TEST(I64AtomicStoreLoad32U) {
}
}
WASM_EXEC_TEST(I64AtomicStoreLoad16U) {
WASM_COMPILED_EXEC_TEST(I64AtomicStoreLoad16U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......@@ -383,7 +383,7 @@ WASM_EXEC_TEST(I64AtomicStoreLoad16U) {
}
}
WASM_EXEC_TEST(I64AtomicStoreLoad8U) {
WASM_COMPILED_EXEC_TEST(I64AtomicStoreLoad8U) {
EXPERIMENTAL_FLAG_SCOPE(threads);
WasmRunner<uint64_t, uint64_t> r(execution_mode);
r.builder().SetHasSharedMemory();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment