Commit 8c0bd711 authored by Lu Yahan's avatar Lu Yahan Committed by V8 LUCI CQ

[riscv64][wasm] Implement atomic

Change-Id: I0fb3a4738c8e9b4b4328b8a1e142eefed61ec998
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2881494
Commit-Queue: Yahan Lu <yahan@iscas.ac.cn>
Reviewed-by: Brice Dobry <brice.dobry@futurewei.com>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74623}
parent b856f1c9
...@@ -558,60 +558,297 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg, ...@@ -558,60 +558,297 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
} }
} }
namespace liftoff {
#define __ lasm->
inline Register CalculateActualAddress(LiftoffAssembler* lasm,
                                       Register addr_reg, Register offset_reg,
                                       uintptr_t offset_imm,
                                       Register result_reg) {
  // Computes {addr_reg} + {offset_reg} + {offset_imm} and returns the register
  // holding the final address. If there is neither a dynamic nor a static
  // offset, the base register is returned unchanged and {result_reg} is left
  // untouched. Generalized from the original, which DCHECKed
  // offset_reg != no_reg: atomic accesses without an index register are legal
  // in Liftoff, and other ports take this early-out as well.
  DCHECK_NE(addr_reg, no_reg);
  if (offset_reg == no_reg && offset_imm == 0) return addr_reg;
  if (offset_reg != no_reg) {
    __ Add64(result_reg, addr_reg, Operand(offset_reg));
    if (offset_imm != 0) {
      __ Add64(result_reg, result_reg, Operand(offset_imm));
    }
  } else {
    __ Add64(result_reg, addr_reg, Operand(offset_imm));
  }
  return result_reg;
}
enum class Binop { kAdd, kSub, kAnd, kOr, kXor, kExchange };

// Emits an atomic read-modify-write of {op} on the memory location
// {dst_addr} + {offset_reg} + {offset_imm}. The previous memory value is
// returned in {result}. 32/64-bit accesses use an LR/SC retry loop; 8/16-bit
// accesses fall back to fenced plain loads/stores.
// NOTE(review): the 8/16-bit path is not a true atomic RMW — the load and the
// store are separate, unreserved accesses. Confirm whether narrow atomics need
// an aligned-word LR/SC with masking here.
inline void AtomicBinop(LiftoffAssembler* lasm, Register dst_addr,
                        Register offset_reg, uintptr_t offset_imm,
                        LiftoffRegister value, LiftoffRegister result,
                        StoreType type, Binop op) {
  LiftoffRegList pinned =
      LiftoffRegList::ForRegs(dst_addr, offset_reg, value, result);
  // Receives the SC success flag (0 == store succeeded).
  Register store_result = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
  // Make sure that {result} is unique, i.e. does not alias any input register;
  // otherwise the loop body would clobber an operand it still needs.
  Register result_reg = result.gp();
  if (result_reg == value.gp() || result_reg == dst_addr ||
      result_reg == offset_reg) {
    result_reg = __ GetUnusedRegister(kGpReg, pinned).gp();
  }
  UseScratchRegisterScope temps(lasm);
  Register actual_addr = liftoff::CalculateActualAddress(
      lasm, dst_addr, offset_reg, offset_imm, temps.Acquire());
  // Allocate an additional {temp} register to hold the result that should be
  // stored to memory. Note that {temp} and {store_result} are not allowed to
  // be the same register.
  Register temp = temps.Acquire();
  Label retry;
  __ bind(&retry);
  // 1. Load the old value into {result_reg}.
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      __ lbu(result_reg, actual_addr, 0);
      __ sync();
      break;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      __ lhu(result_reg, actual_addr, 0);
      __ sync();
      break;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      __ lr_w(true, false, result_reg, actual_addr);
      break;
    case StoreType::kI64Store:
      __ lr_d(true, false, result_reg, actual_addr);
      break;
    default:
      UNREACHABLE();
  }
  // 2. Compute the new value into {temp}.
  switch (op) {
    case Binop::kAdd:
      __ add(temp, result_reg, value.gp());
      break;
    case Binop::kSub:
      __ sub(temp, result_reg, value.gp());
      break;
    case Binop::kAnd:
      __ and_(temp, result_reg, value.gp());
      break;
    case Binop::kOr:
      __ or_(temp, result_reg, value.gp());
      break;
    case Binop::kXor:
      __ xor_(temp, result_reg, value.gp());
      break;
    case Binop::kExchange:
      __ mv(temp, value.gp());
      break;
  }
  // 3. Store the new value. The narrow cases use plain stores that always
  // "succeed", so {store_result} is zeroed to terminate the retry loop.
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      __ sync();
      __ sb(temp, actual_addr, 0);
      __ sync();
      __ mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store16:
    case StoreType::kI32Store16:
      __ sync();
      __ sh(temp, actual_addr, 0);
      __ sync();
      __ mv(store_result, zero_reg);
      break;
    case StoreType::kI64Store32:
    case StoreType::kI32Store:
      __ sc_w(false, true, store_result, actual_addr, temp);
      break;
    case StoreType::kI64Store:
      // Bug fix: was sc_w, which would conditionally store only the low 32
      // bits of the 64-bit result loaded with lr_d. Use the doubleword SC.
      __ sc_d(false, true, store_result, actual_addr, temp);
      break;
    default:
      UNREACHABLE();
  }
  __ bnez(store_result, &retry);
  if (result_reg != result.gp()) {
    __ mv(result.gp(), result_reg);
  }
}
#undef __
} // namespace liftoff
void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr, void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
Register offset_reg, uintptr_t offset_imm, Register offset_reg, uintptr_t offset_imm,
LoadType type, LiftoffRegList pinned) { LoadType type, LiftoffRegList pinned) {
bailout(kAtomics, "AtomicLoad"); UseScratchRegisterScope temps(this);
Register src_reg = liftoff::CalculateActualAddress(
this, src_addr, offset_reg, offset_imm, temps.Acquire());
switch (type.value()) {
case LoadType::kI32Load8U:
case LoadType::kI64Load8U:
lbu(dst.gp(), src_reg, 0);
sync();
return;
case LoadType::kI32Load16U:
case LoadType::kI64Load16U:
lhu(dst.gp(), src_reg, 0);
sync();
return;
case LoadType::kI32Load:
lr_w(true, true, dst.gp(), src_reg);
return;
case LoadType::kI64Load32U:
lr_w(true, true, dst.gp(), src_reg);
slli(dst.gp(), dst.gp(), 32);
srli(dst.gp(), dst.gp(), 32);
return;
case LoadType::kI64Load:
lr_d(true, true, dst.gp(), src_reg);
return;
default:
UNREACHABLE();
}
} }
void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister src, uintptr_t offset_imm, LiftoffRegister src,
StoreType type, LiftoffRegList pinned) { StoreType type, LiftoffRegList pinned) {
bailout(kAtomics, "AtomicStore"); UseScratchRegisterScope temps(this);
Register dst_reg = liftoff::CalculateActualAddress(
this, dst_addr, offset_reg, offset_imm, temps.Acquire());
switch (type.value()) {
case StoreType::kI64Store8:
case StoreType::kI32Store8:
sync();
sb(src.gp(), dst_reg, 0);
sync();
return;
case StoreType::kI64Store16:
case StoreType::kI32Store16:
sync();
sh(src.gp(), dst_reg, 0);
sync();
return;
case StoreType::kI64Store32:
case StoreType::kI32Store:
sc_w(true, true, zero_reg, dst_reg, src.gp());
return;
case StoreType::kI64Store:
sc_d(true, true, zero_reg, dst_reg, src.gp());
return;
default:
UNREACHABLE();
}
} }
void LiftoffAssembler::AtomicAdd(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicAdd(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister value, uintptr_t offset_imm, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicAdd"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kAdd);
} }
void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister value, uintptr_t offset_imm, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicSub"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kSub);
} }
void LiftoffAssembler::AtomicAnd(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicAnd(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister value, uintptr_t offset_imm, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicAnd"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kAnd);
} }
void LiftoffAssembler::AtomicOr(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicOr(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister value, uintptr_t offset_imm, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicOr"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kOr);
} }
void LiftoffAssembler::AtomicXor(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicXor(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, LiftoffRegister value, uintptr_t offset_imm, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicXor"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kXor);
} }
void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg, void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
uintptr_t offset_imm, uintptr_t offset_imm,
LiftoffRegister value, LiftoffRegister value,
LiftoffRegister result, StoreType type) { LiftoffRegister result, StoreType type) {
bailout(kAtomics, "AtomicExchange"); liftoff::AtomicBinop(this, dst_addr, offset_reg, offset_imm, value, result,
type, liftoff::Binop::kExchange);
} }
void LiftoffAssembler::AtomicCompareExchange( void LiftoffAssembler::AtomicCompareExchange(
Register dst_addr, Register offset_reg, uintptr_t offset_imm, Register dst_addr, Register offset_reg, uintptr_t offset_imm,
LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result, LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
StoreType type) { StoreType type) {
bailout(kAtomics, "AtomicCompareExchange"); LiftoffRegList pinned =
LiftoffRegList::ForRegs(dst_addr, offset_reg, expected, new_value);
Register result_reg = result.gp();
if (pinned.has(result)) {
result_reg = GetUnusedRegister(kGpReg, pinned).gp();
}
UseScratchRegisterScope temps(this);
Register actual_addr = liftoff::CalculateActualAddress(
this, dst_addr, offset_reg, offset_imm, temps.Acquire());
Register store_result = temps.Acquire();
Label retry;
Label done;
bind(&retry);
switch (type.value()) {
case StoreType::kI64Store8:
case StoreType::kI32Store8:
lbu(result_reg, actual_addr, 0);
sync();
Branch(&done, ne, result.gp(), Operand(expected.gp()));
sync();
sb(new_value.gp(), actual_addr, 0);
sync();
mv(store_result, zero_reg);
break;
case StoreType::kI64Store16:
case StoreType::kI32Store16:
lhu(result_reg, actual_addr, 0);
sync();
Branch(&done, ne, result.gp(), Operand(expected.gp()));
sync();
sh(new_value.gp(), actual_addr, 0);
sync();
mv(store_result, zero_reg);
break;
case StoreType::kI64Store32:
case StoreType::kI32Store:
lr_w(true, true, result_reg, actual_addr);
Branch(&done, ne, result.gp(), Operand(expected.gp()));
sc_w(true, true, store_result, new_value.gp(), actual_addr);
break;
case StoreType::kI64Store:
lr_d(true, true, result_reg, actual_addr);
Branch(&done, ne, result.gp(), Operand(expected.gp()));
sc_d(true, true, store_result, new_value.gp(), actual_addr);
break;
default:
UNREACHABLE();
}
bnez(store_result, &retry);
bind(&done);
if (result_reg != result.gp()) {
mv(result.gp(), result_reg);
}
} }
void LiftoffAssembler::AtomicFence() { sync(); } void LiftoffAssembler::AtomicFence() { sync(); }
......
...@@ -813,31 +813,13 @@ ...@@ -813,31 +813,13 @@
# This often fails in debug mode because it is too slow
'd8/d8-performance-now': [PASS, ['mode == debug', SKIP]],
# Some atomic functions are not yet implemented
'wasm/compare-exchange64-stress': [SKIP],
'wasm/compare-exchange-stress': [SKIP],
'regress/wasm/regress-1045225': [SKIP],
'regress/wasm/regress-1045737': [SKIP],
'regress/wasm/regress-1048241': [SKIP],
'regress/wasm/regress-1074586-b': [SKIP],
'regress/wasm/regress-1075953': [SKIP],
'regress/wasm/regress-1074586': [SKIP],
'regress/wasm/regress-1079449': [SKIP],
'regress/wasm/regress-1080902': [SKIP],
'regress/wasm/regress-1140549': [SKIP],
'regress/wasm/regress-1153442': [SKIP],
'regress/wasm/regress-1168116': [SKIP],
'regress/wasm/regress-1196837': [SKIP],
'regress/wasm/regress-1202736': [SKIP],
'wasm/atomics': [SKIP],
'wasm/atomics-non-shared': [SKIP],
'wasm/grow-shared-memory': [SKIP],
'wasm/shared-memory': [SKIP],
# https://github.com/v8-riscv/v8/issues/418
'regress/regress-1138075': [SKIP],
'regress/regress-1138611': [SKIP],
# Some atomic functions are not yet implemented
'regress/wasm/regress-1196837': [SKIP],
# SIMD is not yet implemented
'regress/wasm/regress-1054466': [SKIP],
'regress/wasm/regress-1065599': [SKIP],
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment