Commit e8cdd505 authored by Junliang Yan, committed by V8 LUCI CQ

aix: fix atomic compare and swap for liftoff

Change-Id: Ie46687a1af834b9c1a6c8fa5bcf5badb61cc1647
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3259645
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Commit-Queue: Junliang Yan <junyan@redhat.com>
Cr-Commit-Position: refs/heads/main@{#77686}
parent ebc74954
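For orientation before the diff: Wasm memory is little-endian, so on a big-endian host the Liftoff path byte-reverses the expected and new values, performs the compare-exchange on the reversed representation, and reverses the observed result back to host order. A minimal, self-contained C++ sketch of that pattern follows; it uses std::atomic and hypothetical helper names (bswap32, CasLittleEndianSlot), not V8's TurboAssembler API, and is only an illustration of the technique the diff implements in assembly.

#include <atomic>
#include <cstdint>

// Illustrative byte-swap helper (hypothetical name); production code would
// typically use a compiler builtin such as __builtin_bswap32.
static inline uint32_t bswap32(uint32_t v) {
  return ((v & 0x000000FFu) << 24) | ((v & 0x0000FF00u) << 8) |
         ((v & 0x00FF0000u) >> 8) | ((v & 0xFF000000u) >> 24);
}

// Compare-and-swap on a 32-bit slot stored in little-endian order while
// running on a big-endian host: reverse the operands, CAS on the reversed
// representation, then reverse the observed value back to host order,
// mirroring the ByteReverseU32 / AtomicCompareExchange / ByteReverseU32
// sequence in the diff below. Returns the value that was in the slot.
static uint32_t CasLittleEndianSlot(std::atomic<uint32_t>* slot,
                                    uint32_t expected, uint32_t new_value) {
  uint32_t observed = bswap32(expected);
  slot->compare_exchange_strong(observed, bswap32(new_value));
  return bswap32(observed);
}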
@@ -617,9 +617,6 @@ void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                       uintptr_t offset_imm,
                                       LiftoffRegister value,
                                       LiftoffRegister result, StoreType type) {
-#if defined(V8_OS_AIX)
-  bailout(kUnsupportedArchitecture, "atomic");
-#else
   Register offset = r0;
   if (offset_imm != 0) {
     mov(ip, Operand(offset_imm));
@@ -674,16 +671,12 @@ void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
     default:
       UNREACHABLE();
   }
-#endif
 }
 
 void LiftoffAssembler::AtomicCompareExchange(
     Register dst_addr, Register offset_reg, uintptr_t offset_imm,
     LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
     StoreType type) {
-#if defined(V8_OS_AIX)
-  bailout(kUnsupportedArchitecture, "atomic");
-#else
   Register offset = r0;
   if (offset_imm != 0) {
     mov(ip, Operand(offset_imm));
@@ -707,13 +700,13 @@ void LiftoffAssembler::AtomicCompareExchange(
     case StoreType::kI32Store16:
     case StoreType::kI64Store16: {
       if (is_be) {
-        Push(r3, r4);
-        ByteReverseU16(r3, new_value.gp());
-        ByteReverseU16(r4, expected.gp());
-        TurboAssembler::AtomicCompareExchange<uint16_t>(dst, r4, r3,
-                                                        result.gp(), r0);
+        Push(new_value.gp(), expected.gp());
+        ByteReverseU16(new_value.gp(), new_value.gp());
+        ByteReverseU16(expected.gp(), expected.gp());
+        TurboAssembler::AtomicCompareExchange<uint16_t>(
+            dst, expected.gp(), new_value.gp(), result.gp(), r0);
         ByteReverseU16(result.gp(), result.gp());
-        Pop(r3, r4);
+        Pop(new_value.gp(), expected.gp());
       } else {
         TurboAssembler::AtomicCompareExchange<uint16_t>(
             dst, expected.gp(), new_value.gp(), result.gp(), r0);
@@ -723,13 +716,13 @@ void LiftoffAssembler::AtomicCompareExchange(
     case StoreType::kI32Store:
     case StoreType::kI64Store32: {
       if (is_be) {
-        Push(r3, r4);
-        ByteReverseU32(r3, new_value.gp());
-        ByteReverseU32(r4, expected.gp());
-        TurboAssembler::AtomicCompareExchange<uint32_t>(dst, r4, r3,
-                                                        result.gp(), r0);
+        Push(new_value.gp(), expected.gp());
+        ByteReverseU32(new_value.gp(), new_value.gp());
+        ByteReverseU32(expected.gp(), expected.gp());
+        TurboAssembler::AtomicCompareExchange<uint32_t>(
+            dst, expected.gp(), new_value.gp(), result.gp(), r0);
         ByteReverseU32(result.gp(), result.gp());
-        Pop(r3, r4);
+        Pop(new_value.gp(), expected.gp());
       } else {
         TurboAssembler::AtomicCompareExchange<uint32_t>(
             dst, expected.gp(), new_value.gp(), result.gp(), r0);
@@ -738,13 +731,13 @@ void LiftoffAssembler::AtomicCompareExchange(
     }
     case StoreType::kI64Store: {
       if (is_be) {
-        Push(r3, r4);
-        ByteReverseU64(r3, new_value.gp());
-        ByteReverseU64(r4, expected.gp());
-        TurboAssembler::AtomicCompareExchange<uint64_t>(dst, r4, r3,
-                                                        result.gp(), r0);
+        Push(new_value.gp(), expected.gp());
+        ByteReverseU64(new_value.gp(), new_value.gp());
+        ByteReverseU64(expected.gp(), expected.gp());
+        TurboAssembler::AtomicCompareExchange<uint64_t>(
+            dst, expected.gp(), new_value.gp(), result.gp(), r0);
         ByteReverseU64(result.gp(), result.gp());
-        Pop(r3, r4);
+        Pop(new_value.gp(), expected.gp());
       } else {
         TurboAssembler::AtomicCompareExchange<uint64_t>(
             dst, expected.gp(), new_value.gp(), result.gp(), r0);
@@ -754,7 +747,6 @@ void LiftoffAssembler::AtomicCompareExchange(
     default:
       UNREACHABLE();
   }
-#endif
 }
 
 void LiftoffAssembler::AtomicFence() { sync(); }
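Beyond removing the V8_OS_AIX bailouts, the substantive change in each big-endian case above is that the byte-reversed copies are no longer staged in the fixed registers r3/r4, which may alias the incoming expected, new_value, or result registers; instead the operands are saved with Push, reversed in place, passed directly to TurboAssembler::AtomicCompareExchange, and restored with Pop. As a quick sanity check of the byte-reversal pattern, here is a usage example for the sketch shown before the diff (it reuses the hypothetical bswap32 and CasLittleEndianSlot helpers from that sketch and adds <cassert>).

#include <cassert>

int main() {
  // The slot holds its value in little-endian order, i.e. byte-reversed
  // relative to a big-endian host, so it is initialized via bswap32 too.
  std::atomic<uint32_t> slot{bswap32(0x11223344u)};

  // Matching expected value: the swap succeeds and the old value is
  // reported back in host byte order.
  uint32_t old_value = CasLittleEndianSlot(&slot, 0x11223344u, 0xAABBCCDDu);
  assert(old_value == 0x11223344u);

  // Stale expected value: the swap fails and the current contents
  // (0xAABBCCDD, written above) are reported instead.
  old_value = CasLittleEndianSlot(&slot, 0x11223344u, 0x55667788u);
  assert(old_value == 0xAABBCCDDu);
  return 0;
}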