Commit 928900aa authored by Clemens Hammacher, committed by Commit Bot

[Assembler] Cleanup AVX instructions

Several cleanups:
- unify identical macro definitions
- use existing macros instead of duplicating the code
- add AVX versions for xorps and xorpd (to be used by Liftoff)

R=mstarzinger@chromium.org

Bug: v8:7310, v8:6600
Change-Id: Id37c58cf6187533ad9d4a0ca8bc5d49f9de66785
Reviewed-on: https://chromium-review.googlesource.com/937124
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51597}
parent d7e59efa
......@@ -247,6 +247,8 @@ class CpuFeatureScope BASE_EMBEDDED {
#else
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
CheckPolicy check = kCheckSupported) {}
// Define a destructor to avoid unused variable warnings.
~CpuFeatureScope() {}
#endif
};
......
......@@ -2262,8 +2262,7 @@ void Assembler::divsd(XMMRegister dst, Operand src) {
emit_sse_operand(dst, src);
}
void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
void Assembler::xorpd(XMMRegister dst, Operand src) {
EnsureSpace ensure_space(this);
EMIT(0x66);
EMIT(0x0F);
......
......@@ -1059,7 +1059,8 @@ class Assembler : public AssemblerBase {
void mulsd(XMMRegister dst, Operand src);
void divsd(XMMRegister dst, XMMRegister src) { divsd(dst, Operand(src)); }
void divsd(XMMRegister dst, Operand src);
void xorpd(XMMRegister dst, XMMRegister src);
void xorpd(XMMRegister dst, XMMRegister src) { xorpd(dst, Operand(src)); }
void xorpd(XMMRegister dst, Operand src);
void sqrtsd(XMMRegister dst, XMMRegister src) { sqrtsd(dst, Operand(src)); }
void sqrtsd(XMMRegister dst, Operand src);
......@@ -1582,9 +1583,7 @@ class Assembler : public AssemblerBase {
}
PACKED_OP_LIST(AVX_PACKED_OP_DECLARE);
void vps(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
void vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2);
void vpd(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
void vpd(byte op, XMMRegister dst, XMMRegister src1, Operand src2);
void vcmpps(XMMRegister dst, XMMRegister src1, Operand src2, int8_t cmp);
......
......@@ -251,6 +251,8 @@ class TurboAssembler : public Assembler {
AVX_OP3_XO(Psubw, psubw)
AVX_OP3_XO(Psubd, psubd)
AVX_OP3_XO(Pxor, pxor)
AVX_OP3_XO(Xorps, xorps)
AVX_OP3_XO(Xorpd, xorpd)
#undef AVX_OP3_XO
#undef AVX_OP3_WITH_TYPE
......
......@@ -1158,10 +1158,10 @@ void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
Pcmpeqd(dst, dst);
} else if (pop + ntz == 64) {
Pcmpeqd(dst, dst);
Psllq(dst, ntz);
Psllq(dst, static_cast<byte>(ntz));
} else if (pop + nlz == 64) {
Pcmpeqd(dst, dst);
Psrlq(dst, nlz);
Psrlq(dst, static_cast<byte>(nlz));
} else {
uint32_t lower = static_cast<uint32_t>(src);
uint32_t upper = static_cast<uint32_t>(src >> 32);
......@@ -1175,260 +1175,6 @@ void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
}
}
// Aligned packed-single move; prefers the AVX encoding when available.
void TurboAssembler::Movaps(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movaps(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovaps(dst, src);
}
// Unaligned packed-single move (reg-reg); prefers the AVX encoding.
void TurboAssembler::Movups(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movups(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovups(dst, src);
}
// Unaligned packed-single load from memory; prefers the AVX encoding.
void TurboAssembler::Movups(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movups(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovups(dst, src);
}
// Unaligned packed-single store to memory; prefers the AVX encoding.
void TurboAssembler::Movups(Operand dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movups(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovups(dst, src);
}
// Aligned packed-double move; prefers the AVX encoding when available.
void TurboAssembler::Movapd(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movapd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovapd(dst, src);
}
// Scalar-double move (reg-reg). The AVX three-operand form duplicates dst
// as the first source.
void TurboAssembler::Movsd(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movsd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovsd(dst, dst, src);
}
// Scalar-double load from memory; prefers the AVX encoding.
void TurboAssembler::Movsd(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movsd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovsd(dst, src);
}
// Scalar-double store to memory; prefers the AVX encoding.
void TurboAssembler::Movsd(Operand dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movsd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovsd(dst, src);
}
// Scalar-single move (reg-reg). The AVX three-operand form duplicates dst
// as the first source.
void TurboAssembler::Movss(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movss(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovss(dst, dst, src);
}
// Scalar-single load from memory; prefers the AVX encoding.
void TurboAssembler::Movss(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movss(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovss(dst, src);
}
// Scalar-single store to memory; prefers the AVX encoding.
void TurboAssembler::Movss(Operand dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movss(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovss(dst, src);
}
// 32-bit GP-register -> XMM move; prefers the AVX encoding.
void TurboAssembler::Movd(XMMRegister dst, Register src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovd(dst, src);
}
// 32-bit memory -> XMM move; prefers the AVX encoding.
void TurboAssembler::Movd(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovd(dst, src);
}
// XMM -> 32-bit GP-register move; prefers the AVX encoding.
void TurboAssembler::Movd(Register dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovd(dst, src);
}
// 64-bit GP-register -> XMM move; prefers the AVX encoding.
void TurboAssembler::Movq(XMMRegister dst, Register src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movq(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovq(dst, src);
}
// XMM -> 64-bit GP-register move; prefers the AVX encoding.
void TurboAssembler::Movq(Register dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movq(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovq(dst, src);
}
// Extract packed-single sign-bit mask into a GP register; prefers AVX.
void TurboAssembler::Movmskps(Register dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movmskps(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovmskps(dst, src);
}
// Extract packed-double sign-bit mask into a GP register; prefers AVX.
void TurboAssembler::Movmskpd(Register dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    movmskpd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vmovmskpd(dst, src);
}
// Packed-single XOR (reg-reg). The AVX three-operand form duplicates dst
// as the first source.
void TurboAssembler::Xorps(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    xorps(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vxorps(dst, dst, src);
}
// Packed-single XOR with a memory operand; AVX form duplicates dst.
void TurboAssembler::Xorps(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    xorps(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vxorps(dst, dst, src);
}
// Scalar-single rounding with the given mode; AVX form duplicates dst.
void TurboAssembler::Roundss(XMMRegister dst, XMMRegister src,
                             RoundingMode mode) {
  if (!CpuFeatures::IsSupported(AVX)) {
    roundss(dst, src, mode);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vroundss(dst, dst, src, mode);
}
// Scalar-double rounding with the given mode; AVX form duplicates dst.
void TurboAssembler::Roundsd(XMMRegister dst, XMMRegister src,
                             RoundingMode mode) {
  if (!CpuFeatures::IsSupported(AVX)) {
    roundsd(dst, src, mode);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vroundsd(dst, dst, src, mode);
}
// Scalar-double square root (reg-reg); AVX form duplicates dst.
void TurboAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    sqrtsd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vsqrtsd(dst, dst, src);
}
// Scalar-double square root from memory; AVX form duplicates dst.
void TurboAssembler::Sqrtsd(XMMRegister dst, Operand src) {
  if (!CpuFeatures::IsSupported(AVX)) {
    sqrtsd(dst, src);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vsqrtsd(dst, dst, src);
}
// Unordered scalar-single compare setting EFLAGS; prefers AVX.
void TurboAssembler::Ucomiss(XMMRegister src1, XMMRegister src2) {
  if (!CpuFeatures::IsSupported(AVX)) {
    ucomiss(src1, src2);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vucomiss(src1, src2);
}
// Unordered scalar-single compare against memory; prefers AVX.
void TurboAssembler::Ucomiss(XMMRegister src1, Operand src2) {
  if (!CpuFeatures::IsSupported(AVX)) {
    ucomiss(src1, src2);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vucomiss(src1, src2);
}
// Unordered scalar-double compare setting EFLAGS; prefers AVX.
void TurboAssembler::Ucomisd(XMMRegister src1, XMMRegister src2) {
  if (!CpuFeatures::IsSupported(AVX)) {
    ucomisd(src1, src2);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vucomisd(src1, src2);
}
// Unordered scalar-double compare against memory; prefers AVX.
void TurboAssembler::Ucomisd(XMMRegister src1, Operand src2) {
  if (!CpuFeatures::IsSupported(AVX)) {
    ucomisd(src1, src2);
    return;
  }
  CpuFeatureScope scope(this, AVX);
  vucomisd(src1, src2);
}
// ----------------------------------------------------------------------------
void MacroAssembler::Absps(XMMRegister dst) {
......
......@@ -136,50 +136,83 @@ class TurboAssembler : public Assembler {
return code_object_;
}
#define AVX_OP2_WITH_TYPE(macro_name, name, src_type) \
void macro_name(XMMRegister dst, src_type src) { \
if (CpuFeatures::IsSupported(AVX)) { \
CpuFeatureScope scope(this, AVX); \
v##name(dst, dst, src); \
} else { \
name(dst, src); \
} \
// Dispatch helper: given pointers to both the AVX member function and the
// SSE fallback of Assembler, emit() picks the AVX one (inside a
// CpuFeatureScope) when AVX is supported, and the fallback otherwise.
// Overload resolution on the pointer-to-member template parameters selects
// the right emit() for two- vs. three-operand AVX forms.
template <typename Dst, typename... Args>
struct AvxHelper {
Assembler* assm;
// Call a method where the AVX version expects the dst argument to be
// duplicated (three-operand form: avx(dst, dst, args...)).
template <void (Assembler::*avx)(Dst, Dst, Args...),
void (Assembler::*no_avx)(Dst, Args...)>
void emit(Dst dst, Args... args) {
if (CpuFeatures::IsSupported(AVX)) {
CpuFeatureScope scope(assm, AVX);
(assm->*avx)(dst, dst, args...);
} else {
(assm->*no_avx)(dst, args...);
}
}
// Call a method where the AVX version expects no duplicated dst argument.
template <void (Assembler::*avx)(Dst, Args...),
void (Assembler::*no_avx)(Dst, Args...)>
void emit(Dst dst, Args... args) {
if (CpuFeatures::IsSupported(AVX)) {
CpuFeatureScope scope(assm, AVX);
(assm->*avx)(dst, args...);
} else {
(assm->*no_avx)(dst, args...);
}
}
};
// Declares a macro_name(...) wrapper that forwards to AvxHelper::emit,
// dispatching between v##name (AVX) and name (SSE fallback) at runtime.
#define AVX_OP(macro_name, name) \
template <typename Dst, typename... Args> \
void macro_name(Dst dst, Args... args) { \
AvxHelper<Dst, Args...>{this} \
.template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
}
#define AVX_OP2_X(macro_name, name) \
AVX_OP2_WITH_TYPE(macro_name, name, XMMRegister)
#define AVX_OP2_O(macro_name, name) AVX_OP2_WITH_TYPE(macro_name, name, Operand)
#define AVX_OP2_XO(macro_name, name) \
AVX_OP2_X(macro_name, name) \
AVX_OP2_O(macro_name, name)
AVX_OP2_XO(Subsd, subsd)
AVX_OP2_XO(Divss, divss)
AVX_OP2_XO(Divsd, divsd)
AVX_OP2_XO(Xorpd, xorpd)
AVX_OP2_X(Pcmpeqd, pcmpeqd)
AVX_OP2_WITH_TYPE(Psllq, psllq, byte)
AVX_OP2_WITH_TYPE(Psrlq, psrlq, byte)
#undef AVX_OP2_O
#undef AVX_OP2_X
#undef AVX_OP2_XO
#undef AVX_OP2_WITH_TYPE
void Xorps(XMMRegister dst, XMMRegister src);
void Xorps(XMMRegister dst, Operand src);
void Movd(XMMRegister dst, Register src);
void Movd(XMMRegister dst, Operand src);
void Movd(Register dst, XMMRegister src);
void Movq(XMMRegister dst, Register src);
void Movq(Register dst, XMMRegister src);
void Movsd(XMMRegister dst, XMMRegister src);
void Movsd(XMMRegister dst, Operand src);
void Movsd(Operand dst, XMMRegister src);
void Movss(XMMRegister dst, XMMRegister src);
void Movss(XMMRegister dst, Operand src);
void Movss(Operand dst, XMMRegister src);
AVX_OP(Subsd, subsd)
AVX_OP(Divss, divss)
AVX_OP(Divsd, divsd)
AVX_OP(Xorps, xorps)
AVX_OP(Xorpd, xorpd)
AVX_OP(Movd, movd)
AVX_OP(Movq, movq)
AVX_OP(Movaps, movaps)
AVX_OP(Movapd, movapd)
AVX_OP(Movups, movups)
AVX_OP(Movmskps, movmskps)
AVX_OP(Movmskpd, movmskpd)
AVX_OP(Movss, movss)
AVX_OP(Movsd, movsd)
AVX_OP(Pcmpeqd, pcmpeqd)
AVX_OP(Psllq, psllq)
AVX_OP(Psrlq, psrlq)
AVX_OP(Addsd, addsd)
AVX_OP(Mulsd, mulsd)
AVX_OP(Andps, andps)
AVX_OP(Andpd, andpd)
AVX_OP(Orpd, orpd)
AVX_OP(Cmpeqps, cmpeqps)
AVX_OP(Cmpltps, cmpltps)
AVX_OP(Cmpleps, cmpleps)
AVX_OP(Cmpneqps, cmpneqps)
AVX_OP(Cmpnltps, cmpnltps)
AVX_OP(Cmpnleps, cmpnleps)
AVX_OP(Cmpeqpd, cmpeqpd)
AVX_OP(Cmpltpd, cmpltpd)
AVX_OP(Cmplepd, cmplepd)
AVX_OP(Cmpneqpd, cmpneqpd)
AVX_OP(Cmpnltpd, cmpnltpd)
AVX_OP(Cmpnlepd, cmpnlepd)
AVX_OP(Roundss, roundss)
AVX_OP(Roundsd, roundsd)
AVX_OP(Sqrtsd, sqrtsd)
AVX_OP(Ucomiss, ucomiss)
AVX_OP(Ucomisd, ucomisd)
#undef AVX_OP
// Pushes |src| onto the stack, where it will serve as the return address.
void PushReturnAddressFrom(Register src) {
  pushq(src);
}
// Pops the return address from the stack into |dst|.
void PopReturnAddressTo(Register dst) {
  popq(dst);
}
......@@ -201,14 +234,6 @@ class TurboAssembler : public Assembler {
movp(destination, kScratchRegister);
}
void Movups(XMMRegister dst, XMMRegister src);
void Movups(XMMRegister dst, Operand src);
void Movups(Operand dst, XMMRegister src);
void Movapd(XMMRegister dst, XMMRegister src);
void Movaps(XMMRegister dst, XMMRegister src);
void Movmskpd(Register dst, XMMRegister src);
void Movmskps(Register dst, XMMRegister src);
void Push(Register src);
void Push(Operand src);
void Push(Immediate value);
......@@ -267,17 +292,6 @@ class TurboAssembler : public Assembler {
void Cvtlsi2sd(XMMRegister dst, Register src);
void Cvtlsi2sd(XMMRegister dst, Operand src);
void Roundss(XMMRegister dst, XMMRegister src, RoundingMode mode);
void Roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
void Sqrtsd(XMMRegister dst, XMMRegister src);
void Sqrtsd(XMMRegister dst, Operand src);
void Ucomiss(XMMRegister src1, XMMRegister src2);
void Ucomiss(XMMRegister src1, Operand src2);
void Ucomisd(XMMRegister src1, XMMRegister src2);
void Ucomisd(XMMRegister src1, Operand src2);
void Lzcntq(Register dst, Register src);
void Lzcntq(Register dst, Operand src);
void Lzcntl(Register dst, Register src);
......@@ -738,45 +752,6 @@ class MacroAssembler : public TurboAssembler {
void Pop(Operand dst);
void PopQuad(Operand dst);
#define AVX_OP2_WITH_TYPE(macro_name, name, src_type) \
void macro_name(XMMRegister dst, src_type src) { \
if (CpuFeatures::IsSupported(AVX)) { \
CpuFeatureScope scope(this, AVX); \
v##name(dst, dst, src); \
} else { \
name(dst, src); \
} \
}
#define AVX_OP2_X(macro_name, name) \
AVX_OP2_WITH_TYPE(macro_name, name, XMMRegister)
#define AVX_OP2_O(macro_name, name) AVX_OP2_WITH_TYPE(macro_name, name, Operand)
#define AVX_OP2_XO(macro_name, name) \
AVX_OP2_X(macro_name, name) \
AVX_OP2_O(macro_name, name)
AVX_OP2_XO(Addsd, addsd)
AVX_OP2_XO(Mulsd, mulsd)
AVX_OP2_XO(Andps, andps)
AVX_OP2_XO(Andpd, andpd)
AVX_OP2_XO(Orpd, orpd)
AVX_OP2_XO(Cmpeqps, cmpeqps)
AVX_OP2_XO(Cmpltps, cmpltps)
AVX_OP2_XO(Cmpleps, cmpleps)
AVX_OP2_XO(Cmpneqps, cmpneqps)
AVX_OP2_XO(Cmpnltps, cmpnltps)
AVX_OP2_XO(Cmpnleps, cmpnleps)
AVX_OP2_XO(Cmpeqpd, cmpeqpd)
AVX_OP2_XO(Cmpltpd, cmpltpd)
AVX_OP2_XO(Cmplepd, cmplepd)
AVX_OP2_XO(Cmpneqpd, cmpneqpd)
AVX_OP2_XO(Cmpnltpd, cmpnltpd)
AVX_OP2_XO(Cmpnlepd, cmpnlepd)
#undef AVX_OP2_O
#undef AVX_OP2_X
#undef AVX_OP2_XO
#undef AVX_OP2_WITH_TYPE
// ---------------------------------------------------------------------------
// SIMD macros.
void Absps(XMMRegister dst);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment