Commit 66870070 authored by Ross McIlroy, committed by Commit Bot

[Compiler] Use conditional move / select to generate poison mask.

Replace bitwise arithmetic with conditional move / select instructions
on ia32, x64, Arm and Arm64. In local tests this improves --noopt Ignition
performance by between 2% and 5%.

BUG=chromium:798964

Change-Id: I82832e5d28469a574a575119c1a665b5c2c93bb2
Reviewed-on: https://chromium-review.googlesource.com/916561
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51444}
parent aa9843d7
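Conceptually, the patch replaces a branch-free arithmetic derivation of the speculation poison mask with a comparison followed by a conditional move / select. The C++-style sketch below is illustrative only: current, expected and poison are stand-in names for the computed code start address, the expected code start address and the resulting mask, while kBitsPerPointer is taken from the code being removed.

    // Old scheme: derive the mask purely with integer arithmetic. If the
    // addresses differ, at least one subtraction is negative, the sign bit
    // of `difference` is set, the arithmetic shift smears it across every
    // bit, and the final negation yields 0. If they match, the result is ~0.
    intptr_t difference = (current - expected) | (expected - current);
    intptr_t poison = ~(difference >> (kBitsPerPointer - 1));

    // New scheme: compare once and conditionally select the mask, i.e.
    //   poison = (current == expected) ? -1 : 0;
    // emitted as predicated moves, Csetm or cmov depending on the target.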
......@@ -428,8 +428,7 @@ void TurboAssembler::Cset(const Register& rd, Condition cond) {
cset(rd, cond);
}
- void MacroAssembler::Csetm(const Register& rd, Condition cond) {
+ void TurboAssembler::Csetm(const Register& rd, Condition cond) {
DCHECK(allow_macro_instructions());
DCHECK(!rd.IsZero());
DCHECK((cond != al) && (cond != nv));
......
......@@ -595,11 +595,8 @@ void TurboAssembler::ConditionalCompareMacro(const Register& rn,
}
}
- void MacroAssembler::Csel(const Register& rd,
- const Register& rn,
- const Operand& operand,
- Condition cond) {
+ void TurboAssembler::Csel(const Register& rd, const Register& rn,
+ const Operand& operand, Condition cond) {
DCHECK(allow_macro_instructions());
DCHECK(!rd.IsZero());
DCHECK((cond != al) && (cond != nv));
......
......@@ -645,6 +645,8 @@ class TurboAssembler : public Assembler {
inline void Cmp(const Register& rn, const Operand& operand);
inline void Subs(const Register& rd, const Register& rn,
const Operand& operand);
+ void Csel(const Register& rd, const Register& rn, const Operand& operand,
+ Condition cond);
// Emits a runtime assert that the stack pointer is aligned.
void AssertSpAligned();
......@@ -1011,6 +1013,7 @@ class TurboAssembler : public Assembler {
void CanonicalizeNaN(const VRegister& reg) { CanonicalizeNaN(reg, reg); }
inline void Cset(const Register& rd, Condition cond);
+ inline void Csetm(const Register& rd, Condition cond);
inline void Fccmp(const VRegister& fn, const VRegister& fm, StatusFlags nzcv,
Condition cond);
inline void Csinc(const Register& rd, const Register& rn, const Register& rm,
......@@ -1295,8 +1298,6 @@ class MacroAssembler : public TurboAssembler {
inline void Ccmn(const Register& rn, const Operand& operand, StatusFlags nzcv,
Condition cond);
- void Csel(const Register& rd, const Register& rn, const Operand& operand,
- Condition cond);
#define DECLARE_FUNCTION(FN, OP) \
inline void FN(const Register& rs, const Register& rt, const Register& rn);
......@@ -1314,7 +1315,6 @@ class MacroAssembler : public TurboAssembler {
inline void Cinv(const Register& rd, const Register& rn, Condition cond);
inline void CzeroX(const Register& rd, Condition cond);
inline void CmovX(const Register& rd, const Register& rn, Condition cond);
- inline void Csetm(const Register& rd, Condition cond);
inline void Csinv(const Register& rd, const Register& rn, const Register& rm,
Condition cond);
inline void Csneg(const Register& rd, const Register& rn, const Register& rm,
......
......@@ -624,21 +624,13 @@ void CodeGenerator::GenerateSpeculationPoison() {
UseScratchRegisterScope temps(tasm());
Register scratch = temps.Acquire();
- // Calculate a mask which has all bits set in the normal case, but has all
+ // Set a mask which has all bits set in the normal case, but has all
// bits cleared if we are speculatively executing the wrong PC.
- // difference = (current - expected) | (expected - current)
- // poison = ~(difference >> (kBitsPerPointer - 1))
__ ComputeCodeStartAddress(scratch);
- __ mov(kSpeculationPoisonRegister, scratch);
- __ sub(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- kJavaScriptCallCodeStartRegister);
- __ sub(kJavaScriptCallCodeStartRegister, kJavaScriptCallCodeStartRegister,
- scratch);
- __ orr(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- kJavaScriptCallCodeStartRegister);
- __ asr(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- Operand(kBitsPerPointer - 1));
- __ mvn(kSpeculationPoisonRegister, Operand(kSpeculationPoisonRegister));
+ __ cmp(kJavaScriptCallCodeStartRegister, scratch);
+ __ mov(kSpeculationPoisonRegister, Operand(-1), SBit::LeaveCC, eq);
+ __ mov(kSpeculationPoisonRegister, Operand(0), SBit::LeaveCC, ne);
+ __ csdb();
}
// Assembles an instruction after register allocation, producing machine code.
......
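On Arm the selection is emitted as a pair of predicated mov instructions (write -1 under eq, 0 under ne) rather than a dedicated select, and the newly added csdb acts as a Consumption of Speculative Data Barrier so that later instructions do not consume a poison value derived from a predicted, rather than resolved, comparison. A rough C++-style equivalent, using the same stand-in names as above:

    // What cmp + the two predicated movs compute for the poison register.
    intptr_t poison = (current == expected) ? -1 : 0;
    // csdb() then fences speculative consumption of this result.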
......@@ -568,21 +568,12 @@ void CodeGenerator::GenerateSpeculationPoison() {
UseScratchRegisterScope temps(tasm());
Register scratch = temps.AcquireX();
- // Calculate a mask which has all bits set in the normal case, but has all
+ // Set a mask which has all bits set in the normal case, but has all
// bits cleared if we are speculatively executing the wrong PC.
- // difference = (current - expected) | (expected - current)
- // poison = ~(difference >> (kBitsPerPointer - 1))
__ ComputeCodeStartAddress(scratch);
- __ Mov(kSpeculationPoisonRegister, scratch);
- __ Sub(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- kJavaScriptCallCodeStartRegister);
- __ Sub(kJavaScriptCallCodeStartRegister, kJavaScriptCallCodeStartRegister,
- scratch);
- __ Orr(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- kJavaScriptCallCodeStartRegister);
- __ Asr(kSpeculationPoisonRegister, kSpeculationPoisonRegister,
- kBitsPerPointer - 1);
- __ Mvn(kSpeculationPoisonRegister, Operand(kSpeculationPoisonRegister));
+ __ Cmp(kJavaScriptCallCodeStartRegister, scratch);
+ __ Csetm(kSpeculationPoisonRegister, eq);
+ __ Csdb();
}
// Assembles an instruction after register allocation, producing machine code.
......
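On Arm64 the whole selection collapses into Cmp plus a single Csetm, which sets its destination to all ones when the condition holds and to zero otherwise, again followed by Csdb. The macro-assembler changes earlier in the diff exist to make Csetm (and Csel) available on TurboAssembler so the code generator can emit them. As a sketch with the same stand-in names:

    // What Cmp + Csetm(eq) compute for the poison register.
    uint64_t poison = (current == expected) ? ~uint64_t{0} : uint64_t{0};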
......@@ -524,17 +524,13 @@ void CodeGenerator::BailoutIfDeoptimized() {
void CodeGenerator::GenerateSpeculationPoison() {
__ push(eax); // Push eax so we can use it as a scratch register.
- // Calculate a mask which has all bits set in the normal case, but has all
+ // Set a mask which has all bits set in the normal case, but has all
// bits cleared if we are speculatively executing the wrong PC.
- // difference = (current - expected) | (expected - current)
- // poison = ~(difference >> (kBitsPerPointer - 1))
__ ComputeCodeStartAddress(eax);
- __ mov(kSpeculationPoisonRegister, eax);
- __ sub(kSpeculationPoisonRegister, kJavaScriptCallCodeStartRegister);
- __ sub(kJavaScriptCallCodeStartRegister, eax);
- __ or_(kSpeculationPoisonRegister, kJavaScriptCallCodeStartRegister);
- __ sar(kSpeculationPoisonRegister, kBitsPerPointer - 1);
- __ not_(kSpeculationPoisonRegister);
+ __ mov(kSpeculationPoisonRegister, Immediate(0));
+ __ cmp(kJavaScriptCallCodeStartRegister, eax);
+ __ mov(eax, Immediate(-1));
+ __ cmov(equal, kSpeculationPoisonRegister, eax);
__ pop(eax); // Restore eax.
}
......
......@@ -607,17 +607,13 @@ void CodeGenerator::BailoutIfDeoptimized() {
}
void CodeGenerator::GenerateSpeculationPoison() {
- // Calculate a mask which has all bits set in the normal case, but has all
+ // Set a mask which has all bits set in the normal case, but has all
// bits cleared if we are speculatively executing the wrong PC.
- // difference = (current - expected) | (expected - current)
- // poison = ~(difference >> (kBitsPerPointer - 1))
__ ComputeCodeStartAddress(rbx);
- __ movp(kSpeculationPoisonRegister, rbx);
- __ subq(kSpeculationPoisonRegister, kJavaScriptCallCodeStartRegister);
- __ subq(kJavaScriptCallCodeStartRegister, rbx);
- __ orq(kSpeculationPoisonRegister, kJavaScriptCallCodeStartRegister);
- __ sarq(kSpeculationPoisonRegister, Immediate(kBitsPerPointer - 1));
- __ notq(kSpeculationPoisonRegister);
+ __ movp(kSpeculationPoisonRegister, Immediate(0));
+ __ cmpp(kJavaScriptCallCodeStartRegister, rbx);
+ __ movp(rbx, Immediate(-1));
+ __ cmovq(equal, kSpeculationPoisonRegister, rbx);
}
// Assembles an instruction after register allocation, producing machine code.
......
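ia32 and x64 have no direct counterpart to Csetm, so the generated code zeroes kSpeculationPoisonRegister, materialises -1 in a scratch register (eax on ia32, rbx on x64) and conditionally moves it in when the comparison sets the equal flag; no barrier instruction is emitted on these targets, unlike the csdb/Csdb on Arm and Arm64. A rough C++-style equivalent, with the same stand-in names:

    // What the cmp/mov/cmov sequence computes for the poison register.
    intptr_t scratch = -1;
    intptr_t poison = 0;
    if (current == expected) poison = scratch;  // cmov equal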