Commit d7810454 authored by baptiste.afsa@arm.com

[turbofan] Allow encodable 64-bit constants as immediate for ARM64.

Also make sure that the operand order for commutative 64-bit binary operations
is properly swapped when the constant is on the left-hand side.

R=bmeurer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/551543005

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24114 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b62b8a8a
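
Before the diff itself, here is a minimal standalone sketch of the two ideas the patch implements. This is not V8 code: the names (Operand, IsAddSubImmediate, SelectCommutativeBinop) are invented for illustration, and only the ARM64 add/sub immediate form (a 12-bit value, optionally shifted left by 12) is modelled. It shows (1) a 64-bit constant being used as an immediate when it is encodable, and (2) a constant on the left-hand side of a commutative operation being swapped to the right so the immediate form can still be used.

// Illustrative sketch only -- not part of the commit.
#include <cstdint>
#include <iostream>
#include <optional>
#include <utility>

struct Operand {
  bool is_constant;
  int64_t value;  // only meaningful when is_constant is true
};

// Encodability check for add/sub-style immediates (an assumption of this
// sketch; ARM64 logical immediates use a different, more involved encoding).
bool IsAddSubImmediate(int64_t value) {
  if (value < 0) return false;
  return (value & ~int64_t{0xFFF}) == 0 ||        // imm12
         (value & ~(int64_t{0xFFF} << 12)) == 0;  // imm12, LSL #12
}

// Returns {register operand, immediate value} if an immediate form is
// possible, otherwise nothing (both inputs then go into registers).
std::optional<std::pair<Operand, int64_t>> SelectCommutativeBinop(Operand lhs,
                                                                  Operand rhs) {
  // Constant on the left: swap it to the right, which is legal because the
  // operation is commutative.
  if (lhs.is_constant && !rhs.is_constant) std::swap(lhs, rhs);
  if (rhs.is_constant && IsAddSubImmediate(rhs.value)) {
    return std::make_pair(lhs, rhs.value);
  }
  return std::nullopt;
}

int main() {
  Operand param{false, 0};
  Operand imm{true, int64_t{0xABC} << 12};  // encodable as imm12, LSL #12
  // Constant on the left-hand side of an Int64Add-like node.
  if (auto sel = SelectCommutativeBinop(imm, param)) {
    std::cout << "emit: add xd, xn, #" << sel->second << "\n";
  } else {
    std::cout << "emit: add xd, xn, xm (no immediate form)\n";
  }
  return 0;
}

The actual selector below additionally distinguishes logical immediates (kLogical32Imm/kLogical64Imm) from arithmetic ones (kArithmeticImm); the sketch deliberately omits that.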
@@ -32,6 +32,26 @@ std::ostream& operator<<(std::ostream& os, const MachInst<T>& mi) {
 }
+// Helper to build Int32Constant or Int64Constant depending on the given
+// machine type.
+Node* BuildConstant(InstructionSelectorTest::StreamBuilder& m, MachineType type,
+                    int64_t value) {
+  switch (type) {
+    case kMachInt32:
+      return m.Int32Constant(value);
+      break;
+    case kMachInt64:
+      return m.Int64Constant(value);
+      break;
+    default:
+      UNIMPLEMENTED();
+  }
+  return NULL;
+}
 // ARM64 logical instructions.
 static const MachInst2 kLogicalInstructions[] = {
     {&RawMachineAssembler::Word32And, "Word32And", kArm64And32, kMachInt32},
@@ -286,13 +306,13 @@ TEST_P(InstructionSelectorAddSubTest, ImmediateOnRight) {
   const MachineType type = dpi.machine_type;
   TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
     StreamBuilder m(this, type, type);
-    m.Return((m.*dpi.constructor)(m.Parameter(0), m.Int32Constant(imm)));
+    m.Return((m.*dpi.constructor)(m.Parameter(0), BuildConstant(m, type, imm)));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
     EXPECT_EQ(dpi.arch_opcode, s[0]->arch_opcode());
     ASSERT_EQ(2U, s[0]->InputCount());
     EXPECT_TRUE(s[0]->InputAt(1)->IsImmediate());
-    EXPECT_EQ(imm, s.ToInt32(s[0]->InputAt(1)));
+    EXPECT_EQ(imm, s.ToInt64(s[0]->InputAt(1)));
     EXPECT_EQ(1U, s[0]->OutputCount());
   }
 }
@@ -304,7 +324,7 @@ TEST_P(InstructionSelectorAddSubTest, ImmediateOnLeft) {
   TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
     StreamBuilder m(this, type, type);
-    m.Return((m.*dpi.constructor)(m.Int32Constant(imm), m.Parameter(0)));
+    m.Return((m.*dpi.constructor)(BuildConstant(m, type, imm), m.Parameter(0)));
     Stream s = m.Build();
     // Add can support an immediate on the left by commuting, but Sub can't
@@ -314,7 +334,7 @@ TEST_P(InstructionSelectorAddSubTest, ImmediateOnLeft) {
       EXPECT_EQ(dpi.arch_opcode, s[0]->arch_opcode());
       ASSERT_EQ(2U, s[0]->InputCount());
       EXPECT_TRUE(s[0]->InputAt(1)->IsImmediate());
-      EXPECT_EQ(imm, s.ToInt32(s[0]->InputAt(1)));
+      EXPECT_EQ(imm, s.ToInt64(s[0]->InputAt(1)));
       EXPECT_EQ(1U, s[0]->OutputCount());
     }
   }
@@ -1004,38 +1024,35 @@ TEST_P(InstructionSelectorComparisonTest, WithParameters) {
 TEST_P(InstructionSelectorComparisonTest, WithImmediate) {
   const MachInst2 cmp = GetParam();
   const MachineType type = cmp.machine_type;
-  // TODO(all): Add support for testing 64-bit immediates.
-  if (type == kMachInt32) {
-    TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
-      // Compare with 0 are turned into tst instruction.
-      if (imm == 0) continue;
-      StreamBuilder m(this, type, type);
-      m.Return((m.*cmp.constructor)(m.Parameter(0), m.Int32Constant(imm)));
-      Stream s = m.Build();
-      ASSERT_EQ(1U, s.size());
-      EXPECT_EQ(cmp.arch_opcode, s[0]->arch_opcode());
-      ASSERT_EQ(2U, s[0]->InputCount());
-      ASSERT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
-      EXPECT_EQ(imm, s.ToInt32(s[0]->InputAt(1)));
-      EXPECT_EQ(1U, s[0]->OutputCount());
-      EXPECT_EQ(kFlags_set, s[0]->flags_mode());
-      EXPECT_EQ(kEqual, s[0]->flags_condition());
-    }
-    TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
-      // Compare with 0 are turned into tst instruction.
-      if (imm == 0) continue;
-      StreamBuilder m(this, type, type);
-      m.Return((m.*cmp.constructor)(m.Int32Constant(imm), m.Parameter(0)));
-      Stream s = m.Build();
-      ASSERT_EQ(1U, s.size());
-      EXPECT_EQ(cmp.arch_opcode, s[0]->arch_opcode());
-      ASSERT_EQ(2U, s[0]->InputCount());
-      ASSERT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
-      EXPECT_EQ(imm, s.ToInt32(s[0]->InputAt(1)));
-      EXPECT_EQ(1U, s[0]->OutputCount());
-      EXPECT_EQ(kFlags_set, s[0]->flags_mode());
-      EXPECT_EQ(kEqual, s[0]->flags_condition());
-    }
+  TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
+    // Compare with 0 are turned into tst instruction.
+    if (imm == 0) continue;
+    StreamBuilder m(this, type, type);
+    m.Return((m.*cmp.constructor)(m.Parameter(0), BuildConstant(m, type, imm)));
+    Stream s = m.Build();
+    ASSERT_EQ(1U, s.size());
+    EXPECT_EQ(cmp.arch_opcode, s[0]->arch_opcode());
+    ASSERT_EQ(2U, s[0]->InputCount());
+    ASSERT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
+    EXPECT_EQ(imm, s.ToInt64(s[0]->InputAt(1)));
+    EXPECT_EQ(1U, s[0]->OutputCount());
+    EXPECT_EQ(kFlags_set, s[0]->flags_mode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
+  }
+  TRACED_FOREACH(int32_t, imm, kAddSubImmediates) {
+    // Compare with 0 are turned into tst instruction.
+    if (imm == 0) continue;
+    StreamBuilder m(this, type, type);
+    m.Return((m.*cmp.constructor)(BuildConstant(m, type, imm), m.Parameter(0)));
+    Stream s = m.Build();
+    ASSERT_EQ(1U, s.size());
+    EXPECT_EQ(cmp.arch_opcode, s[0]->arch_opcode());
+    ASSERT_EQ(2U, s[0]->InputCount());
+    ASSERT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
+    EXPECT_EQ(imm, s.ToInt64(s[0]->InputAt(1)));
+    EXPECT_EQ(1U, s[0]->OutputCount());
+    EXPECT_EQ(kFlags_set, s[0]->flags_mode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
   }
 }
......
@@ -37,9 +37,13 @@ class Arm64OperandGenerator FINAL : public OperandGenerator {
   }
   bool CanBeImmediate(Node* node, ImmediateMode mode) {
-    Int32Matcher m(node);
-    if (!m.HasValue()) return false;
-    int64_t value = m.Value();
+    int64_t value;
+    if (node->opcode() == IrOpcode::kInt32Constant)
+      value = OpParameter<int32_t>(node);
+    else if (node->opcode() == IrOpcode::kInt64Constant)
+      value = OpParameter<int64_t>(node);
+    else
+      return false;
     unsigned ignored;
     switch (mode) {
       case kLogical32Imm:
@@ -107,11 +111,12 @@ static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
 // Shared routine for multiple binary operations.
+template <typename Matcher>
 static void VisitBinop(InstructionSelector* selector, Node* node,
                        InstructionCode opcode, ImmediateMode operand_mode,
                        FlagsContinuation* cont) {
   Arm64OperandGenerator g(selector);
-  Int32BinopMatcher m(node);
+  Matcher m(node);
   InstructionOperand* inputs[4];
   size_t input_count = 0;
   InstructionOperand* outputs[2];
@@ -142,10 +147,11 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
 // Shared routine for multiple binary operations.
+template <typename Matcher>
 static void VisitBinop(InstructionSelector* selector, Node* node,
                        ArchOpcode opcode, ImmediateMode operand_mode) {
   FlagsContinuation cont;
-  VisitBinop(selector, node, opcode, operand_mode, &cont);
+  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
 }
@@ -262,22 +268,22 @@ void InstructionSelector::VisitStore(Node* node) {
 void InstructionSelector::VisitWord32And(Node* node) {
-  VisitBinop(this, node, kArm64And32, kLogical32Imm);
+  VisitBinop<Int32BinopMatcher>(this, node, kArm64And32, kLogical32Imm);
 }
 void InstructionSelector::VisitWord64And(Node* node) {
-  VisitBinop(this, node, kArm64And, kLogical64Imm);
+  VisitBinop<Int64BinopMatcher>(this, node, kArm64And, kLogical64Imm);
 }
 void InstructionSelector::VisitWord32Or(Node* node) {
-  VisitBinop(this, node, kArm64Or32, kLogical32Imm);
+  VisitBinop<Int32BinopMatcher>(this, node, kArm64Or32, kLogical32Imm);
 }
 void InstructionSelector::VisitWord64Or(Node* node) {
-  VisitBinop(this, node, kArm64Or, kLogical64Imm);
+  VisitBinop<Int64BinopMatcher>(this, node, kArm64Or, kLogical64Imm);
 }
@@ -287,7 +293,7 @@ void InstructionSelector::VisitWord32Xor(Node* node) {
   if (m.right().Is(-1)) {
     Emit(kArm64Not32, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
   } else {
-    VisitBinop(this, node, kArm64Xor32, kLogical32Imm);
+    VisitBinop<Int32BinopMatcher>(this, node, kArm64Xor32, kLogical32Imm);
   }
 }
@@ -298,7 +304,7 @@ void InstructionSelector::VisitWord64Xor(Node* node) {
   if (m.right().Is(-1)) {
     Emit(kArm64Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
   } else {
-    VisitBinop(this, node, kArm64Xor, kLogical32Imm);
+    VisitBinop<Int64BinopMatcher>(this, node, kArm64Xor, kLogical32Imm);
   }
 }
@@ -344,12 +350,12 @@ void InstructionSelector::VisitWord64Ror(Node* node) {
 void InstructionSelector::VisitInt32Add(Node* node) {
-  VisitBinop(this, node, kArm64Add32, kArithmeticImm);
+  VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm);
 }
 void InstructionSelector::VisitInt64Add(Node* node) {
-  VisitBinop(this, node, kArm64Add, kArithmeticImm);
+  VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm);
 }
@@ -360,7 +366,7 @@ void InstructionSelector::VisitInt32Sub(Node* node) {
     Emit(kArm64Neg32, g.DefineAsRegister(node),
          g.UseRegister(m.right().node()));
   } else {
-    VisitBinop(this, node, kArm64Sub32, kArithmeticImm);
+    VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm);
   }
 }
@@ -371,7 +377,7 @@ void InstructionSelector::VisitInt64Sub(Node* node) {
   if (m.left().Is(0)) {
     Emit(kArm64Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
   } else {
-    VisitBinop(this, node, kArm64Sub, kArithmeticImm);
+    VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm);
   }
 }
@@ -502,13 +508,13 @@ void InstructionSelector::VisitFloat64Mod(Node* node) {
 void InstructionSelector::VisitInt32AddWithOverflow(Node* node,
                                                     FlagsContinuation* cont) {
-  VisitBinop(this, node, kArm64Add32, kArithmeticImm, cont);
+  VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32, kArithmeticImm, cont);
 }
 void InstructionSelector::VisitInt32SubWithOverflow(Node* node,
                                                     FlagsContinuation* cont) {
-  VisitBinop(this, node, kArm64Sub32, kArithmeticImm, cont);
+  VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32, kArithmeticImm, cont);
 }
......
@@ -147,6 +147,10 @@ class InstructionSelectorTest : public TestWithContext, public TestWithZone {
       return ToConstant(operand).ToInt32();
     }
+    int64_t ToInt64(const InstructionOperand* operand) const {
+      return ToConstant(operand).ToInt64();
+    }
     int ToVreg(const InstructionOperand* operand) const {
      if (operand->IsConstant()) return operand->index();
      EXPECT_EQ(InstructionOperand::UNALLOCATED, operand->kind());
......