Commit 87e9d839 authored by titzer, committed by Commit bot

[turbofan] Clean up and factor out branch generation logic.

R=dcarney@chromium.org
BUG=

Review URL: https://codereview.chromium.org/745633002

Cr-Commit-Position: refs/heads/master@{#25446}
parent bf11bf47
......@@ -193,7 +193,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ b(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchNop:
......@@ -539,21 +539,11 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
ArmOperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
switch (condition) {
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
switch (branch->condition) {
case kUnorderedEqual:
__ b(vs, flabel);
// Fall through.
......@@ -609,8 +599,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ b(vc, tlabel);
break;
}
if (!fallthru) __ b(flabel); // no fallthru to flabel.
__ bind(&done);
if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
}
// Assembles an unconditional jump to |target|. The branch is omitted
// entirely when |target| is the next block in assembly order, since
// execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}
......
......@@ -1170,10 +1170,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
if (IsNextInAssemblyOrder(tbranch)) { // We can fallthru to the true block.
cont.Negate();
cont.SwapBlocks();
}
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
......
......@@ -214,7 +214,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ B(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
......@@ -612,21 +612,11 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
// Assemble branches after this instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Arm64OperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
switch (condition) {
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
switch (branch->condition) {
case kUnorderedEqual:
__ B(vs, flabel);
// Fall through.
......@@ -682,8 +672,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ B(vc, tlabel);
break;
}
if (!fallthru) __ B(flabel); // no fallthru to flabel.
__ Bind(&done);
if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
}
// Assembles an unconditional jump to |target| (ARM64 B instruction).
// No code is emitted when |target| is the next block in assembly order,
// since execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
}
......
......@@ -1120,12 +1120,6 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
// If we can fall through to the true block, invert the branch.
if (IsNextInAssemblyOrder(tbranch)) {
cont.Negate();
cont.SwapBlocks();
}
// Try to combine with comparisons against 0 by simply inverting the branch.
while (CanCover(user, value)) {
if (value->opcode() == IrOpcode::kWord32Equal) {
......
......@@ -139,18 +139,39 @@ void CodeGenerator::AssembleInstruction(Instruction* instr) {
// Assemble architecture-specific code for the instruction.
AssembleArchInstruction(instr);
// Assemble branches or boolean materializations after this instruction.
FlagsMode mode = FlagsModeField::decode(instr->opcode());
FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
switch (mode) {
case kFlags_none:
if (mode == kFlags_branch) {
// Assemble a branch after this instruction.
InstructionOperandConverter i(this, instr);
BasicBlock::RpoNumber true_rpo =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber false_rpo =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
if (true_rpo == false_rpo) {
// redundant branch.
if (!IsNextInAssemblyOrder(true_rpo)) {
AssembleArchJump(true_rpo);
}
return;
case kFlags_set:
return AssembleArchBoolean(instr, condition);
case kFlags_branch:
return AssembleArchBranch(instr, condition);
}
UNREACHABLE();
if (IsNextInAssemblyOrder(true_rpo)) {
// true block is next, can fall through if condition negated.
std::swap(true_rpo, false_rpo);
condition = NegateFlagsCondition(condition);
}
BranchInfo branch;
branch.condition = condition;
branch.true_label = GetLabel(true_rpo);
branch.false_label = GetLabel(false_rpo);
branch.fallthru = IsNextInAssemblyOrder(false_rpo);
// Assemble architecture-specific branch.
AssembleArchBranch(instr, &branch);
} else if (mode == kFlags_set) {
// Assemble a boolean materialization after this instruction.
AssembleArchBoolean(instr, condition);
}
}
}
......
......@@ -19,6 +19,14 @@ namespace compiler {
class Linkage;
// Architecture-independent description of a two-way branch, filled in by
// the generic CodeGenerator and consumed by the per-architecture
// AssembleArchBranch implementations.
struct BranchInfo {
FlagsCondition condition;  // Condition under which the true target is taken.
Label* true_label;         // Label of the true target block.
Label* false_label;        // Label of the false target block.
bool fallthru;             // True if the false target is the next block in
                           // assembly order, so no jump to it is needed.
};
// Generates native code for a sequence of instructions.
class CodeGenerator FINAL : public GapResolver::Assembler {
public:
......@@ -60,7 +68,8 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
// ===========================================================================
void AssembleArchInstruction(Instruction* instr);
void AssembleArchBranch(Instruction* instr, FlagsCondition condition);
void AssembleArchJump(BasicBlock::RpoNumber target);
void AssembleArchBranch(Instruction* instr, BranchInfo* branch);
void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
void AssembleDeoptimizerCall(int deoptimization_id);
......
......@@ -195,7 +195,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ jmp(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
......@@ -485,23 +485,14 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
}
// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
// Assembles a branch after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
IA32OperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar;
switch (condition) {
Label::Distance flabel_distance =
branch->fallthru ? Label::kNear : Label::kFar;
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
switch (branch->condition) {
case kUnorderedEqual:
__ j(parity_even, flabel, flabel_distance);
// Fall through.
......@@ -557,8 +548,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ j(no_overflow, tlabel);
break;
}
if (!fallthru) __ jmp(flabel, flabel_distance); // no fallthru to flabel.
__ bind(&done);
// Add a jump if not falling through to the next block.
if (!branch->fallthru) __ jmp(flabel);
}
// Assembles an unconditional jump to |target| (IA-32 jmp). No code is
// emitted when |target| is the next block in assembly order, since
// execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}
......
......@@ -1003,10 +1003,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
if (IsNextInAssemblyOrder(tbranch)) { // We can fallthru to the true block.
cont.Negate();
cont.SwapBlocks();
}
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
......
......@@ -98,6 +98,10 @@ enum FlagsCondition {
kNotOverflow
};
// Returns the logical negation of |condition| by flipping its low bit.
// NOTE(review): this relies on the FlagsCondition enumerators being declared
// in adjacent negation pairs (e.g. kEqual/kNotEqual, kOverflow/kNotOverflow);
// the same `^ 1` trick is used by FlagsContinuation::Negate(). Confirm the
// pairing invariant if new enumerators are added.
inline FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
return static_cast<FlagsCondition>(condition ^ 1);
}
std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc);
// The InstructionCode is an opaque, target-specific integer that encodes
......
......@@ -257,7 +257,7 @@ class FlagsContinuation FINAL {
void Negate() {
DCHECK(!IsNone());
condition_ = static_cast<FlagsCondition>(condition_ ^ 1);
condition_ = NegateFlagsCondition(condition_);
}
void Commute() {
......@@ -317,8 +317,6 @@ class FlagsContinuation FINAL {
if (negate) Negate();
}
// Exchanges the true and false target blocks of this continuation.
void SwapBlocks() { std::swap(true_block_, false_block_); }
// Encodes this flags continuation into the given opcode.
InstructionCode Encode(InstructionCode opcode) {
opcode |= FlagsModeField::encode(mode_);
......
......@@ -966,14 +966,9 @@ void InstructionSelector::VisitConstant(Node* node) {
// Selects instructions for an unconditional goto: emits a nop when |target|
// is the next block in assembly order (execution falls through), otherwise
// emits an architecture-independent jump (kArchJmp) to the target's label.
// Both emitted instructions are marked as control instructions.
void InstructionSelector::VisitGoto(BasicBlock* target) {
if (IsNextInAssemblyOrder(target)) {
// Fall through to the next block; no jump needed.
Emit(kArchNop, NULL)->MarkAsControl();
} else {
// Jump to the target block.
OperandGenerator g(this);
Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl();
}
}
......
......@@ -154,7 +154,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ Branch(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
......@@ -394,30 +394,21 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
MipsOperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
Condition cc = kNoCondition;
// MIPS does not have condition code flags, so compare and branch are
// implemented differently than on the other arch's. The compare operations
// emit mips psuedo-instructions, which are handled here by branch
// emit mips pseudo-instructions, which are handled here by branch
// instructions that do the actual comparison. Essential that the input
// registers to compare psuedo-op are not modified before this branch op, as
// registers to compare pseudo-op are not modified before this branch op, as
// they are tested here.
// TODO(plind): Add CHECK() to ensure that test/cmp and this branch were
// not separated by other instructions.
if (instr->arch_opcode() == kMipsTst) {
switch (condition) {
switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
......@@ -434,7 +425,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
} else if (instr->arch_opcode() == kMipsAddOvf ||
instr->arch_opcode() == kMipsSubOvf) {
// kMipsAddOvf, SubOvf emit negative result to 'kCompareReg' on overflow.
switch (condition) {
switch (branch->condition) {
case kOverflow:
cc = lt;
break;
......@@ -442,13 +433,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = ge;
break;
default:
UNSUPPORTED_COND(kMipsAddOvf, condition);
UNSUPPORTED_COND(kMipsAddOvf, branch->condition);
break;
}
__ Branch(tlabel, cc, kCompareReg, Operand(zero_reg));
} else if (instr->arch_opcode() == kMipsCmp) {
switch (condition) {
switch (branch->condition) {
case kEqual:
cc = eq;
break;
......@@ -480,19 +471,18 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = hi;
break;
default:
UNSUPPORTED_COND(kMipsCmp, condition);
UNSUPPORTED_COND(kMipsCmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
__ bind(&done);
if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
} else if (instr->arch_opcode() == kMipsCmpD) {
// TODO(dusmil) optimize unordered checks to use less instructions
// TODO(dusmil) optimize unordered checks to use fewer instructions
// even if we have to unfold BranchF macro.
Label* nan = flabel;
switch (condition) {
switch (branch->condition) {
case kUnorderedEqual:
cc = eq;
break;
......@@ -515,14 +505,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
nan = tlabel;
break;
default:
UNSUPPORTED_COND(kMipsCmpD, condition);
UNSUPPORTED_COND(kMipsCmpD, branch->condition);
break;
}
__ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0),
i.InputDoubleRegister(1));
if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
__ bind(&done);
if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
} else {
PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n",
......@@ -532,6 +521,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
}
// Assembles an unconditional jump to |target| (MIPS Branch macro). No code
// is emitted when |target| is the next block in assembly order, since
// execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
}
// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
FlagsCondition condition) {
......
......@@ -634,11 +634,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
// If we can fall through to the true block, invert the branch.
if (IsNextInAssemblyOrder(tbranch)) {
cont.Negate();
cont.SwapBlocks();
}
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
......
......@@ -155,7 +155,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ Branch(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
......@@ -480,17 +480,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
MipsOperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
Condition cc = kNoCondition;
// MIPS does not have condition code flags, so compare and branch are
......@@ -503,7 +494,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
// not separated by other instructions.
if (instr->arch_opcode() == kMips64Tst) {
switch (condition) {
switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
......@@ -511,13 +502,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = eq;
break;
default:
UNSUPPORTED_COND(kMips64Tst, condition);
UNSUPPORTED_COND(kMips64Tst, branch->condition);
break;
}
__ And(at, i.InputRegister(0), i.InputOperand(1));
__ Branch(tlabel, cc, at, Operand(zero_reg));
} else if (instr->arch_opcode() == kMips64Tst32) {
switch (condition) {
switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
......@@ -525,7 +516,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = eq;
break;
default:
UNSUPPORTED_COND(kMips64Tst32, condition);
UNSUPPORTED_COND(kMips64Tst32, branch->condition);
break;
}
// Zero-extend registers on MIPS64 only 64-bit operand
......@@ -538,7 +529,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ Branch(tlabel, cc, at, Operand(zero_reg));
} else if (instr->arch_opcode() == kMips64Dadd ||
instr->arch_opcode() == kMips64Dsub) {
switch (condition) {
switch (branch->condition) {
case kOverflow:
cc = ne;
break;
......@@ -546,7 +537,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = eq;
break;
default:
UNSUPPORTED_COND(kMips64Dadd, condition);
UNSUPPORTED_COND(kMips64Dadd, branch->condition);
break;
}
......@@ -554,7 +545,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ sra(at, i.OutputRegister(), 31);
__ Branch(tlabel, cc, at, Operand(kScratchReg));
} else if (instr->arch_opcode() == kMips64Cmp) {
switch (condition) {
switch (branch->condition) {
case kEqual:
cc = eq;
break;
......@@ -586,16 +577,16 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = hi;
break;
default:
UNSUPPORTED_COND(kMips64Cmp, condition);
UNSUPPORTED_COND(kMips64Cmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
__ bind(&done);
} else if (instr->arch_opcode() == kMips64Cmp32) {
switch (condition) {
switch (branch->condition) {
case kEqual:
cc = eq;
break;
......@@ -627,11 +618,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
cc = hi;
break;
default:
UNSUPPORTED_COND(kMips64Cmp32, condition);
UNSUPPORTED_COND(kMips64Cmp32, branch->condition);
break;
}
switch (condition) {
switch (branch->condition) {
case kEqual:
case kNotEqual:
case kSignedLessThan:
......@@ -657,18 +648,18 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
}
break;
default:
UNSUPPORTED_COND(kMips64Cmp, condition);
UNSUPPORTED_COND(kMips64Cmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
__ bind(&done);
} else if (instr->arch_opcode() == kMips64CmpD) {
// TODO(dusmil) optimize unordered checks to use less instructions
// even if we have to unfold BranchF macro.
Label* nan = flabel;
switch (condition) {
switch (branch->condition) {
case kUnorderedEqual:
cc = eq;
break;
......@@ -691,13 +682,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
nan = tlabel;
break;
default:
UNSUPPORTED_COND(kMips64CmpD, condition);
UNSUPPORTED_COND(kMips64CmpD, branch->condition);
break;
}
__ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0),
i.InputDoubleRegister(1));
if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
__ bind(&done);
} else {
......@@ -708,6 +699,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
}
// Assembles an unconditional jump to |target| (MIPS64 Branch macro). No
// code is emitted when |target| is the next block in assembly order, since
// execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
}
// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
FlagsCondition condition) {
......
......@@ -871,12 +871,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
// If we can fall through to the true block, invert the branch.
if (IsNextInAssemblyOrder(tbranch)) {
cont.Negate();
cont.SwapBlocks();
}
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
......
......@@ -238,7 +238,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchJmp:
__ jmp(GetLabel(i.InputRpo(0)));
AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
......@@ -631,22 +631,13 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
// Assembles branches after this instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr,
FlagsCondition condition) {
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
X64OperandConverter i(this, instr);
Label done;
// Emit a branch. The true and false targets are always the last two inputs
// to the instruction.
BasicBlock::RpoNumber tblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
BasicBlock::RpoNumber fblock =
i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
bool fallthru = IsNextInAssemblyOrder(fblock);
Label* tlabel = GetLabel(tblock);
Label* flabel = fallthru ? &done : GetLabel(fblock);
Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar;
switch (condition) {
Label::Distance flabel_distance =
branch->fallthru ? Label::kNear : Label::kFar;
Label* tlabel = branch->true_label;
Label* flabel = branch->false_label;
switch (branch->condition) {
case kUnorderedEqual:
__ j(parity_even, flabel, flabel_distance);
// Fall through.
......@@ -702,8 +693,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
__ j(no_overflow, tlabel);
break;
}
if (!fallthru) __ jmp(flabel, flabel_distance); // no fallthru to flabel.
__ bind(&done);
if (!branch->fallthru) __ jmp(flabel, flabel_distance);
}
// Assembles an unconditional jump to |target| (x64 jmp). No code is
// emitted when |target| is the next block in assembly order, since
// execution falls through to it.
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}
......
......@@ -942,12 +942,6 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
// If we can fall through to the true block, invert the branch.
if (IsNextInAssemblyOrder(tbranch)) {
cont.Negate();
cont.SwapBlocks();
}
// Try to combine with comparisons against 0 by simply inverting the branch.
while (CanCover(user, value)) {
if (value->opcode() == IrOpcode::kWord32Equal) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment