Commit cee362af authored by Junliang Yan, committed by Commit Bot

PPC/s390: [turbofan] Masking/poisoning in codegen (optimized code, x64)

Port 8f489e73

Original Commit Message:

    This introduces masking of loads with speculation bit during code generation.
    At the moment, this is done only for x64 optimized code, under the
    --branch-load-poisoning flag.

    Overview of changes:
    - new register configuration with one register reserved for
      the speculation poison/mask (kSpeculationPoisonRegister).
    - in codegen, we introduce an update to the poison register at the starts
      of all successors of branches (and deopts) that are marked as safety
      branches (deopts).
    - in memory optimizer, we lower all field and element loads to PoisonedLoads.
    - poisoned loads are then masked in codegen with the poison register.
      * only integer loads are masked at the moment.

R=mvstanton@chromium.org, joransiu@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I7decc16bbadf87a8c8b178278eb79a9b783f79e1
Reviewed-on: https://chromium-review.googlesource.com/916744
Reviewed-by: Joran Siu <joransiu@ca.ibm.com>
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#51275}
parent 1e9504ba
......@@ -1961,6 +1961,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
}
// Codegen hook that updates the speculation poison register at the start of
// branch successors. Load poisoning is x64-only at this point (see the
// original commit message), so this port must never reach this hook.
void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
Instruction* instr) {
UNREACHABLE();
}
void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
BranchInfo* branch) {
AssembleArchBranch(instr, branch);
......
......@@ -194,7 +194,7 @@ void InstructionSelector::VisitLoad(Node* node) {
PPCOperandGenerator g(this);
Node* base = node->InputAt(0);
Node* offset = node->InputAt(1);
ArchOpcode opcode = kArchNop;
InstructionCode opcode = kArchNop;
ImmediateMode mode = kInt16Imm;
switch (load_rep.representation()) {
case MachineRepresentation::kFloat32:
......@@ -234,6 +234,12 @@ void InstructionSelector::VisitLoad(Node* node) {
UNREACHABLE();
return;
}
if (node->opcode() == IrOpcode::kPoisonedLoad &&
load_poisoning_ == LoadPoisoning::kDoPoison) {
opcode |= MiscField::encode(kMemoryAccessPoisoned);
}
if (g.CanBeImmediate(offset, mode)) {
Emit(opcode | AddressingModeField::encode(kMode_MRI),
g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
......@@ -246,6 +252,8 @@ void InstructionSelector::VisitLoad(Node* node) {
}
}
void InstructionSelector::VisitPoisonedLoad(Node* node) {
  // Poisoned loads share the regular load-selection path: VisitLoad
  // inspects the node's opcode itself and, when load poisoning is enabled,
  // tags the instruction with kMemoryAccessPoisoned.
  VisitLoad(node);
}
void InstructionSelector::VisitProtectedLoad(Node* node) {
// TODO(eholk)
UNIMPLEMENTED();
......@@ -2250,6 +2258,9 @@ InstructionSelector::AlignmentRequirements() {
FullUnalignedAccessSupport();
}
// static
// This port opts out of speculation poisoning; the corresponding codegen
// hook (AssembleBranchPoisoning) is UNREACHABLE accordingly.
bool InstructionSelector::SupportsSpeculationPoisoning() { return false; }
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -2425,6 +2425,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
}
// Codegen hook that updates the speculation poison register at the start of
// branch successors. Load poisoning is x64-only at this point (see the
// original commit message), so this port must never reach this hook.
void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
Instruction* instr) {
UNREACHABLE();
}
void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
BranchInfo* branch) {
AssembleArchBranch(instr, branch);
......
......@@ -719,17 +719,24 @@ void InstructionSelector::VisitDebugAbort(Node* node) {
// Selects the s390 instruction for a load node (including PoisonedLoad,
// which VisitPoisonedLoad routes here).
//
// NOTE(review): the rendered diff had lost its +/- markers, leaving both the
// pre-patch lines (a second `opcode` declaration and a duplicate Emit) and
// the post-patch ones in place; this is the reconstructed post-patch body.
void InstructionSelector::VisitLoad(Node* node) {
  S390OperandGenerator g(this);
  // Width/type-specific load opcode for this node's representation.
  InstructionCode opcode = SelectLoadOpcode(node);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  // Chooses the addressing mode and fills |inputs| with base/index/offset.
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  opcode |= AddressingModeField::encode(mode);
  // When load poisoning is enabled, tag the instruction so that codegen
  // masks the loaded value with the speculation poison register.
  if (node->opcode() == IrOpcode::kPoisonedLoad &&
      load_poisoning_ == LoadPoisoning::kDoPoison) {
    opcode |= MiscField::encode(kMemoryAccessPoisoned);
  }
  Emit(opcode, 1, outputs, input_count, inputs);
}
void InstructionSelector::VisitPoisonedLoad(Node* node) {
  // Poisoned loads share the regular load-selection path: VisitLoad
  // inspects the node's opcode itself and, when load poisoning is enabled,
  // tags the instruction with kMemoryAccessPoisoned.
  VisitLoad(node);
}
void InstructionSelector::VisitProtectedLoad(Node* node) {
// TODO(eholk)
UNIMPLEMENTED();
......@@ -2583,6 +2590,9 @@ InstructionSelector::AlignmentRequirements() {
FullUnalignedAccessSupport();
}
// static
// This port opts out of speculation poisoning; the corresponding codegen
// hook (AssembleBranchPoisoning) is UNREACHABLE accordingly.
bool InstructionSelector::SupportsSpeculationPoisoning() { return false; }
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -2939,6 +2939,7 @@ bool AreAliased(DoubleRegister reg1, DoubleRegister reg2, DoubleRegister reg3,
}
#endif
// Clearing kSpeculationPoisonRegister is not implemented for this port yet;
// callers must not reach it while poisoning is x64-only.
void TurboAssembler::ResetSpeculationPoisonRegister() { UNREACHABLE(); }
} // namespace internal
} // namespace v8
......
......@@ -650,6 +650,8 @@ class TurboAssembler : public Assembler {
#endif
}
// Hook for clearing kSpeculationPoisonRegister; the definition on this port
// is currently UNREACHABLE (speculation poisoning is x64-only at this point).
void ResetSpeculationPoisonRegister();
private:
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
......
......@@ -4284,6 +4284,8 @@ bool AreAliased(DoubleRegister reg1, DoubleRegister reg2, DoubleRegister reg3,
}
#endif
// Clearing kSpeculationPoisonRegister is not implemented for this port yet;
// callers must not reach it while poisoning is x64-only.
void TurboAssembler::ResetSpeculationPoisonRegister() { UNREACHABLE(); }
} // namespace internal
} // namespace v8
......
......@@ -1004,6 +1004,8 @@ class TurboAssembler : public Assembler {
void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
Label* condition_met);
// Hook for clearing kSpeculationPoisonRegister; the definition on this port
// is currently UNREACHABLE (speculation poisoning is x64-only at this point).
void ResetSpeculationPoisonRegister();
private:
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment