Commit 2304c194 authored by Georg Neis, committed by Commit Bot

[compiler] Replace remaining mutable reference arguments

Bug: v8:9429
Change-Id: Id775a765d9700e1d2c46b4598f5e4c8350e28f14
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1796340
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63705}
parent 91e3243d
......@@ -44,7 +44,7 @@ class ArmOperandConverter final : public InstructionOperandConverter {
UNREACHABLE();
}
Operand InputImmediate(size_t index) {
Operand InputImmediate(size_t index) const {
return ToImmediate(instr_->InputAt(index));
}
......@@ -111,7 +111,7 @@ class ArmOperandConverter final : public InstructionOperandConverter {
return InputOffset(&first_index);
}
Operand ToImmediate(InstructionOperand* operand) {
Operand ToImmediate(InstructionOperand* operand) const {
Constant constant = ToConstant(operand);
switch (constant.type()) {
case Constant::kInt32:
......@@ -309,9 +309,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
UNREACHABLE();
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, InstructionCode opcode,
ArmOperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
InstructionCode opcode,
ArmOperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
if (access_mode == kMemoryAccessPoisoned) {
......@@ -320,10 +320,10 @@ void EmitWordLoadPoisoningIfNeeded(
}
}
void ComputePoisonedAddressForLoad(
CodeGenerator* codegen, InstructionCode opcode,
ArmOperandConverter& i, // NOLINT(runtime/references)
Register address) {
void ComputePoisonedAddressForLoad(CodeGenerator* codegen,
InstructionCode opcode,
ArmOperandConverter const& i,
Register address) {
DCHECK_EQ(kMemoryAccessPoisoned,
static_cast<MemoryAccessMode>(MiscField::decode(opcode)));
switch (AddressingModeField::decode(opcode)) {
......
......@@ -376,9 +376,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
UNREACHABLE();
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, InstructionCode opcode, Instruction* instr,
Arm64OperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
InstructionCode opcode, Instruction* instr,
Arm64OperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
if (access_mode == kMemoryAccessPoisoned) {
......
......@@ -26,7 +26,7 @@ class InstructionOperandConverter {
// -- Instruction operand accesses with conversions --------------------------
Register InputRegister(size_t index) {
Register InputRegister(size_t index) const {
return ToRegister(instr_->InputAt(index));
}
......@@ -96,7 +96,7 @@ class InstructionOperandConverter {
return ToRpoNumber(instr_->InputAt(index));
}
Register OutputRegister(size_t index = 0) {
Register OutputRegister(size_t index = 0) const {
return ToRegister(instr_->OutputAt(index));
}
......@@ -130,7 +130,7 @@ class InstructionOperandConverter {
return ToConstant(op).ToRpoNumber();
}
Register ToRegister(InstructionOperand* op) {
Register ToRegister(InstructionOperand* op) const {
return LocationOperand::cast(op)->GetRegister();
}
......@@ -146,7 +146,7 @@ class InstructionOperandConverter {
return LocationOperand::cast(op)->GetSimd128Register();
}
Constant ToConstant(InstructionOperand* op) {
Constant ToConstant(InstructionOperand* op) const {
if (op->IsImmediate()) {
return gen_->instructions()->GetImmediate(ImmediateOperand::cast(op));
}
......
......@@ -29,8 +29,8 @@ inline bool operator<(const CaseInfo& l, const CaseInfo& r) {
// Helper struct containing data about a table or lookup switch.
class SwitchInfo {
public:
SwitchInfo(ZoneVector<CaseInfo>& cases, // NOLINT(runtime/references)
int32_t min_value, int32_t max_value, BasicBlock* default_branch)
SwitchInfo(ZoneVector<CaseInfo> const& cases, int32_t min_value,
int32_t max_value, BasicBlock* default_branch)
: cases_(cases),
min_value_(min_value),
max_value_(max_value),
......
......@@ -2306,8 +2306,8 @@ void InstructionSelector::VisitFloat64Tanh(Node* node) {
VisitFloat64Ieee754Unop(node, kIeee754Float64Tanh);
}
void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
InstructionOperand& index_operand) {
void InstructionSelector::EmitTableSwitch(
const SwitchInfo& sw, InstructionOperand const& index_operand) {
OperandGenerator g(this);
size_t input_count = 2 + sw.value_range();
DCHECK_LE(sw.value_range(), std::numeric_limits<size_t>::max() - 2);
......@@ -2324,8 +2324,8 @@ void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
Emit(kArchTableSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
}
void InstructionSelector::EmitLookupSwitch(const SwitchInfo& sw,
InstructionOperand& value_operand) {
void InstructionSelector::EmitLookupSwitch(
const SwitchInfo& sw, InstructionOperand const& value_operand) {
OperandGenerator g(this);
std::vector<CaseInfo> cases = sw.CasesSortedByOriginalOrder();
size_t input_count = 2 + sw.case_count() * 2;
......@@ -2342,7 +2342,7 @@ void InstructionSelector::EmitLookupSwitch(const SwitchInfo& sw,
}
void InstructionSelector::EmitBinarySearchSwitch(
const SwitchInfo& sw, InstructionOperand& value_operand) {
const SwitchInfo& sw, InstructionOperand const& value_operand) {
OperandGenerator g(this);
size_t input_count = 2 + sw.case_count() * 2;
DCHECK_LE(sw.case_count(), (std::numeric_limits<size_t>::max() - 2) / 2);
......
......@@ -506,15 +506,12 @@ class V8_EXPORT_PRIVATE InstructionSelector final {
FeedbackSource const& feedback,
Node* frame_state);
void EmitTableSwitch(
const SwitchInfo& sw,
InstructionOperand& index_operand); // NOLINT(runtime/references)
void EmitLookupSwitch(
const SwitchInfo& sw,
InstructionOperand& value_operand); // NOLINT(runtime/references)
void EmitBinarySearchSwitch(
const SwitchInfo& sw,
InstructionOperand& value_operand); // NOLINT(runtime/references)
void EmitTableSwitch(const SwitchInfo& sw,
InstructionOperand const& index_operand);
void EmitLookupSwitch(const SwitchInfo& sw,
InstructionOperand const& value_operand);
void EmitBinarySearchSwitch(const SwitchInfo& sw,
InstructionOperand const& value_operand);
void TryRename(InstructionOperand* op);
int GetRename(int virtual_register);
......
......@@ -69,11 +69,11 @@ bool IsBlockWithBranchPoisoning(InstructionSequence* code,
} // namespace
bool JumpThreading::ComputeForwarding(Zone* local_zone,
ZoneVector<RpoNumber>& result,
ZoneVector<RpoNumber>* result,
InstructionSequence* code,
bool frame_at_start) {
ZoneStack<RpoNumber> stack(local_zone);
JumpThreadingState state = {false, result, stack};
JumpThreadingState state = {false, *result, stack};
state.Clear(code->InstructionBlockCount());
// Iterate over the blocks forward, pushing the blocks onto the stack.
......@@ -135,15 +135,15 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone,
}
#ifdef DEBUG
for (RpoNumber num : result) {
for (RpoNumber num : *result) {
DCHECK(num.IsValid());
}
#endif
if (FLAG_trace_turbo_jt) {
for (int i = 0; i < static_cast<int>(result.size()); i++) {
for (int i = 0; i < static_cast<int>(result->size()); i++) {
TRACE("B%d ", i);
int to = result[i].ToInt();
int to = (*result)[i].ToInt();
if (i != to) {
TRACE("-> B%d\n", to);
} else {
......@@ -156,7 +156,7 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone,
}
void JumpThreading::ApplyForwarding(Zone* local_zone,
ZoneVector<RpoNumber>& result,
ZoneVector<RpoNumber> const& result,
InstructionSequence* code) {
if (!FLAG_turbo_jt) return;
......
......@@ -17,17 +17,14 @@ class V8_EXPORT_PRIVATE JumpThreading {
public:
// Compute the forwarding map of basic blocks to their ultimate destination.
// Returns {true} if there is at least one block that is forwarded.
static bool ComputeForwarding(
Zone* local_zone,
ZoneVector<RpoNumber>& result, // NOLINT(runtime/references)
InstructionSequence* code, bool frame_at_start);
static bool ComputeForwarding(Zone* local_zone, ZoneVector<RpoNumber>* result,
InstructionSequence* code, bool frame_at_start);
// Rewrite the instructions to forward jumps and branches.
// May also negate some branches.
static void ApplyForwarding(
Zone* local_zone,
ZoneVector<RpoNumber>& forwarding, // NOLINT(runtime/references)
InstructionSequence* code);
static void ApplyForwarding(Zone* local_zone,
ZoneVector<RpoNumber> const& forwarding,
InstructionSequence* code);
};
} // namespace compiler
......
......@@ -265,9 +265,8 @@ Condition FlagsConditionToConditionTst(FlagsCondition condition) {
UNREACHABLE();
}
FPUCondition FlagsConditionToConditionCmpFPU(
bool& predicate, // NOLINT(runtime/references)
FlagsCondition condition) {
FPUCondition FlagsConditionToConditionCmpFPU(bool* predicate,
FlagsCondition condition) {
switch (condition) {
case kEqual:
predicate = true;
*predicate = true;
......@@ -303,9 +302,9 @@ FPUCondition FlagsConditionToConditionCmpFPU(
<< "\""; \
UNIMPLEMENTED();
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, InstructionCode opcode, Instruction* instr,
MipsOperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
InstructionCode opcode, Instruction* instr,
MipsOperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
if (access_mode == kMemoryAccessPoisoned) {
......@@ -1179,7 +1178,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
FPURegister right = i.InputOrZeroSingleRegister(1);
bool predicate;
FPUCondition cc =
FlagsConditionToConditionCmpFPU(predicate, instr->flags_condition());
FlagsConditionToConditionCmpFPU(&predicate, instr->flags_condition());
if ((left == kDoubleRegZero || right == kDoubleRegZero) &&
!__ IsDoubleZeroRegSet()) {
......@@ -1239,7 +1238,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
FPURegister right = i.InputOrZeroDoubleRegister(1);
bool predicate;
FPUCondition cc =
FlagsConditionToConditionCmpFPU(predicate, instr->flags_condition());
FlagsConditionToConditionCmpFPU(&predicate, instr->flags_condition());
if ((left == kDoubleRegZero || right == kDoubleRegZero) &&
!__ IsDoubleZeroRegSet()) {
__ Move(kDoubleRegZero, 0.0);
......@@ -3026,7 +3025,7 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm,
} else if (instr->arch_opcode() == kMipsCmpS ||
instr->arch_opcode() == kMipsCmpD) {
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (predicate) {
__ BranchTrueF(tlabel);
} else {
......@@ -3116,7 +3115,7 @@ void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
case kMipsCmpS:
case kMipsCmpD: {
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (predicate) {
__ LoadZeroIfFPUCondition(kSpeculationPoisonRegister);
} else {
......@@ -3314,7 +3313,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
__ Move(kDoubleRegZero, 0.0);
}
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (!IsMipsArchVariant(kMips32r6)) {
__ li(result, Operand(1));
if (predicate) {
......
......@@ -278,9 +278,8 @@ Condition FlagsConditionToConditionOvf(FlagsCondition condition) {
UNREACHABLE();
}
FPUCondition FlagsConditionToConditionCmpFPU(
bool& predicate, // NOLINT(runtime/references)
FlagsCondition condition) {
FPUCondition FlagsConditionToConditionCmpFPU(bool* predicate,
FlagsCondition condition) {
switch (condition) {
case kEqual:
predicate = true;
*predicate = true;
......@@ -311,9 +310,9 @@ FPUCondition FlagsConditionToConditionCmpFPU(
UNREACHABLE();
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, InstructionCode opcode, Instruction* instr,
MipsOperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
InstructionCode opcode, Instruction* instr,
MipsOperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
if (access_mode == kMemoryAccessPoisoned) {
......@@ -1276,7 +1275,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
FPURegister right = i.InputOrZeroSingleRegister(1);
bool predicate;
FPUCondition cc =
FlagsConditionToConditionCmpFPU(predicate, instr->flags_condition());
FlagsConditionToConditionCmpFPU(&predicate, instr->flags_condition());
if ((left == kDoubleRegZero || right == kDoubleRegZero) &&
!__ IsDoubleZeroRegSet()) {
......@@ -1339,7 +1338,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
FPURegister right = i.InputOrZeroDoubleRegister(1);
bool predicate;
FPUCondition cc =
FlagsConditionToConditionCmpFPU(predicate, instr->flags_condition());
FlagsConditionToConditionCmpFPU(&predicate, instr->flags_condition());
if ((left == kDoubleRegZero || right == kDoubleRegZero) &&
!__ IsDoubleZeroRegSet()) {
__ Move(kDoubleRegZero, 0.0);
......@@ -3151,7 +3150,7 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm,
} else if (instr->arch_opcode() == kMips64CmpS ||
instr->arch_opcode() == kMips64CmpD) {
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (predicate) {
__ BranchTrueF(tlabel);
} else {
......@@ -3261,7 +3260,7 @@ void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
case kMips64CmpS:
case kMips64CmpD: {
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (predicate) {
__ LoadZeroIfFPUCondition(kSpeculationPoisonRegister);
} else {
......@@ -3470,7 +3469,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
__ Move(kDoubleRegZero, 0.0);
}
bool predicate;
FlagsConditionToConditionCmpFPU(predicate, condition);
FlagsConditionToConditionCmpFPU(&predicate, condition);
if (kArchVariant != kMips64r6) {
__ li(result, Operand(1));
if (predicate) {
......
......@@ -263,9 +263,8 @@ Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
UNREACHABLE();
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, Instruction* instr,
PPCOperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen, Instruction* instr,
PPCOperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(instr->opcode()));
if (access_mode == kMemoryAccessPoisoned) {
......
......@@ -3142,15 +3142,15 @@ void LinearScanAllocator::MaybeUndoPreviousSplit(LiveRange* range) {
}
}
void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet* to_be_live,
LifetimePosition position,
SpillMode spill_mode) {
for (auto it = active_live_ranges().begin();
it != active_live_ranges().end();) {
LiveRange* active_range = *it;
TopLevelLiveRange* toplevel = (*it)->TopLevel();
auto found = to_be_live.find({toplevel, kUnassignedRegister});
if (found == to_be_live.end()) {
auto found = to_be_live->find({toplevel, kUnassignedRegister});
if (found == to_be_live->end()) {
// Is not contained in {to_be_live}, spill it.
// Fixed registers are exempt from this. They might have been
// added from inactive at the block boundary but we know that
......@@ -3206,7 +3206,7 @@ void LinearScanAllocator::SpillNotLiveRanges(RangeWithRegisterSet& to_be_live,
} else {
// This range is contained in {to_be_live}, so we can keep it.
int expected_register = (*found).expected_register;
to_be_live.erase(found);
to_be_live->erase(found);
if (expected_register == active_range->assigned_register()) {
// Was live and in correct register, simply pass through.
TRACE("Keeping %d:%d in %s\n", toplevel->vreg(),
......@@ -3274,8 +3274,8 @@ LiveRange* LinearScanAllocator::AssignRegisterOnReload(LiveRange* range,
return range;
}
void LinearScanAllocator::ReloadLiveRanges(RangeWithRegisterSet& to_be_live,
LifetimePosition position) {
void LinearScanAllocator::ReloadLiveRanges(
RangeWithRegisterSet const& to_be_live, LifetimePosition position) {
// Assumption: All ranges in {to_be_live} are currently spilled and there are
// no conflicting registers in the active ranges.
// The former is ensured by SpillNotLiveRanges, the latter is by construction
......@@ -3852,7 +3852,7 @@ void LinearScanAllocator::AllocateRegisters() {
}
if (!no_change_required) {
SpillNotLiveRanges(to_be_live, next_block_boundary, spill_mode);
SpillNotLiveRanges(&to_be_live, next_block_boundary, spill_mode);
ReloadLiveRanges(to_be_live, next_block_boundary);
}
......
......@@ -1298,13 +1298,11 @@ class LinearScanAllocator final : public RegisterAllocator {
LifetimePosition begin_pos,
LiveRange* end_range);
void MaybeUndoPreviousSplit(LiveRange* range);
void SpillNotLiveRanges(
RangeWithRegisterSet& to_be_live, // NOLINT(runtime/references)
LifetimePosition position, SpillMode spill_mode);
void SpillNotLiveRanges(RangeWithRegisterSet* to_be_live,
LifetimePosition position, SpillMode spill_mode);
LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
void ReloadLiveRanges(
RangeWithRegisterSet& to_be_live, // NOLINT(runtime/references)
LifetimePosition position);
void ReloadLiveRanges(RangeWithRegisterSet const& to_be_live,
LifetimePosition position);
void UpdateDeferredFixedRanges(SpillMode spill_mode, InstructionBlock* block);
bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
......
......@@ -1246,9 +1246,8 @@ void AdjustStackPointerForTailCall(
}
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, Instruction* instr,
S390OperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen, Instruction* instr,
S390OperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(instr->opcode()));
if (access_mode == kMemoryAccessPoisoned) {
......
......@@ -435,18 +435,16 @@ void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
#endif
template <class CanCombineWithLoad>
void GenerateRightOperands(
InstructionSelector* selector, Node* node, Node* right,
InstructionCode& opcode, // NOLINT(runtime/references)
OperandModes& operand_mode, // NOLINT(runtime/references)
InstructionOperand* inputs,
size_t& input_count, // NOLINT(runtime/references)
CanCombineWithLoad canCombineWithLoad) {
void GenerateRightOperands(InstructionSelector* selector, Node* node,
Node* right, InstructionCode* opcode,
OperandModes* operand_mode,
InstructionOperand* inputs, size_t* input_count,
CanCombineWithLoad canCombineWithLoad) {
S390OperandGenerator g(selector);
if ((operand_mode & OperandMode::kAllowImmediate) &&
g.CanBeImmediate(right, operand_mode)) {
if ((*operand_mode & OperandMode::kAllowImmediate) &&
g.CanBeImmediate(right, *operand_mode)) {
inputs[input_count++] = g.UseImmediate(right);
inputs[(*input_count)++] = g.UseImmediate(right);
// Can only be RI or RRI
operand_mode &= OperandMode::kAllowImmediate;
*operand_mode &= OperandMode::kAllowImmediate;
} else if (operand_mode & OperandMode::kAllowMemoryOperand) {
} else if (*operand_mode & OperandMode::kAllowMemoryOperand) {
......@@ -454,47 +452,45 @@ void GenerateRightOperands(
if (mright.IsLoad() && selector->CanCover(node, right) &&
canCombineWithLoad(SelectLoadOpcode(right))) {
AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
right, inputs, &input_count, OpcodeImmMode(opcode));
right, inputs, input_count, OpcodeImmMode(*opcode));
opcode |= AddressingModeField::encode(mode);
*opcode |= AddressingModeField::encode(mode);
operand_mode &= ~OperandMode::kAllowImmediate;
*operand_mode &= ~OperandMode::kAllowImmediate;
if (operand_mode & OperandMode::kAllowRM)
operand_mode &= ~OperandMode::kAllowDistinctOps;
if (*operand_mode & OperandMode::kAllowRM)
*operand_mode &= ~OperandMode::kAllowDistinctOps;
} else if (operand_mode & OperandMode::kAllowRM) {
} else if (*operand_mode & OperandMode::kAllowRM) {
DCHECK(!(operand_mode & OperandMode::kAllowRRM));
DCHECK(!(*operand_mode & OperandMode::kAllowRRM));
inputs[input_count++] = g.UseAnyExceptImmediate(right);
inputs[(*input_count)++] = g.UseAnyExceptImmediate(right);
// Can not be Immediate
operand_mode &=
~OperandMode::kAllowImmediate & ~OperandMode::kAllowDistinctOps;
*operand_mode &=
~OperandMode::kAllowImmediate & ~OperandMode::kAllowDistinctOps;
} else if (operand_mode & OperandMode::kAllowRRM) {
} else if (*operand_mode & OperandMode::kAllowRRM) {
DCHECK(!(operand_mode & OperandMode::kAllowRM));
DCHECK(!(*operand_mode & OperandMode::kAllowRM));
inputs[input_count++] = g.UseAnyExceptImmediate(right);
inputs[(*input_count)++] = g.UseAnyExceptImmediate(right);
// Can not be Immediate
operand_mode &= ~OperandMode::kAllowImmediate;
*operand_mode &= ~OperandMode::kAllowImmediate;
} else {
UNREACHABLE();
}
} else {
inputs[input_count++] = g.UseRegister(right);
inputs[(*input_count)++] = g.UseRegister(right);
// Can only be RR or RRR
operand_mode &= OperandMode::kAllowRRR;
*operand_mode &= OperandMode::kAllowRRR;
}
}
template <class CanCombineWithLoad>
void GenerateBinOpOperands(
InstructionSelector* selector, Node* node, Node* left, Node* right,
InstructionCode& opcode, // NOLINT(runtime/references)
OperandModes& operand_mode, // NOLINT(runtime/references)
InstructionOperand* inputs,
size_t& input_count, // NOLINT(runtime/references)
CanCombineWithLoad canCombineWithLoad) {
void GenerateBinOpOperands(InstructionSelector* selector, Node* node,
Node* left, Node* right, InstructionCode* opcode,
OperandModes* operand_mode,
InstructionOperand* inputs, size_t* input_count,
CanCombineWithLoad canCombineWithLoad) {
S390OperandGenerator g(selector);
// left is always register
InstructionOperand const left_input = g.UseRegister(left);
inputs[input_count++] = left_input;
inputs[(*input_count)++] = left_input;
if (left == right) {
inputs[input_count++] = left_input;
inputs[(*input_count)++] = left_input;
// Can only be RR or RRR
operand_mode &= OperandMode::kAllowRRR;
*operand_mode &= OperandMode::kAllowRRR;
} else {
......
......@@ -361,7 +361,6 @@ class WasmProtectedInstructionTrap final : public WasmOutOfLineTrap {
void EmitOOLTrapIfNeeded(Zone* zone, CodeGenerator* codegen,
InstructionCode opcode, Instruction* instr,
X64OperandConverter& i, // NOLINT(runtime/references)
int pc) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
......@@ -370,9 +369,9 @@ void EmitOOLTrapIfNeeded(Zone* zone, CodeGenerator* codegen,
}
}
void EmitWordLoadPoisoningIfNeeded(
CodeGenerator* codegen, InstructionCode opcode, Instruction* instr,
X64OperandConverter& i) { // NOLINT(runtime/references)
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
InstructionCode opcode, Instruction* instr,
X64OperandConverter const& i) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
if (access_mode == kMemoryAccessPoisoned) {
......@@ -1888,30 +1887,30 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Subsd(i.InputDoubleRegister(0), kScratchDoubleReg);
break;
case kX64Movsxbl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movsxbl);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movzxbl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movzxbl);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movsxbq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movsxbq);
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movzxbq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movzxbq);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movb: {
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
size_t index = 0;
Operand operand = i.MemoryOperand(&index);
if (HasImmediateInput(instr, index)) {
......@@ -1923,29 +1922,29 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kX64Movsxwl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movsxwl);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movzxwl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movzxwl);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movsxwq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movsxwq);
break;
case kX64Movzxwq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movzxwq);
__ AssertZeroExtended(i.OutputRegister());
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movw: {
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
size_t index = 0;
Operand operand = i.MemoryOperand(&index);
if (HasImmediateInput(instr, index)) {
......@@ -1957,7 +1956,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kX64Movl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {
if (HasAddressingMode(instr)) {
__ movl(i.OutputRegister(), i.MemoryOperand());
......@@ -1981,7 +1980,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movsxlq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
ASSEMBLE_MOVX(movsxlq);
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
......@@ -2033,7 +2032,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kX64Movq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {
__ movq(i.OutputRegister(), i.MemoryOperand());
} else {
......@@ -2048,7 +2047,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
EmitWordLoadPoisoningIfNeeded(this, opcode, instr, i);
break;
case kX64Movss:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {
__ Movss(i.OutputDoubleRegister(), i.MemoryOperand());
} else {
......@@ -2058,7 +2057,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
break;
case kX64Movsd: {
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {
const MemoryAccessMode access_mode =
static_cast<MemoryAccessMode>(MiscField::decode(opcode));
......@@ -2081,7 +2080,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
case kX64Movdqu: {
CpuFeatureScope sse_scope(tasm(), SSSE3);
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {
__ Movdqu(i.OutputSimd128Register(), i.MemoryOperand());
} else {
......
......@@ -2104,7 +2104,7 @@ struct JumpThreadingPhase {
void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
ZoneVector<RpoNumber> result(temp_zone);
if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
if (JumpThreading::ComputeForwarding(temp_zone, &result, data->sequence(),
frame_at_start)) {
JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
}
......
......@@ -113,11 +113,12 @@ void VerifyForwarding(TestCode* code, int count, int* expected) {
v8::internal::AccountingAllocator allocator;
Zone local_zone(&allocator, ZONE_NAME);
ZoneVector<RpoNumber> result(&local_zone);
JumpThreading::ComputeForwarding(&local_zone, result, &code->sequence_, true);
JumpThreading::ComputeForwarding(&local_zone, &result, &code->sequence_,
true);
CHECK(count == static_cast<int>(result.size()));
for (int i = 0; i < count; i++) {
CHECK(expected[i] == result[i].ToInt());
CHECK_EQ(expected[i], result[i].ToInt());
}
}
......