Commit 6b06d24e authored by Junliang Yan, committed by V8 LUCI CQ

ppc: Cleanup cmp/cmpi as CmpS64

Change-Id: Iaab1eba1590a4489004880b039e2e8900aab94b4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3011163
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Commit-Queue: Junliang Yan <junyan@redhat.com>
Cr-Commit-Position: refs/heads/master@{#75603}
parent cb6218ca
......@@ -375,7 +375,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ Move(scratch, debug_suspended_generator);
__ LoadU64(scratch, MemOperand(scratch));
__ cmp(scratch, r4);
__ CmpS64(scratch, r4);
__ beq(&prepare_step_in_suspended_generator);
__ bind(&stepping_prepared);
......@@ -828,7 +828,7 @@ static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
__ cmp(params_size, actual_params_size);
__ CmpS64(params_size, actual_params_size);
__ bge(&corrected_args_count);
__ mr(params_size, actual_params_size);
__ bind(&corrected_args_count);
......@@ -2881,13 +2881,13 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ StoreU64(r14, MemOperand(r17, kNextOffset));
if (FLAG_debug_code) {
__ lwz(r4, MemOperand(r17, kLevelOffset));
__ cmp(r4, r16);
__ CmpS64(r4, r16);
__ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
}
__ subi(r16, r16, Operand(1));
__ stw(r16, MemOperand(r17, kLevelOffset));
__ LoadU64(r0, MemOperand(r17, kLimitOffset));
__ cmp(r15, r0);
__ CmpS64(r15, r0);
__ bne(&delete_allocated_handles);
// Leave the API exit frame.
......@@ -2904,7 +2904,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ LoadRoot(r14, RootIndex::kTheHoleValue);
__ Move(r15, ExternalReference::scheduled_exception_address(isolate));
__ LoadU64(r15, MemOperand(r15));
__ cmp(r14, r15);
__ CmpS64(r14, r15);
__ bne(&promote_scheduled_exception);
__ blr();
......@@ -3304,7 +3304,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ StoreU64(r7, MemOperand(r6, 0));
__ addi(r6, r6, Operand(kSystemPointerSize));
__ bind(&pop_loop_header);
__ cmp(r5, sp);
__ CmpS64(r5, sp);
__ bne(&pop_loop);
// Compute the output frame in the deoptimizer.
......@@ -3349,7 +3349,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm,
__ addi(r7, r7, Operand(kSystemPointerSize));
__ bind(&outer_loop_header);
__ cmp(r7, r4);
__ CmpS64(r7, r4);
__ blt(&outer_push_loop);
__ LoadU64(r4, MemOperand(r3, Deoptimizer::input_offset()));
......
......@@ -801,7 +801,7 @@ void MacroAssembler::RecordWrite(Register object, Register slot_address,
DCHECK(!AreAliased(object, value, slot_address));
if (FLAG_debug_code) {
LoadTaggedPointerField(r0, MemOperand(slot_address));
cmp(r0, value);
CmpS64(r0, value);
Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
}
......@@ -1459,7 +1459,7 @@ void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch,
sub(scratch, sp, scratch);
// Check if the arguments will overflow the stack.
ShiftLeftImm(r0, num_args, Operand(kSystemPointerSizeLog2));
cmp(scratch, r0);
CmpS64(scratch, r0);
ble(stack_overflow); // Signed comparison.
}
......@@ -1478,7 +1478,7 @@ void MacroAssembler::InvokePrologue(Register expected_parameter_count,
// If the expected parameter count is equal to the adaptor sentinel, no need
// to push undefined value as arguments.
mov(r0, Operand(kDontAdaptArgumentsSentinel));
cmp(expected_parameter_count, r0);
CmpS64(expected_parameter_count, r0);
beq(&regular_invoke);
// If overapplication or if the actual argument count is equal to the
......@@ -1720,7 +1720,7 @@ void MacroAssembler::CompareInstanceTypeRange(Register map, Register type_reg,
void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
DCHECK(obj != r0);
LoadRoot(r0, index);
cmp(obj, r0);
CmpS64(obj, r0);
}
void TurboAssembler::AddAndCheckForOverflow(Register dst, Register left,
......@@ -2610,14 +2610,18 @@ void TurboAssembler::AddS64(Register dst, Register src, const Operand& value,
}
}
void TurboAssembler::Cmpi(Register src1, const Operand& src2, Register scratch,
CRegister cr) {
// Signed 64-bit register-register comparison: emits the PPC `cmp`
// instruction on src1/src2, recording the result in condition-register
// field `cr`. Introduced by this commit as the named replacement for
// direct `cmp(...)` call sites throughout the ports above.
void TurboAssembler::CmpS64(Register src1, Register src2, CRegister cr) {
cmp(src1, src2, cr);
}
void TurboAssembler::CmpS64(Register src1, const Operand& src2,
Register scratch, CRegister cr) {
intptr_t value = src2.immediate();
if (is_int16(value)) {
cmpi(src1, src2, cr);
} else {
mov(scratch, src2);
cmp(src1, scratch, cr);
CmpS64(src1, scratch, cr);
}
}
......@@ -2710,7 +2714,7 @@ void MacroAssembler::CmpSmiLiteral(Register src1, Smi smi, Register scratch,
Cmpwi(src1, Operand(smi), scratch, cr);
#else
LoadSmiLiteral(scratch, smi);
cmp(src1, scratch, cr);
CmpS64(src1, scratch, cr);
#endif
}
......@@ -3158,12 +3162,12 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
}
void TurboAssembler::JumpIfEqual(Register x, int32_t y, Label* dest) {
Cmpi(x, Operand(y), r0);
CmpS64(x, Operand(y), r0);
beq(dest);
}
void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) {
Cmpi(x, Operand(y), r0);
CmpS64(x, Operand(y), r0);
blt(dest);
}
......
......@@ -152,8 +152,9 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
void LoadPC(Register dst);
void ComputeCodeStartAddress(Register dst);
void Cmpi(Register src1, const Operand& src2, Register scratch,
CRegister cr = cr7);
void CmpS64(Register src1, const Operand& src2, Register scratch,
CRegister cr = cr7);
void CmpS64(Register src1, Register src2, CRegister cr = cr7);
void Cmpli(Register src1, const Operand& src2, Register scratch,
CRegister cr = cr7);
void Cmpwi(Register src1, const Operand& src2, Register scratch,
......@@ -162,7 +163,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
if (COMPRESS_POINTERS_BOOL) {
cmpw(src1, src2, cr);
} else {
cmp(src1, src2, cr);
CmpS64(src1, src2, cr);
}
}
......@@ -592,14 +593,14 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
CRegister cr = cr7) {
// High bits must be identical to fit into an 32-bit integer
extsw(scratch, value);
cmp(scratch, value, cr);
CmpS64(scratch, value, cr);
}
#else
inline void TestIfInt32(Register hi_word, Register lo_word, Register scratch,
CRegister cr = cr7) {
// High bits must be identical to fit into an 32-bit integer
srawi(scratch, lo_word, 31);
cmp(scratch, hi_word, cr);
CmpS64(scratch, hi_word, cr);
}
#endif
......
......@@ -832,7 +832,7 @@ void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr,
void CodeGenerator::AssembleCodeStartRegisterCheck() {
Register scratch = kScratchReg;
__ ComputeCodeStartAddress(scratch);
__ cmp(scratch, kJavaScriptCallCodeStartRegister);
__ CmpS64(scratch, kJavaScriptCallCodeStartRegister);
__ Assert(eq, AbortReason::kWrongFunctionCodeStart);
}
......@@ -847,7 +847,7 @@ void CodeGenerator::BailoutIfDeoptimized() {
if (FLAG_debug_code) {
// Check that {kJavaScriptCallCodeStartRegister} is correct.
__ ComputeCodeStartAddress(ip);
__ cmp(ip, kJavaScriptCallCodeStartRegister);
__ CmpS64(ip, kJavaScriptCallCodeStartRegister);
__ Assert(eq, AbortReason::kWrongFunctionCodeStart);
}
......@@ -869,7 +869,7 @@ void CodeGenerator::GenerateSpeculationPoisonFromCodeStartRegister() {
// Calculate a mask which has all bits set in the normal case, but has all
// bits cleared if we are speculatively executing the wrong PC.
__ cmp(kJavaScriptCallCodeStartRegister, scratch);
__ CmpS64(kJavaScriptCallCodeStartRegister, scratch);
__ li(scratch, Operand::Zero());
__ notx(kSpeculationPoisonRegister, scratch);
__ isel(eq, kSpeculationPoisonRegister, kSpeculationPoisonRegister, scratch);
......@@ -990,7 +990,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// Check the function's context matches the context argument.
__ LoadTaggedPointerField(
kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset), r0);
__ cmp(cp, kScratchReg);
__ CmpS64(cp, kScratchReg);
__ Assert(eq, AbortReason::kWrongFunctionContext);
}
static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
......@@ -2183,22 +2183,22 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(ldarx, stdcx);
break;
case kWord32AtomicCompareExchangeInt8:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(cmp, lbarx, stbcx, extsb);
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(CmpS64, lbarx, stbcx, extsb);
break;
case kPPC_AtomicCompareExchangeUint8:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, lbarx, stbcx, ZeroExtByte);
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(CmpS64, lbarx, stbcx, ZeroExtByte);
break;
case kWord32AtomicCompareExchangeInt16:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(cmp, lharx, sthcx, extsh);
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(CmpS64, lharx, sthcx, extsh);
break;
case kPPC_AtomicCompareExchangeUint16:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, lharx, sthcx, ZeroExtHalfWord);
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(CmpS64, lharx, sthcx, ZeroExtHalfWord);
break;
case kPPC_AtomicCompareExchangeWord32:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmpw, lwarx, stwcx, ZeroExtWord32);
break;
case kPPC_AtomicCompareExchangeWord64:
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, ldarx, stdcx, mr);
ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(CmpS64, ldarx, stdcx, mr);
break;
#define ATOMIC_BINOP_CASE(op, inst) \
......@@ -4300,7 +4300,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) {
__ addi(argc_reg, argc_reg, Operand(1)); // Also pop the receiver.
if (parameter_slots > 1) {
Label skip;
__ Cmpi(argc_reg, Operand(parameter_slots), r0);
__ CmpS64(argc_reg, Operand(parameter_slots), r0);
__ bgt(&skip);
__ mov(argc_reg, Operand(parameter_slots));
__ bind(&skip);
......
......@@ -178,7 +178,7 @@ void RegExpMacroAssemblerPPC::Backtrack() {
__ addi(r3, r3, Operand(1));
__ StoreU64(r3, MemOperand(frame_pointer(), kBacktrackCount), r0);
__ mov(r0, Operand(backtrack_limit()));
__ cmp(r3, r0);
__ CmpS64(r3, r0);
__ bne(&next);
// Backtrack limit exceeded.
......@@ -216,7 +216,7 @@ void RegExpMacroAssemblerPPC::CheckAtStart(int cp_offset, Label* on_at_start) {
__ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(),
Operand(-char_size() + cp_offset * char_size()));
__ cmp(r3, r4);
__ CmpS64(r3, r4);
BranchOrBacktrack(eq, on_at_start);
}
......@@ -225,7 +225,7 @@ void RegExpMacroAssemblerPPC::CheckNotAtStart(int cp_offset,
__ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(),
Operand(-char_size() + cp_offset * char_size()));
__ cmp(r3, r4);
__ CmpS64(r3, r4);
BranchOrBacktrack(ne, on_not_at_start);
}
......@@ -238,7 +238,7 @@ void RegExpMacroAssemblerPPC::CheckCharacterLT(base::uc16 limit,
void RegExpMacroAssemblerPPC::CheckGreedyLoop(Label* on_equal) {
Label backtrack_non_equal;
__ LoadU64(r3, MemOperand(backtrack_stackpointer(), 0));
__ cmp(current_input_offset(), r3);
__ CmpS64(current_input_offset(), r3);
__ bne(&backtrack_non_equal);
__ addi(backtrack_stackpointer(), backtrack_stackpointer(),
Operand(kSystemPointerSize));
......@@ -264,7 +264,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
if (read_backward) {
__ LoadU64(r6, MemOperand(frame_pointer(), kStringStartMinusOne));
__ add(r6, r6, r4);
__ cmp(current_input_offset(), r6);
__ CmpS64(current_input_offset(), r6);
BranchOrBacktrack(le, on_no_match);
} else {
__ add(r0, r4, current_input_offset(), LeaveOE, SetRC);
......@@ -295,13 +295,13 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
__ addi(r3, r3, Operand(char_size()));
__ lbz(r25, MemOperand(r5));
__ addi(r5, r5, Operand(char_size()));
__ cmp(r25, r6);
__ CmpS64(r25, r6);
__ beq(&loop_check);
// Mismatch, try case-insensitive match (converting letters to lower-case).
__ ori(r6, r6, Operand(0x20)); // Convert capture character to lower-case.
__ ori(r25, r25, Operand(0x20)); // Also convert input character.
__ cmp(r25, r6);
__ CmpS64(r25, r6);
__ bne(&fail);
__ subi(r6, r6, Operand('a'));
__ cmpli(r6, Operand('z' - 'a')); // Is r6 a lowercase letter?
......@@ -314,7 +314,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReferenceIgnoreCase(
__ beq(&fail);
__ bind(&loop_check);
__ cmp(r3, r4);
__ CmpS64(r3, r4);
__ blt(&loop);
__ b(&success);
......@@ -405,7 +405,7 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg,
if (read_backward) {
__ LoadU64(r6, MemOperand(frame_pointer(), kStringStartMinusOne));
__ add(r6, r6, r4);
__ cmp(current_input_offset(), r6);
__ CmpS64(current_input_offset(), r6);
BranchOrBacktrack(le, on_no_match);
} else {
__ add(r0, r4, current_input_offset(), LeaveOE, SetRC);
......@@ -435,9 +435,9 @@ void RegExpMacroAssemblerPPC::CheckNotBackReference(int start_reg,
__ lhz(r25, MemOperand(r5));
__ addi(r5, r5, Operand(char_size()));
}
__ cmp(r6, r25);
__ CmpS64(r6, r25);
BranchOrBacktrack(ne, on_no_match);
__ cmp(r3, r4);
__ CmpS64(r3, r4);
__ blt(&loop);
// Move current character position to position after match.
......@@ -869,7 +869,7 @@ Handle<HeapObject> RegExpMacroAssemblerPPC::GetCode(Handle<String> source) {
if (global_with_zero_length_check()) {
// Special case for zero-length matches.
// r25: capture start index
__ cmp(current_input_offset(), r25);
__ CmpS64(current_input_offset(), r25);
// Not a zero-length match, restart.
__ bne(&load_char_start_regexp);
// Offset from the end is zero if we already reached the end.
......@@ -984,7 +984,7 @@ void RegExpMacroAssemblerPPC::GoTo(Label* to) { BranchOrBacktrack(al, to); }
void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand,
Label* if_ge) {
__ LoadU64(r3, register_location(reg), r0);
__ Cmpi(r3, Operand(comparand), r0);
__ CmpS64(r3, Operand(comparand), r0);
BranchOrBacktrack(ge, if_ge);
}
......@@ -992,14 +992,14 @@ void RegExpMacroAssemblerPPC::IfRegisterGE(int reg, int comparand,
void RegExpMacroAssemblerPPC::IfRegisterLT(int reg, int comparand,
Label* if_lt) {
__ LoadU64(r3, register_location(reg), r0);
__ Cmpi(r3, Operand(comparand), r0);
__ CmpS64(r3, Operand(comparand), r0);
BranchOrBacktrack(lt, if_lt);
}
void RegExpMacroAssemblerPPC::IfRegisterEqPos(int reg, Label* if_eq) {
__ LoadU64(r3, register_location(reg), r0);
__ cmp(r3, current_input_offset());
__ CmpS64(r3, current_input_offset());
BranchOrBacktrack(eq, if_eq);
}
......@@ -1055,7 +1055,7 @@ void RegExpMacroAssemblerPPC::ReadStackPointerFromRegister(int reg) {
void RegExpMacroAssemblerPPC::SetCurrentPositionFromEnd(int by) {
Label after_position;
__ Cmpi(current_input_offset(), Operand(-by * char_size()), r0);
__ CmpS64(current_input_offset(), Operand(-by * char_size()), r0);
__ bge(&after_position);
__ mov(current_input_offset(), Operand(-by * char_size()));
// On RegExp code entry (where this operation is used), the character before
......@@ -1209,12 +1209,12 @@ MemOperand RegExpMacroAssemblerPPC::register_location(int register_index) {
void RegExpMacroAssemblerPPC::CheckPosition(int cp_offset,
Label* on_outside_input) {
if (cp_offset >= 0) {
__ Cmpi(current_input_offset(), Operand(-cp_offset * char_size()), r0);
__ CmpS64(current_input_offset(), Operand(-cp_offset * char_size()), r0);
BranchOrBacktrack(ge, on_outside_input);
} else {
__ LoadU64(r4, MemOperand(frame_pointer(), kStringStartMinusOne));
__ addi(r3, current_input_offset(), Operand(cp_offset * char_size()));
__ cmp(r3, r4);
__ CmpS64(r3, r4);
BranchOrBacktrack(le, on_outside_input);
}
}
......
......@@ -714,7 +714,7 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
bind(&loop);
StoreU64(r0, MemOperand(r0));
addi(r0, r0, Operand(kSystemPointerSize));
cmp(r4, r5);
CmpS64(r4, r5);
bne(&loop);
pop(r4);
......@@ -1024,7 +1024,7 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
V8_FALLTHROUGH;
case kI64:
if (use_signed) {
cmp(lhs, rhs);
CmpS64(lhs, rhs);
} else {
cmpl(lhs, rhs);
}
......@@ -1088,7 +1088,7 @@ void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
LiftoffRegister rhs) {
bool use_signed = liftoff::UseSignedOp(liftoff_cond);
if (use_signed) {
cmp(lhs.gp(), rhs.gp());
CmpS64(lhs.gp(), rhs.gp());
} else {
cmpl(lhs.gp(), rhs.gp());
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment