Commit 5165e3f4 authored by Junliang Yan, committed by V8 LUCI CQ

ppc: [liftoff] implement SubS64 function

Drive-by: clean up SubS64/AddS64 macroassembler
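
The new helpers fold the usual "does the operand fit a 16-bit immediate?"
check into one call. Below is a minimal sketch of that dispatch pattern,
condensed from the macro-assembler change in this diff; the subi fast path
and the mov+sub fallback follow the real change, but the comments are
illustrative rather than verbatim V8 source:

  void TurboAssembler::SubS64(Register dst, Register src, const Operand& value,
                              Register scratch, OEBit s, RCBit r) {
    // subi only encodes 16-bit immediates and cannot set the overflow (OE)
    // or condition-record (RC) bits, so fall back to mov + sub otherwise.
    if (is_int16(value.immediate()) && s == LeaveOE && r == LeaveRC) {
      subi(dst, src, value);
    } else {
      mov(scratch, value);           // materialize the full immediate
      sub(dst, src, scratch, s, r);  // register-register subtract
    }
  }

Callers such as the stack-check sequence in the code generator can then drop
their open-coded is_int16 branches and simply pass a scratch register.
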
Change-Id: I31a15b1f3f3825122f6857861845c8961ece3649
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3024152
Commit-Queue: Junliang Yan <junyan@redhat.com>
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Cr-Commit-Position: refs/heads/master@{#75711}
parent a1d64830
@@ -156,11 +156,8 @@ void TurboAssembler::LoadRootRegisterOffset(Register destination,
                                             intptr_t offset) {
   if (offset == 0) {
     mr(destination, kRootRegister);
-  } else if (is_int16(offset)) {
-    addi(destination, kRootRegister, Operand(offset));
   } else {
-    mov(destination, Operand(offset));
-    add(destination, kRootRegister, destination);
+    AddS64(destination, kRootRegister, Operand(offset), destination);
   }
 }
@@ -1299,7 +1296,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
     // since the sp slot and code slot were pushed after the fp.
   }
-  addi(sp, sp, Operand(-stack_space * kSystemPointerSize));
+  AddS64(sp, sp, Operand(-stack_space * kSystemPointerSize));
   // Allocate and align the frame preparing for calling the runtime
   // function.
@@ -1315,7 +1312,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
   // Set the exit frame sp value to point just before the return address
   // location.
-  addi(r8, sp, Operand((kStackFrameExtraParamSlot + 1) * kSystemPointerSize));
+  AddS64(r8, sp, Operand((kStackFrameExtraParamSlot + 1) * kSystemPointerSize),
+         r0);
   StoreU64(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
 }
@@ -1344,7 +1342,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
     const int kNumRegs = kNumCallerSavedDoubles;
     const int offset =
         (ExitFrameConstants::kFixedFrameSizeFromFp + kNumRegs * kDoubleSize);
-    addi(r6, fp, Operand(-offset));
+    AddS64(r6, fp, Operand(-offset), r0);
     MultiPopDoubles(kCallerSavedDoubles, r6);
   }
@@ -1397,14 +1395,15 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
   Register dst_reg = scratch0;
   ShiftLeftImm(dst_reg, caller_args_count, Operand(kSystemPointerSizeLog2));
   add(dst_reg, fp, dst_reg);
-  addi(dst_reg, dst_reg,
-       Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
+  AddS64(dst_reg, dst_reg,
+         Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize),
+         scratch0);
 
   Register src_reg = caller_args_count;
   // Calculate the end of source area. +kSystemPointerSize is for the receiver.
   ShiftLeftImm(src_reg, callee_args_count, Operand(kSystemPointerSizeLog2));
   add(src_reg, sp, src_reg);
-  addi(src_reg, src_reg, Operand(kSystemPointerSize));
+  AddS64(src_reg, src_reg, Operand(kSystemPointerSize), scratch0);
 
   if (FLAG_debug_code) {
     CmpU64(src_reg, dst_reg);
@@ -2237,7 +2236,8 @@ void TurboAssembler::PrepareCallCFunction(int num_reg_arguments,
     // Make stack end at alignment and make room for stack arguments
     // -- preserving original value of sp.
     mr(scratch, sp);
-    addi(sp, sp, Operand(-(stack_passed_arguments + 1) * kSystemPointerSize));
+    AddS64(sp, sp, Operand(-(stack_passed_arguments + 1) * kSystemPointerSize),
+           scratch);
     DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
     ClearRightImm(sp, sp,
                   Operand(base::bits::WhichPowerOfTwo(frame_alignment)));
@@ -2378,9 +2378,9 @@ void TurboAssembler::CallCFunctionHelper(Register function,
       CalculateStackPassedWords(num_reg_arguments, num_double_arguments);
   int stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments;
   if (ActivationFrameAlignment() > kSystemPointerSize) {
-    LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize));
+    LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize), r0);
   } else {
-    addi(sp, sp, Operand(stack_space * kSystemPointerSize));
+    AddS64(sp, sp, Operand(stack_space * kSystemPointerSize), r0);
   }
 }
@@ -2675,17 +2675,33 @@ void TurboAssembler::MovFloatToInt(Register dst, DoubleRegister src) {
   addi(sp, sp, Operand(kFloatSize));
 }
 
-void TurboAssembler::AddS64(Register dst, Register src, Register value) {
-  add(dst, src, value);
+void TurboAssembler::AddS64(Register dst, Register src, Register value, OEBit s,
+                            RCBit r) {
+  add(dst, src, value, s, r);
 }
 
 void TurboAssembler::AddS64(Register dst, Register src, const Operand& value,
-                            Register scratch) {
-  if (is_int16(value.immediate())) {
+                            Register scratch, OEBit s, RCBit r) {
+  if (is_int16(value.immediate()) && s == LeaveOE && r == LeaveRC) {
     addi(dst, src, value);
   } else {
     mov(scratch, value);
-    add(dst, src, scratch);
+    add(dst, src, scratch, s, r);
+  }
+}
+
+void TurboAssembler::SubS64(Register dst, Register src, Register value, OEBit s,
+                            RCBit r) {
+  sub(dst, src, value, s, r);
+}
+
+void TurboAssembler::SubS64(Register dst, Register src, const Operand& value,
+                            Register scratch, OEBit s, RCBit r) {
+  if (is_int16(value.immediate()) && s == LeaveOE && r == LeaveRC) {
+    subi(dst, src, value);
+  } else {
+    mov(scratch, value);
+    sub(dst, src, scratch, s, r);
   }
 }
@@ -3276,8 +3292,8 @@ void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) {
     ShiftLeftImm(builtin_index, builtin_index,
                  Operand(kSystemPointerSizeLog2 - kSmiShift));
   }
-  addi(builtin_index, builtin_index,
-       Operand(IsolateData::builtin_entry_table_offset()));
+  AddS64(builtin_index, builtin_index,
+         Operand(IsolateData::builtin_entry_table_offset()));
   LoadU64(builtin_index, MemOperand(kRootRegister, builtin_index));
 }
......
@@ -184,8 +184,13 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void ResetRoundingMode();
 
   void AddS64(Register dst, Register src, const Operand& value,
-              Register scratch = r0);
-  void AddS64(Register dst, Register src, Register value);
+              Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
+  void AddS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
+              RCBit r = LeaveRC);
+  void SubS64(Register dst, Register src, const Operand& value,
+              Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
+  void SubS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
+              RCBit r = LeaveRC);
 
   void Push(Register src) { push(src); }
   // Push a handle.
......
@@ -1106,12 +1106,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (ShouldApplyOffsetToStackCheck(instr, &offset)) {
         lhs_register = i.TempRegister(0);
-        if (is_int16(offset)) {
-          __ subi(lhs_register, sp, Operand(offset));
-        } else {
-          __ mov(kScratchReg, Operand(offset));
-          __ sub(lhs_register, sp, kScratchReg);
-        }
+        __ SubS64(lhs_register, sp, Operand(offset), kScratchReg);
       }
 
       constexpr size_t kValueIndex = 0;
@@ -1165,8 +1160,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArchStackSlot: {
       FrameOffset offset =
           frame_access_state()->GetFrameOffset(i.InputInt32(0));
-      __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
-              Operand(offset.offset()));
+      __ AddS64(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
+                Operand(offset.offset()), r0);
       break;
     }
     case kArchWordPoisonOnSpeculation:
@@ -1380,8 +1375,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                LeaveOE, i.OutputRCBit());
       } else {
-        __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
-        DCHECK_EQ(LeaveRC, i.OutputRCBit());
+        __ AddS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                  r0, LeaveOE, i.OutputRCBit());
       }
       __ extsw(i.OutputRegister(), i.OutputRegister());
 #if V8_TARGET_ARCH_PPC64
@@ -1397,8 +1392,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                LeaveOE, i.OutputRCBit());
       } else {
-        __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
-        DCHECK_EQ(LeaveRC, i.OutputRCBit());
+        __ AddS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                  r0, LeaveOE, i.OutputRCBit());
       }
     }
     break;
@@ -1419,15 +1414,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                LeaveOE, i.OutputRCBit());
       } else {
-        if (is_int16(i.InputImmediate(1).immediate())) {
-          __ subi(i.OutputRegister(), i.InputRegister(0),
-                  i.InputImmediate(1));
-          DCHECK_EQ(LeaveRC, i.OutputRCBit());
-        } else {
-          __ mov(kScratchReg, i.InputImmediate(1));
-          __ sub(i.OutputRegister(), i.InputRegister(0), kScratchReg, LeaveOE,
-                 i.OutputRCBit());
-        }
+        __ SubS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                  r0, LeaveOE, i.OutputRCBit());
       }
 #if V8_TARGET_ARCH_PPC64
     }
@@ -4006,7 +3994,8 @@ void CodeGenerator::AssembleConstructFrame() {
       if (FLAG_enable_embedded_constant_pool) {
         __ Push(r0, fp, kConstantPoolRegister);
         // Adjust FP to point to saved FP.
-        __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
+        __ SubS64(fp, sp,
+                  Operand(StandardFrameConstants::kConstantPoolOffset), r0);
       } else {
         __ Push(r0, fp);
         __ mr(fp, sp);
......
@@ -707,8 +707,8 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
     // Use r4 for start address (inclusive), r5 for end address (exclusive).
     push(r4);
     push(r5);
-    subi(r4, fp, Operand(start + size));
-    subi(r5, fp, Operand(start));
+    SubS64(r4, fp, Operand(start + size), r0);
+    SubS64(r5, fp, Operand(start), r0);
     Label loop;
     bind(&loop);
@@ -792,7 +792,6 @@ UNIMPLEMENTED_I32_BINOP_I(i32_xor)
 UNIMPLEMENTED_I32_SHIFTOP(i32_shl)
 UNIMPLEMENTED_I32_SHIFTOP(i32_sar)
 UNIMPLEMENTED_I32_SHIFTOP(i32_shr)
-UNIMPLEMENTED_I64_BINOP(i64_sub)
 UNIMPLEMENTED_I64_BINOP(i64_mul)
 #ifdef V8_TARGET_ARCH_PPC64
 UNIMPLEMENTED_I64_BINOP_I(i64_and)
@@ -880,6 +879,8 @@ UNOP_LIST(EMIT_UNOP_FUNCTION)
     USE, , void)                                                            \
   V(f64_max, MaxF64, DoubleRegister, DoubleRegister, DoubleRegister, , , , \
     USE, , void)                                                            \
+  V(i64_sub, SubS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,    \
+    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                       \
   V(i64_add, AddS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,    \
     LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                       \
   V(i64_addi, AddS64, LiftoffRegister, LiftoffRegister, int64_t, LFR_TO_REG, \