Commit 37b59991 authored by Jiayao Lin, committed by Commit Bot

[ppc] Port native routines to use UseScratchRegisterScope

Change-Id: I8034f64ba412a7d880fdc1b7bc4dce0b41fe3114
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1696915Reviewed-by: 's avatarJunliang Yan <jyan@ca.ibm.com>
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#62786}
parent 7696e532
......@@ -106,6 +106,8 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -- sp[...]: constructor arguments
// -----------------------------------
Register scratch = r5;
Label stack_overflow;
Generate_StackOverflowCheck(masm, r3, r8, &stack_overflow);
......@@ -137,13 +139,13 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -- sp[2*kPointerSize]: context
// -----------------------------------
__ beq(&no_args, cr0);
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
__ sub(sp, sp, ip);
__ ShiftLeftImm(scratch, r3, Operand(kPointerSizeLog2));
__ sub(sp, sp, scratch);
__ mtctr(r3);
__ bind(&loop);
__ subi(ip, ip, Operand(kPointerSize));
__ LoadPX(r0, MemOperand(r7, ip));
__ StorePX(r0, MemOperand(sp, ip));
__ subi(scratch, scratch, Operand(kPointerSize));
__ LoadPX(r0, MemOperand(r7, scratch));
__ StorePX(r0, MemOperand(sp, scratch));
__ bdnz(&loop);
__ bind(&no_args);
......@@ -296,13 +298,13 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// -----------------------------------
__ cmpi(r3, Operand::Zero());
__ beq(&no_args);
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
__ sub(sp, sp, ip);
__ ShiftLeftImm(r9, r3, Operand(kPointerSizeLog2));
__ sub(sp, sp, r9);
__ mtctr(r3);
__ bind(&loop);
__ subi(ip, ip, Operand(kPointerSize));
__ LoadPX(r0, MemOperand(r7, ip));
__ StorePX(r0, MemOperand(sp, ip));
__ subi(r9, r9, Operand(kPointerSize));
__ LoadPX(r0, MemOperand(r7, r9));
__ StorePX(r0, MemOperand(sp, r9));
__ bdnz(&loop);
__ bind(&no_args);
......@@ -412,12 +414,13 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Flood function if we are stepping.
Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
Label stepping_prepared;
Register scratch = r8;
ExternalReference debug_hook =
ExternalReference::debug_hook_on_function_call_address(masm->isolate());
__ Move(ip, debug_hook);
__ LoadByte(ip, MemOperand(ip), r0);
__ extsb(ip, ip);
__ CmpSmiLiteral(ip, Smi::zero(), r0);
__ Move(scratch, debug_hook);
__ LoadByte(scratch, MemOperand(scratch), r0);
__ extsb(scratch, scratch);
__ CmpSmiLiteral(scratch, Smi::zero(), r0);
__ bne(&prepare_step_in_if_stepping);
// Flood function if we need to continue stepping in the suspended generator.
......@@ -425,9 +428,9 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
ExternalReference debug_suspended_generator =
ExternalReference::debug_suspended_generator_address(masm->isolate());
__ Move(ip, debug_suspended_generator);
__ LoadP(ip, MemOperand(ip));
__ cmp(ip, r4);
__ Move(scratch, debug_suspended_generator);
__ LoadP(scratch, MemOperand(scratch));
__ cmp(scratch, r4);
__ beq(&prepare_step_in_suspended_generator);
__ bind(&stepping_prepared);
......@@ -438,8 +441,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ blt(&stack_overflow);
// Push receiver.
__ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
__ Push(ip);
__ LoadP(scratch, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
__ Push(scratch);
// ----------- S t a t e -------------
// -- r4 : the JSGeneratorObject to resume
......@@ -466,8 +469,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ mtctr(r6);
__ bind(&loop);
__ LoadPU(ip, MemOperand(r9, kPointerSize));
__ push(ip);
__ LoadPU(scratch, MemOperand(r9, kPointerSize));
__ push(scratch);
__ bdnz(&loop);
__ bind(&done_loop);
......@@ -598,6 +601,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
__ LoadP(r0, MemOperand(r3));
__ push(r0);
Register scratch = r9;
// Set up frame pointer for the frame to be pushed.
__ addi(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
......@@ -607,17 +611,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
masm->isolate());
__ Move(r3, js_entry_sp);
__ LoadP(r9, MemOperand(r3));
__ cmpi(r9, Operand::Zero());
__ LoadP(scratch, MemOperand(r3));
__ cmpi(scratch, Operand::Zero());
__ bne(&non_outermost_js);
__ StoreP(fp, MemOperand(r3));
__ mov(ip, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ mov(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
Label cont;
__ b(&cont);
__ bind(&non_outermost_js);
__ mov(ip, Operand(StackFrame::INNER_JSENTRY_FRAME));
__ mov(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
__ push(ip); // frame-type
__ push(scratch); // frame-type
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
......@@ -638,12 +642,12 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
// field in the JSEnv and return a failure sentinel. Coming in here the
// fp will be invalid because the PushStackHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ Move(ip,
ExternalReference::Create(IsolateAddressId::kPendingExceptionAddress,
masm->isolate()));
__ Move(scratch,
ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, masm->isolate()));
}
__ StoreP(r3, MemOperand(ip));
__ StoreP(r3, MemOperand(scratch));
__ LoadRoot(r3, RootIndex::kException);
__ b(&exit);
......@@ -675,16 +679,16 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
__ pop(r8);
__ cmpi(r8, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ bne(&non_outermost_js_2);
__ mov(r9, Operand::Zero());
__ mov(scratch, Operand::Zero());
__ Move(r8, js_entry_sp);
__ StoreP(r9, MemOperand(r8));
__ StoreP(scratch, MemOperand(r8));
__ bind(&non_outermost_js_2);
// Restore the top frame descriptors from the stack.
__ pop(r6);
__ Move(ip, ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, masm->isolate()));
__ StoreP(r6, MemOperand(ip));
__ Move(scratch, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
masm->isolate()));
__ StoreP(r6, MemOperand(scratch));
// Reset the stack to the callee saved registers.
__ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
......@@ -1401,11 +1405,13 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the target bytecode.
UseScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
__ lbzx(ip, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ ShiftLeftImm(ip, ip, Operand(kPointerSizeLog2));
__ ShiftLeftImm(scratch, scratch, Operand(kPointerSizeLog2));
__ LoadPX(kJavaScriptCallCodeStartRegister,
MemOperand(kInterpreterDispatchTableRegister, ip));
MemOperand(kInterpreterDispatchTableRegister, scratch));
__ Jump(kJavaScriptCallCodeStartRegister);
}
......@@ -1534,13 +1540,15 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
// Load builtin index (stored as a Smi) and use it to get the builtin start
// address from the builtins table.
__ Pop(ip);
UseScratchRegisterScope temps(masm);
Register builtin = temps.Acquire();
__ Pop(builtin);
__ addi(sp, sp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(r0);
__ mtlr(r0);
__ LoadEntryFromBuiltinIndex(ip);
__ Jump(ip);
__ LoadEntryFromBuiltinIndex(builtin);
__ Jump(builtin);
}
} // namespace
......@@ -1710,14 +1718,15 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
// r3: actual number of arguments
// r4: callable
{
Register scratch = r6;
Label loop;
// Calculate the copy start address (destination). Copy end address is sp.
__ add(r5, sp, r5);
__ mtctr(r3);
__ bind(&loop);
__ LoadP(ip, MemOperand(r5, -kPointerSize));
__ StoreP(ip, MemOperand(r5));
__ LoadP(scratch, MemOperand(r5, -kPointerSize));
__ StoreP(scratch, MemOperand(r5));
__ subi(r5, r5, Operand(kPointerSize));
__ bdnz(&loop);
// Adjust the actual number of arguments and remove the top element
......@@ -1899,7 +1908,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check for stack overflow.
Label stack_overflow;
Generate_StackOverflowCheck(masm, r7, ip, &stack_overflow);
Generate_StackOverflowCheck(masm, r7, scratch, &stack_overflow);
// Push arguments onto the stack (thisArgument is already on the stack).
{
......@@ -1910,12 +1919,12 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
__ mtctr(r7);
__ bind(&loop);
__ LoadPU(ip, MemOperand(r5, kPointerSize));
__ CompareRoot(ip, RootIndex::kTheHoleValue);
__ LoadPU(scratch, MemOperand(r5, kPointerSize));
__ CompareRoot(scratch, RootIndex::kTheHoleValue);
__ bne(&skip);
__ LoadRoot(ip, RootIndex::kUndefinedValue);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ bind(&skip);
__ push(ip);
__ push(scratch);
__ bdnz(&loop);
__ bind(&no_args);
__ add(r3, r3, r7);
......@@ -1961,8 +1970,10 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ LoadP(scratch,
MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmpi(scratch,
Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ beq(&arguments_adaptor);
{
__ LoadP(r8, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
......@@ -1996,9 +2007,9 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ add(r3, r3, r8);
__ bind(&loop);
{
__ ShiftLeftImm(ip, r8, Operand(kPointerSizeLog2));
__ LoadPX(ip, MemOperand(r7, ip));
__ push(ip);
__ ShiftLeftImm(scratch, r8, Operand(kPointerSizeLog2));
__ LoadPX(scratch, MemOperand(r7, scratch));
__ push(scratch);
__ subi(r8, r8, Operand(1));
__ cmpi(r8, Operand::Zero());
__ bne(&loop);
......@@ -2142,10 +2153,11 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// -- r7 : the number of [[BoundArguments]]
// -----------------------------------
Register scratch = r9;
// Reserve stack space for the [[BoundArguments]].
{
Label done;
__ mr(r9, sp); // preserve previous stack pointer
__ mr(scratch, sp); // preserve previous stack pointer
__ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
__ sub(sp, sp, r10);
// Check the stack for overflow. We are not trying to catch interruptions
......@@ -2154,7 +2166,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ bgt(&done); // Signed comparison.
// Restore the stack pointer.
__ mr(sp, r9);
__ mr(sp, scratch);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
......@@ -2174,7 +2186,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
__ beq(&skip);
__ mtctr(r3);
__ bind(&loop);
__ LoadPX(r0, MemOperand(r9, r8));
__ LoadPX(r0, MemOperand(scratch, r8));
__ StorePX(r0, MemOperand(sp, r8));
__ addi(r8, r8, Operand(kPointerSize));
__ bdnz(&loop);
......@@ -2209,9 +2221,9 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
__ AssertBoundFunction(r4);
// Patch the receiver to [[BoundThis]].
__ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
__ LoadP(r6, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
__ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
__ StorePX(ip, MemOperand(sp, r0));
__ StorePX(r6, MemOperand(sp, r0));
// Push the [[BoundArguments]] onto the stack.
Generate_PushBoundArguments(masm);
......@@ -3281,6 +3293,8 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
}
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
UseScratchRegisterScope temps(masm);
Register temp2 = temps.Acquire();
// Place the return address on the stack, making the call
// GC safe. The RegExp backend also relies on this.
__ mflr(r0);
......@@ -3288,11 +3302,11 @@ void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
if (ABI_USES_FUNCTION_DESCRIPTORS && FLAG_embedded_builtins) {
// AIX/PPC64BE Linux use a function descriptor;
__ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(ip, kPointerSize));
__ LoadP(ip, MemOperand(ip, 0)); // Instruction address
__ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(temp2, kPointerSize));
__ LoadP(temp2, MemOperand(temp2, 0)); // Instruction address
}
__ Call(ip); // Call the C++ function.
__ Call(temp2); // Call the C++ function.
__ LoadP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
__ mtlr(r0);
__ blr();
......
......@@ -224,6 +224,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
Assembler::Assembler(const AssemblerOptions& options,
std::unique_ptr<AssemblerBuffer> buffer)
: AssemblerBase(options, std::move(buffer)),
scratch_register_list_(ip.bit()),
constant_pool_builder_(kLoadPtrMaxReachBits, kLoadDoubleMaxReachBits) {
reloc_info_writer.Reposition(buffer_start_ + buffer_->size(), pc_);
......@@ -1947,6 +1948,24 @@ PatchingAssembler::~PatchingAssembler() {
DCHECK_EQ(reloc_info_writer.pos(), buffer_start_ + buffer_->size());
}
// Snapshots the assembler's current scratch-register list so that any
// registers handed out via Acquire() are automatically returned when this
// scope is destroyed.
UseScratchRegisterScope::UseScratchRegisterScope(Assembler* assembler)
: assembler_(assembler),
old_available_(*assembler->GetScratchRegisterList()) {}
// Restores the scratch-register list captured at construction, releasing
// every register acquired inside this scope back to the assembler.
UseScratchRegisterScope::~UseScratchRegisterScope() {
*assembler_->GetScratchRegisterList() = old_available_;
}
// Hands out the lowest-numbered register still present in the assembler's
// scratch-register list and removes it from the list. DCHECK-fails if the
// list is empty (callers can test with CanAcquire() first).
Register UseScratchRegisterScope::Acquire() {
  RegList* regs = assembler_->GetScratchRegisterList();
  DCHECK_NOT_NULL(regs);
  DCHECK_NE(*regs, 0);
  // The lowest set bit corresponds to the lowest available register code.
  const int code = static_cast<int>(base::bits::CountTrailingZeros32(*regs));
  const Register result = Register::from_code(code);
  // Mark the register as taken for the remainder of this scope.
  *regs &= ~result.bit();
  return result;
}
} // namespace internal
} // namespace v8
......
......@@ -437,6 +437,7 @@ class Assembler : public AssemblerBase {
PPC_XX3_OPCODE_LIST(DECLARE_PPC_XX3_INSTRUCTIONS)
#undef DECLARE_PPC_XX3_INSTRUCTIONS
RegList* GetScratchRegisterList() { return &scratch_register_list_; }
// ---------------------------------------------------------------------------
// Code generation
......@@ -1182,6 +1183,9 @@ class Assembler : public AssemblerBase {
static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize;
std::vector<DeferredRelocInfo> relocations_;
// Scratch registers available for use by the Assembler.
RegList scratch_register_list_;
// The bound position, before this we cannot do instruction elimination.
int last_bound_pos_;
// Optimizable cmpi information.
......@@ -1297,6 +1301,7 @@ class Assembler : public AssemblerBase {
friend class RelocInfo;
friend class BlockTrampolinePoolScope;
friend class EnsureSpace;
friend class UseScratchRegisterScope;
};
class EnsureSpace {
......@@ -1311,6 +1316,24 @@ class PatchingAssembler : public Assembler {
~PatchingAssembler();
};
// RAII scope that temporarily hands out registers from the Assembler's
// scratch-register list. Registers obtained with Acquire() are implicitly
// returned when the scope is destroyed (the destructor restores the list
// saved at construction). Scopes may nest; each inner scope sees only the
// registers its enclosing scopes have not taken.
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();
// Takes the next available scratch register out of the list and returns
// it; DCHECK-fails when none are available.
Register Acquire();
// Check if we have registers available to acquire.
bool CanAcquire() const { return *assembler_->GetScratchRegisterList() != 0; }
private:
friend class Assembler;
friend class TurboAssembler;
// The assembler whose scratch list this scope borrows from.
Assembler* assembler_;
// Scratch-register list as it was at construction; restored on destruction.
RegList old_available_;
};
} // namespace internal
} // namespace v8
......
......@@ -56,11 +56,13 @@ void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
__ StoreP(ToRegister(i), MemOperand(sp, kPointerSize * i));
}
}
__ mov(ip, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate)));
__ StoreP(fp, MemOperand(ip));
{
UseScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
__ mov(scratch, Operand(ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, isolate)));
__ StoreP(fp, MemOperand(scratch));
}
const int kSavedRegistersAreaSize =
(kNumberOfRegisters * kPointerSize) + kDoubleRegsSize + kFloatRegsSize;
......@@ -210,19 +212,27 @@ void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
__ push(r9);
// Restore the registers from the last output frame.
DCHECK(!(ip.bit() & restored_regs));
__ mr(ip, r5);
for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
int offset = (i * kPointerSize) + FrameDescription::registers_offset();
if ((restored_regs & (1 << i)) != 0) {
__ LoadP(ToRegister(i), MemOperand(ip, offset));
{
UseScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
DCHECK(!(scratch.bit() & restored_regs));
__ mr(scratch, r5);
for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
int offset = (i * kPointerSize) + FrameDescription::registers_offset();
if ((restored_regs & (1 << i)) != 0) {
__ LoadP(ToRegister(i), MemOperand(scratch, offset));
}
}
}
__ pop(ip); // get continuation, leave pc on stack
__ pop(r0);
__ mtlr(r0);
__ Jump(ip);
{
UseScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
__ pop(scratch); // get continuation, leave pc on stack
__ pop(r0);
__ mtlr(r0);
__ Jump(scratch);
}
__ stop();
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment