Commit d74cd491 authored by Jiayao Lin, committed by Commit Bot

[s390] Port native routines to use UseScratchRegisterScope

Change-Id: If012756df78646769fb89200f2d10d71827d01a5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1687063
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Reviewed-by: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#62787}
parent 37b59991
......@@ -106,6 +106,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -- sp[...]: constructor arguments
// -----------------------------------
Register scratch = r4;
Label stack_overflow;
Generate_StackOverflowCheck(masm, r2, r7, &stack_overflow);
......@@ -134,13 +135,13 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// sp[2]: number of arguments (smi-tagged)
Label loop, no_args;
__ beq(&no_args);
__ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
__ SubP(sp, sp, ip);
__ ShiftLeftP(scratch, r2, Operand(kPointerSizeLog2));
__ SubP(sp, sp, scratch);
__ LoadRR(r1, r2);
__ bind(&loop);
__ lay(ip, MemOperand(ip, -kPointerSize));
__ LoadP(r0, MemOperand(ip, r6));
__ StoreP(r0, MemOperand(ip, sp));
__ lay(scratch, MemOperand(scratch, -kPointerSize));
__ LoadP(r0, MemOperand(scratch, r6));
__ StoreP(r0, MemOperand(scratch, sp));
__ BranchOnCount(r1, &loop);
__ bind(&no_args);
......@@ -155,15 +156,15 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// Restore context from the frame.
__ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
// Restore smi-tagged arguments count from the frame.
__ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
__ LoadP(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
// Leave construct frame.
}
// Remove caller arguments from the stack and return.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ SmiToPtrArrayOffset(r3, r3);
__ AddP(sp, sp, r3);
__ SmiToPtrArrayOffset(scratch, scratch);
__ AddP(sp, sp, scratch);
__ AddP(sp, sp, Operand(kPointerSize));
__ Ret();
......@@ -292,13 +293,13 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ ltgr(r2, r2);
__ beq(&no_args);
__ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
__ SubP(sp, sp, ip);
__ ShiftLeftP(r8, r2, Operand(kPointerSizeLog2));
__ SubP(sp, sp, r8);
__ LoadRR(r1, r2);
__ bind(&loop);
__ lay(ip, MemOperand(ip, -kPointerSize));
__ LoadP(r0, MemOperand(ip, r6));
__ StoreP(r0, MemOperand(ip, sp));
__ lay(r8, MemOperand(r8, -kPointerSize));
__ LoadP(r0, MemOperand(r8, r6));
__ StoreP(r0, MemOperand(r8, sp));
__ BranchOnCount(r1, &loop);
__ bind(&no_args);
......@@ -405,11 +406,13 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Flood function if we are stepping.
Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
Label stepping_prepared;
Register scratch = r7;
ExternalReference debug_hook =
ExternalReference::debug_hook_on_function_call_address(masm->isolate());
__ Move(ip, debug_hook);
__ LoadB(ip, MemOperand(ip));
__ CmpSmiLiteral(ip, Smi::zero(), r0);
__ Move(scratch, debug_hook);
__ LoadB(scratch, MemOperand(scratch));
__ CmpSmiLiteral(scratch, Smi::zero(), r0);
__ bne(&prepare_step_in_if_stepping);
// Flood function if we need to continue stepping in the suspended generator.
......@@ -417,9 +420,9 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
ExternalReference debug_suspended_generator =
ExternalReference::debug_suspended_generator_address(masm->isolate());
__ Move(ip, debug_suspended_generator);
__ LoadP(ip, MemOperand(ip));
__ CmpP(ip, r3);
__ Move(scratch, debug_suspended_generator);
__ LoadP(scratch, MemOperand(scratch));
__ CmpP(scratch, r3);
__ beq(&prepare_step_in_suspended_generator);
__ bind(&stepping_prepared);
......@@ -430,8 +433,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ blt(&stack_overflow);
// Push receiver.
__ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
__ Push(ip);
__ LoadP(scratch, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
__ Push(scratch);
// ----------- S t a t e -------------
// -- r3 : the JSGeneratorObject to resume
......@@ -622,6 +625,9 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
IsolateAddressId::kCEntryFPAddress, masm->isolate()));
__ LoadP(r6, MemOperand(r6));
__ StoreMultipleP(r6, r9, MemOperand(sp, kPointerSize));
Register scrach = r8;
// Set up frame pointer for the frame to be pushed.
// Need to add kPointerSize, because sp has one extra
// frame already for the frame type being pushed later.
......@@ -638,17 +644,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress,
masm->isolate());
__ Move(r7, js_entry_sp);
__ LoadAndTestP(r8, MemOperand(r7));
__ LoadAndTestP(scrach, MemOperand(r7));
__ bne(&non_outermost_js, Label::kNear);
__ StoreP(fp, MemOperand(r7));
__ Load(ip, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ Load(scrach, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
Label cont;
__ b(&cont, Label::kNear);
__ bind(&non_outermost_js);
__ Load(ip, Operand(StackFrame::INNER_JSENTRY_FRAME));
__ Load(scrach, Operand(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
__ StoreP(ip, MemOperand(sp)); // frame-type
__ StoreP(scrach, MemOperand(sp)); // frame-type
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
......@@ -664,10 +670,11 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
// field in the JSEnv and return a failure sentinel. Coming in here the
// fp will be invalid because the PushStackHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ Move(ip, ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, masm->isolate()));
__ Move(scrach,
ExternalReference::Create(IsolateAddressId::kPendingExceptionAddress,
masm->isolate()));
__ StoreP(r2, MemOperand(ip));
__ StoreP(r2, MemOperand(scrach));
__ LoadRoot(r2, RootIndex::kException);
__ b(&exit, Label::kNear);
......@@ -700,16 +707,16 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
__ pop(r7);
__ CmpP(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ bne(&non_outermost_js_2, Label::kNear);
__ mov(r8, Operand::Zero());
__ mov(scrach, Operand::Zero());
__ Move(r7, js_entry_sp);
__ StoreP(r8, MemOperand(r7));
__ StoreP(scrach, MemOperand(r7));
__ bind(&non_outermost_js_2);
// Restore the top frame descriptors from the stack.
__ pop(r5);
__ Move(ip, ExternalReference::Create(
IsolateAddressId::kCEntryFPAddress, masm->isolate()));
__ StoreP(r5, MemOperand(ip));
__ Move(scrach, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
masm->isolate()));
__ StoreP(r5, MemOperand(scrach));
// Reset the stack to the callee saved registers.
__ lay(sp, MemOperand(sp, -EntryFrameConstants::kCallerFPOffset));
......@@ -1451,11 +1458,13 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the target bytecode.
__ LoadlB(ip, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ ShiftLeftP(ip, ip, Operand(kPointerSizeLog2));
UseScratchRegisterScope temps(masm);
Register scratch = temps.Acquire();
__ LoadlB(scratch, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ ShiftLeftP(scratch, scratch, Operand(kPointerSizeLog2));
__ LoadP(kJavaScriptCallCodeStartRegister,
MemOperand(kInterpreterDispatchTableRegister, ip));
MemOperand(kInterpreterDispatchTableRegister, scratch));
__ Jump(kJavaScriptCallCodeStartRegister);
}
......@@ -1584,13 +1593,15 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
// Load builtin index (stored as a Smi) and use it to get the builtin start
// address from the builtins table.
__ Pop(ip);
UseScratchRegisterScope temps(masm);
Register builtin = temps.Acquire();
__ Pop(builtin);
__ AddP(sp, sp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(r0);
__ LoadRR(r14, r0);
__ LoadEntryFromBuiltinIndex(ip);
__ Jump(ip);
__ LoadEntryFromBuiltinIndex(builtin);
__ Jump(builtin);
}
} // namespace
......@@ -1751,13 +1762,14 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
// r2: actual number of arguments
// r3: callable
{
Register scratch = r5;
Label loop;
// Calculate the copy start address (destination). Copy end address is sp.
__ AddP(r4, sp, r4);
__ bind(&loop);
__ LoadP(ip, MemOperand(r4, -kPointerSize));
__ StoreP(ip, MemOperand(r4));
__ LoadP(scratch, MemOperand(r4, -kPointerSize));
__ StoreP(scratch, MemOperand(r4));
__ SubP(r4, Operand(kPointerSize));
__ CmpP(r4, sp);
__ bne(&loop);
......@@ -1950,7 +1962,7 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// Check for stack overflow.
Label stack_overflow;
Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);
Generate_StackOverflowCheck(masm, r6, scratch, &stack_overflow);
// Push arguments onto the stack (thisArgument is already on the stack).
{
......@@ -1961,13 +1973,13 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
__ LoadRR(r1, r6);
__ bind(&loop);
__ LoadP(ip, MemOperand(r4, kPointerSize));
__ LoadP(scratch, MemOperand(r4, kPointerSize));
__ la(r4, MemOperand(r4, kPointerSize));
__ CompareRoot(ip, RootIndex::kTheHoleValue);
__ CompareRoot(scratch, RootIndex::kTheHoleValue);
__ bne(&skip, Label::kNear);
__ LoadRoot(ip, RootIndex::kUndefinedValue);
__ LoadRoot(scratch, RootIndex::kUndefinedValue);
__ bind(&skip);
__ push(ip);
__ push(scratch);
__ BranchOnCount(r1, &loop);
__ bind(&no_args);
__ AddP(r2, r2, r6);
......@@ -2013,8 +2025,10 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpP(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ LoadP(scratch,
MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpP(scratch,
Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ beq(&arguments_adaptor);
{
__ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
......@@ -2048,9 +2062,9 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ AddP(r2, r2, r7);
__ bind(&loop);
{
__ ShiftLeftP(ip, r7, Operand(kPointerSizeLog2));
__ LoadP(ip, MemOperand(r6, ip));
__ push(ip);
__ ShiftLeftP(scratch, r7, Operand(kPointerSizeLog2));
__ LoadP(scratch, MemOperand(r6, scratch));
__ push(scratch);
__ SubP(r7, r7, Operand(1));
__ CmpP(r7, Operand::Zero());
__ bne(&loop);
......@@ -2195,10 +2209,11 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// -- r6 : the number of [[BoundArguments]]
// -----------------------------------
Register scratch = r8;
// Reserve stack space for the [[BoundArguments]].
{
Label done;
__ LoadRR(r8, sp); // preserve previous stack pointer
__ LoadRR(scratch, sp); // preserve previous stack pointer
__ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
__ SubP(sp, sp, r9);
// Check the stack for overflow. We are not trying to catch interruptions
......@@ -2207,7 +2222,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
__ CompareRoot(sp, RootIndex::kRealStackLimit);
__ bgt(&done); // Signed comparison.
// Restore the stack pointer.
__ LoadRR(sp, r8);
__ LoadRR(sp, scratch);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
......@@ -2227,7 +2242,7 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
__ beq(&skip);
__ LoadRR(r1, r2);
__ bind(&loop);
__ LoadP(r0, MemOperand(r8, r7));
__ LoadP(r0, MemOperand(scratch, r7));
__ StoreP(r0, MemOperand(sp, r7));
__ AddP(r7, r7, Operand(kPointerSize));
__ BranchOnCount(r1, &loop);
......@@ -2263,9 +2278,9 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
__ AssertBoundFunction(r3);
// Patch the receiver to [[BoundThis]].
__ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
__ LoadP(r5, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
__ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
__ StoreP(ip, MemOperand(sp, r1));
__ StoreP(r5, MemOperand(sp, r1));
// Push the [[BoundArguments]] onto the stack.
Generate_PushBoundArguments(masm);
......
......@@ -351,7 +351,8 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
Assembler::Assembler(const AssemblerOptions& options,
std::unique_ptr<AssemblerBuffer> buffer)
: AssemblerBase(options, std::move(buffer)) {
: AssemblerBase(options, std::move(buffer)),
scratch_register_list_(ip.bit()) {
reloc_info_writer.Reposition(buffer_start_ + buffer_->size(), pc_);
last_bound_pos_ = 0;
relocations_.reserve(128);
......@@ -830,6 +831,23 @@ void Assembler::EmitRelocations() {
}
}
// Snapshots the assembler's current scratch-register set so that the
// destructor can restore it when this scope ends.
UseScratchRegisterScope::UseScratchRegisterScope(Assembler* assembler)
    : assembler_(assembler) {
  old_available_ = *assembler_->GetScratchRegisterList();
}
// Returns every register acquired inside this scope to the assembler's
// scratch-register pool by restoring the saved set.
UseScratchRegisterScope::~UseScratchRegisterScope() {
  RegList* available = assembler_->GetScratchRegisterList();
  *available = old_available_;
}
// Removes and returns the lowest-numbered register still present in the
// assembler's scratch list. DCHECK-fails if the list is exhausted, so
// callers must ensure a register is available (see CanAcquire()).
Register UseScratchRegisterScope::Acquire() {
  RegList* in_use = assembler_->GetScratchRegisterList();
  DCHECK_NOT_NULL(in_use);
  DCHECK_NE(*in_use, 0);
  // The lowest set bit identifies the next free register's code.
  const int code = static_cast<int>(base::bits::CountTrailingZeros32(*in_use));
  const Register result = Register::from_code(code);
  // Mark it as taken for the remainder of this scope.
  *in_use &= ~result.bit();
  return result;
}
} // namespace internal
} // namespace v8
#endif // V8_TARGET_ARCH_S390
......@@ -307,7 +307,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// in the code, so the serializer should not step forwards in memory after
// a target is resolved and written.
static constexpr int kSpecialTargetSize = 0;
// Number of bytes for instructions used to store pointer sized constant.
#if V8_TARGET_ARCH_S390X
static constexpr int kBytesForPtrConstant = 12; // IIHF + IILF
......@@ -315,6 +314,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
static constexpr int kBytesForPtrConstant = 6; // IILF
#endif
RegList* GetScratchRegisterList() { return &scratch_register_list_; }
// ---------------------------------------------------------------------------
// Code generation
......@@ -1376,6 +1377,9 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
RelocInfoWriter reloc_info_writer;
std::vector<DeferredRelocInfo> relocations_;
// Scratch registers available for use by the Assembler.
RegList scratch_register_list_;
// The bound position, before this we cannot do instruction elimination.
int last_bound_pos_;
......@@ -1455,6 +1459,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
friend class RegExpMacroAssemblerS390;
friend class RelocInfo;
friend class EnsureSpace;
friend class UseScratchRegisterScope;
};
class EnsureSpace {
......@@ -1462,6 +1467,24 @@ class EnsureSpace {
explicit EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }
};
// RAII scope that hands out scratch registers from the Assembler's
// scratch-register list and restores the original list on destruction,
// so registers acquired inside the scope are automatically released.
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
 public:
  explicit UseScratchRegisterScope(Assembler* assembler);
  ~UseScratchRegisterScope();

  // Takes one register out of the assembler's scratch list for the
  // lifetime of this scope.
  Register Acquire();

  // Check if we have registers available to acquire.
  bool CanAcquire() const { return *assembler_->GetScratchRegisterList() != 0; }

 private:
  friend class Assembler;
  friend class TurboAssembler;

  Assembler* assembler_;
  // Scratch-register set as it was on scope entry; restored by the dtor.
  RegList old_available_;
};
} // namespace internal
} // namespace v8
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.