Commit 711ff725 authored by Junliang Yan, committed by Commit Bot

PPC/s390: [builtins] Move CallApiGetter and CallApiCallback stubs to builtins

Port 70cede39

Original Commit Message:

    Calls from embedded builtins to stubs are expensive due to the
    indirection through the builtins constants table. This moves
    CallApiGetter and the 0/1 argument case of CallApiCallback to
    builtins.

R=jgruber@chromium.org, joransiu@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I15677f91ad1f5cac05d4760f4cdd1561982a0621
Reviewed-on: https://chromium-review.googlesource.com/1073055
Reviewed-by: Joran Siu <joransiu@ca.ibm.com>
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#53369}
parent ae6e9cc7
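
For context (this sketch is not part of the commit): the cost described in the commit message can be modeled with a small standalone C++ program. Reaching a stub from an embedded builtin goes through the builtins constants table, which is roughly two dependent loads (the table from the root list, then the target entry) before an indirect call, whereas a builtin-to-builtin call can be emitted as a direct, pc-relative call into the embedded blob. Names such as CallViaConstantsTable and ApiStub below are illustrative only and do not exist in V8.

    // Minimal model of the two call paths, assuming function pointers stand in
    // for code objects. Not V8 code.
    #include <cstdio>

    using StubFn = int (*)(int);

    int ApiStub(int x) { return x + 1; }     // stands in for a code stub
    int ApiBuiltin(int x) { return x + 1; }  // same logic, as a builtin

    // Roughly analogous to LoadRoot(kBuiltinsConstantsTableRootIndex) followed
    // by LoadP(destination, MemOperand(destination, offset)) in the last hunk.
    StubFn constants_table[] = {ApiStub};

    int CallViaConstantsTable(int x) {
      StubFn* table = constants_table;  // load 1: constants table from roots
      StubFn target = table[0];         // load 2: target entry from the table
      return target(x);                 // indirect call
    }

    int CallDirect(int x) {
      return ApiBuiltin(x);             // direct, pc-relative call
    }

    int main() {
      std::printf("%d %d\n", CallViaConstantsTable(41), CallDirect(41));
      return 0;
    }

Moving CallApiGetter and the 0/1 argument CallApiCallback into builtins lets embedded builtins take the direct-call path instead of the table lookup.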
@@ -202,6 +202,19 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm, Register target) {
+#ifdef V8_EMBEDDED_BUILTINS
+  if (masm->root_array_available() &&
+      isolate()->ShouldLoadConstantsFromRootList()) {
+    // This is basically an inlined version of Call(Handle<Code>) that loads the
+    // code object into lr instead of ip.
+    DCHECK_NE(ip, target);
+    __ LookupConstant(ip, GetCode());
+    __ addi(r0, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Move(ip, target);
+    __ Call(r0);
+    return;
+  }
+#endif
  if (ABI_USES_FUNCTION_DESCRIPTORS) {
    // AIX/PPC64BE Linux use a function descriptor.
    __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize));
@@ -627,18 +640,18 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  DCHECK(function_address == r4 || function_address == r5);
  Register scratch = r6;
-  __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate)));
+  __ Move(scratch, ExternalReference::is_profiling_address(isolate));
  __ lbz(scratch, MemOperand(scratch, 0));
  __ cmpi(scratch, Operand::Zero());
  if (CpuFeatures::IsSupported(ISELECT)) {
-    __ mov(scratch, Operand(thunk_ref));
+    __ Move(scratch, thunk_ref);
    __ isel(eq, scratch, function_address, scratch);
  } else {
    Label profiler_disabled;
    Label end_profiler_check;
    __ beq(&profiler_disabled);
-    __ mov(scratch, Operand(thunk_ref));
+    __ Move(scratch, thunk_ref);
    __ b(&end_profiler_check);
    __ bind(&profiler_disabled);
    __ mr(scratch, function_address);
@@ -650,7 +663,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  // r14 - next_address->kNextOffset
  // r15 - next_address->kLimitOffset
  // r16 - next_address->kLevelOffset
-  __ mov(r17, Operand(next_address));
+  __ Move(r17, next_address);
  __ LoadP(r14, MemOperand(r17, kNextOffset));
  __ LoadP(r15, MemOperand(r17, kLimitOffset));
  __ lwz(r16, MemOperand(r17, kLevelOffset));
@@ -661,7 +674,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, r3);
-    __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
+    __ Move(r3, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    __ PopSafepointRegisters();
  }
@@ -676,7 +689,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, r3);
-    __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
+    __ Move(r3, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    __ PopSafepointRegisters();
  }
@@ -715,7 +728,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  // Check if the function scheduled an exception.
  __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
-  __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate)));
+  __ Move(r15, ExternalReference::scheduled_exception_address(isolate));
  __ LoadP(r15, MemOperand(r15));
  __ cmp(r14, r15);
  __ bne(&promote_scheduled_exception);
@@ -731,7 +744,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  __ StoreP(r15, MemOperand(r17, kLimitOffset));
  __ mr(r14, r3);
  __ PrepareCallCFunction(1, r15);
-  __ mov(r3, Operand(ExternalReference::isolate_address(isolate)));
+  __ Move(r3, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ mr(r3, r14);
  __ b(&leave_exit_frame);
@@ -777,7 +790,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // return value default
  __ push(scratch);
  // isolate
-  __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ push(scratch);
  // holder
  __ push(holder);
@@ -854,7 +867,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
  __ push(scratch);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Push(scratch, scratch);
-  __ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
+  __ Move(scratch, ExternalReference::isolate_address(isolate()));
  __ Push(scratch, holder);
  __ Push(Smi::kZero);  // should_throw_on_error -> false
  __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
...
@@ -230,6 +230,18 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
}
void DirectCEntryStub::GenerateCall(MacroAssembler* masm, Register target) {
+#ifdef V8_EMBEDDED_BUILTINS
+  if (masm->root_array_available() &&
+      isolate()->ShouldLoadConstantsFromRootList()) {
+    // This is basically an inlined version of Call(Handle<Code>) that loads the
+    // code object into lr instead of ip.
+    __ Move(ip, target);
+    __ LookupConstant(r1, GetCode());
+    __ AddP(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Call(r1);
+    return;
+  }
+#endif
#if ABI_USES_FUNCTION_DESCRIPTORS && !defined(USE_SIMULATOR)
  // Native AIX/S390X Linux use a function descriptor.
  __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize));
@@ -656,14 +668,14 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  DCHECK(function_address == r3 || function_address == r4);
  Register scratch = r5;
-  __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate)));
+  __ Move(scratch, ExternalReference::is_profiling_address(isolate));
  __ LoadlB(scratch, MemOperand(scratch, 0));
  __ CmpP(scratch, Operand::Zero());
  Label profiler_disabled;
  Label end_profiler_check;
  __ beq(&profiler_disabled, Label::kNear);
-  __ mov(scratch, Operand(thunk_ref));
+  __ Move(scratch, thunk_ref);
  __ b(&end_profiler_check, Label::kNear);
  __ bind(&profiler_disabled);
  __ LoadRR(scratch, function_address);
@@ -674,7 +686,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  // r6 - next_address->kNextOffset
  // r7 - next_address->kLimitOffset
  // r8 - next_address->kLevelOffset
-  __ mov(r9, Operand(next_address));
+  __ Move(r9, next_address);
  __ LoadP(r6, MemOperand(r9, kNextOffset));
  __ LoadP(r7, MemOperand(r9, kLimitOffset));
  __ LoadlW(r8, MemOperand(r9, kLevelOffset));
@@ -685,7 +697,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, r2);
-    __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+    __ Move(r2, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    __ PopSafepointRegisters();
  }
@@ -700,7 +712,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, r2);
-    __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+    __ Move(r2, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    __ PopSafepointRegisters();
  }
@@ -737,7 +749,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  __ LeaveExitFrame(false, r6, stack_space_operand != nullptr);
  // Check if the function scheduled an exception.
-  __ mov(r7, Operand(ExternalReference::scheduled_exception_address(isolate)));
+  __ Move(r7, ExternalReference::scheduled_exception_address(isolate));
  __ LoadP(r7, MemOperand(r7));
  __ CompareRoot(r7, Heap::kTheHoleValueRootIndex);
  __ bne(&promote_scheduled_exception, Label::kNear);
@@ -753,7 +765,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
  __ StoreP(r7, MemOperand(r9, kLimitOffset));
  __ LoadRR(r6, r2);
  __ PrepareCallCFunction(1, r7);
-  __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+  __ Move(r2, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ LoadRR(r2, r6);
  __ b(&leave_exit_frame, Label::kNear);
@@ -799,7 +811,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // return value default
  __ push(scratch);
  // isolate
-  __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ push(scratch);
  // holder
  __ push(holder);
@@ -875,7 +887,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
  __ push(scratch);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Push(scratch, scratch);
-  __ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
+  __ Move(scratch, ExternalReference::isolate_address(isolate()));
  __ Push(scratch, holder);
  __ Push(Smi::kZero);  // should_throw_on_error -> false
  __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
...
@@ -154,7 +154,7 @@ void TurboAssembler::LookupConstant(Register destination,
  CHECK(is_uint19(offset));
  DCHECK_NE(destination, r0);
  LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
-  LoadP(destination, MemOperand(destination, offset));
+  LoadP(destination, MemOperand(destination, offset), r1);
}
void TurboAssembler::LookupExternalReference(Register destination,
...