Commit 70cede39 authored by jgruber, committed by Commit Bot

[builtins] Move CallApiGetter and CallApiCallback stubs to builtins

Calls from embedded builtins to stubs are expensive due to the
indirection through the builtins constants table. This moves
CallApiGetter and the 0/1 argument case of CallApiCallback to
builtins.

Bug: v8:6666
Change-Id: I49c4917253f790a3b947f42c50d6308a1ab99d91
Reviewed-on: https://chromium-review.googlesource.com/1070980
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53355}
parent 5674812c
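
For orientation before the diff: a condensed sketch of the dispatch this change introduces in CodeFactory::CallApiCallback, paraphrased from the code-factory hunk below rather than quoted verbatim. Argument counts 0 and 1 now resolve to the new embedded builtins; other counts keep using the platform code stub.

Callable CodeFactory::CallApiCallback(Isolate* isolate, int argc) {
  switch (argc) {
    case 0:  // Common case, served by the embedded builtin.
      return Callable(BUILTIN_CODE(isolate, CallApiCallback_Argc0),
                      ApiCallbackDescriptor(isolate));
    case 1:  // Common case, served by the embedded builtin.
      return Callable(BUILTIN_CODE(isolate, CallApiCallback_Argc1),
                      ApiCallbackDescriptor(isolate));
    default: {
      // Uncommon argument counts still go through the (non-embedded) stub.
      CallApiCallbackStub stub(isolate, argc);
      return make_callable(stub);
    }
  }
}
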
@@ -216,6 +216,18 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
#ifdef V8_EMBEDDED_BUILTINS
if (masm->root_array_available() &&
isolate()->ShouldLoadConstantsFromRootList()) {
// This is basically an inlined version of Call(Handle<Code>) that loads the
// code object into lr instead of ip.
__ Move(ip, target);
__ LookupConstant(lr, GetCode());
__ add(lr, lr, Operand(Code::kHeaderSize - kHeapObjectTag));
__ blx(lr);
return;
}
#endif
intptr_t code =
reinterpret_cast<intptr_t>(GetCode().location());
__ Move(ip, target);
@@ -617,13 +629,13 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
Label profiler_disabled;
Label end_profiler_check;
__ mov(r9, Operand(ExternalReference::is_profiling_address(isolate)));
__ Move(r9, ExternalReference::is_profiling_address(isolate));
__ ldrb(r9, MemOperand(r9, 0));
__ cmp(r9, Operand(0));
__ b(eq, &profiler_disabled);
// Additional parameter is the address of the actual callback.
__ mov(r3, Operand(thunk_ref));
__ Move(r3, thunk_ref);
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
@@ -631,7 +643,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ mov(r9, Operand(next_address));
__ Move(r9, next_address);
__ ldr(r4, MemOperand(r9, kNextOffset));
__ ldr(r5, MemOperand(r9, kLimitOffset));
__ ldr(r6, MemOperand(r9, kLevelOffset));
@@ -642,7 +654,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ Move(r0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_enter_external_function(), 1);
__ PopSafepointRegisters();
}
@@ -657,7 +669,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ Move(r0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_leave_external_function(), 1);
__ PopSafepointRegisters();
}
@@ -696,7 +708,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
// Check if the function scheduled an exception.
__ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
__ mov(r6, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ Move(r6, ExternalReference::scheduled_exception_address(isolate));
__ ldr(r5, MemOperand(r6));
__ cmp(r4, r5);
__ b(ne, &promote_scheduled_exception);
@@ -712,7 +724,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ str(r5, MemOperand(r9, kLimitOffset));
__ mov(r4, r0);
__ PrepareCallCFunction(1);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ Move(r0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
__ mov(r0, r4);
__ jmp(&leave_exit_frame);
@@ -759,8 +771,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
// return value default
__ push(scratch0);
// isolate
__ mov(scratch1,
Operand(ExternalReference::isolate_address(masm->isolate())));
__ Move(scratch1, ExternalReference::isolate_address(masm->isolate()));
__ push(scratch1);
// holder
__ push(holder);
@@ -829,7 +840,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ push(scratch);
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
__ Push(scratch, scratch);
__ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ Move(scratch, ExternalReference::isolate_address(isolate()));
__ Push(scratch, holder);
__ Push(Smi::kZero); // should_throw_on_error -> false
__ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
......
@@ -155,14 +155,14 @@ void TurboAssembler::LookupConstant(Register destination,
DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
Heap::kBuiltinsConstantsTableRootIndex));
// The ldr call below could end up clobbering the destination register when
// the offset does not fit into 12 bits (and thus needs to be loaded from the
// constant pool). In that case, we need to be extra-careful and temporarily
// use another register as the target.
// The ldr call below could end up clobbering ip when the offset does not fit
// into 12 bits (and thus needs to be loaded from the constant pool). In that
// case, we need to be extra-careful and temporarily use another register as
// the target.
const uint32_t offset =
FixedArray::kHeaderSize + index * kPointerSize - kHeapObjectTag;
const bool could_clobber_ip = !is_uint12(offset) && destination == ip;
const bool could_clobber_ip = !is_uint12(offset);
Register reg = destination;
if (could_clobber_ip) {
......
@@ -894,8 +894,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Ldr(data, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ LoadRoot(undef, Heap::kUndefinedValueRootIndex);
__ Mov(isolate_address,
Operand(ExternalReference::isolate_address(isolate())));
__ Mov(isolate_address, ExternalReference::isolate_address(isolate()));
__ Ldr(name, FieldMemOperand(callback, AccessorInfo::kNameOffset));
// PropertyCallbackArguments:
......
@@ -1242,6 +1242,9 @@ namespace internal {
TFS(StringAdd_ConvertRight_NotTenured, kLeft, kRight) \
\
/* Miscellaneous */ \
ASM(CallApiCallback_Argc0) \
ASM(CallApiCallback_Argc1) \
ASM(CallApiGetter) \
ASM(DoubleToI) \
TFC(GetProperty, GetProperty, 1) \
ASM(MathPowInternal)
......
@@ -1149,6 +1149,34 @@ void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
// CallApiGetterStub only exists as a stub to avoid duplicating code between
// here and code-stubs-<arch>.cc. For example, see CallApiFunctionAndReturn.
// Here we abuse the instantiated stub to generate code.
CallApiGetterStub stub(masm->isolate());
stub.Generate(masm);
}
void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
// The common variants of CallApiCallbackStub (i.e. all that are embedded into
// the snapshot) are generated as builtins. The rest remain available as code
// stubs. Here we abuse the instantiated stub to generate code and avoid
// duplication.
const int kArgc = 0;
CallApiCallbackStub stub(masm->isolate(), kArgc);
stub.Generate(masm);
}
void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
// The common variants of CallApiCallbackStub (i.e. all that are embedded into
// the snapshot) are generated as builtins. The rest remain available as code
// stubs. Here we abuse the instantiated stub to generate code and avoid
// duplication.
const int kArgc = 1;
CallApiCallbackStub stub(masm->isolate(), kArgc);
stub.Generate(masm);
}
// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
Label call_runtime(this, Label::kDeferred), return_undefined(this), end(this);
......
@@ -70,14 +70,25 @@ Handle<Code> CodeFactory::CEntry(Isolate* isolate, int result_size,
// static
Callable CodeFactory::ApiGetter(Isolate* isolate) {
CallApiGetterStub stub(isolate);
return make_callable(stub);
return Callable(BUILTIN_CODE(isolate, CallApiGetter),
ApiGetterDescriptor(isolate));
}
// static
Callable CodeFactory::CallApiCallback(Isolate* isolate, int argc) {
CallApiCallbackStub stub(isolate, argc);
return make_callable(stub);
switch (argc) {
case 0:
return Callable(BUILTIN_CODE(isolate, CallApiCallback_Argc0),
ApiCallbackDescriptor(isolate));
case 1:
return Callable(BUILTIN_CODE(isolate, CallApiCallback_Argc1),
ApiCallbackDescriptor(isolate));
default: {
CallApiCallbackStub stub(isolate, argc);
return make_callable(stub);
}
}
UNREACHABLE();
}
// static
......
@@ -545,7 +545,7 @@ class CallApiCallbackStub : public PlatformCodeStub {
CallApiCallbackStub(Isolate* isolate, int argc)
: PlatformCodeStub(isolate) {
CHECK_LE(0, argc);
CHECK_LE(0, argc); // The argc in {0, 1} cases are covered by builtins.
CHECK_LE(argc, kArgMax);
minor_key_ = ArgumentBits::encode(argc);
}
@@ -555,14 +555,20 @@ class CallApiCallbackStub : public PlatformCodeStub {
class ArgumentBits : public BitField<int, 0, kArgBits> {};
friend class Builtins; // For generating the related builtin.
DEFINE_CALL_INTERFACE_DESCRIPTOR(ApiCallback);
DEFINE_PLATFORM_CODE_STUB(CallApiCallback, PlatformCodeStub);
};
// TODO(jgruber): Convert this stub into a builtin.
// TODO(jgruber): This stub only exists to avoid code duplication between
// code-stubs-<arch>.cc and builtins-<arch>.cc. If CallApiCallbackStub is ever
// completely removed, CallApiGetterStub can also be deleted.
class CallApiGetterStub : public PlatformCodeStub {
public:
private:
// For generating the related builtin.
explicit CallApiGetterStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
friend class Builtins;
DEFINE_CALL_INTERFACE_DESCRIPTOR(ApiGetter);
DEFINE_PLATFORM_CODE_STUB(CallApiGetter, PlatformCodeStub);
......
@@ -2887,8 +2887,8 @@ Reduction JSCallReducer::ReduceCallApiFunction(
Handle<CallHandlerInfo> call_handler_info(
CallHandlerInfo::cast(function_template_info->call_code()), isolate());
Handle<Object> data(call_handler_info->data(), isolate());
CallApiCallbackStub stub(isolate(), argc);
CallInterfaceDescriptor cid = stub.GetCallInterfaceDescriptor();
Callable call_api_callback = CodeFactory::CallApiCallback(isolate(), argc);
CallInterfaceDescriptor cid = call_api_callback.descriptor();
auto call_descriptor = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), cid,
cid.GetStackParameterCount() + argc + 1 /* implicit receiver */,
@@ -2901,7 +2901,7 @@ Reduction JSCallReducer::ReduceCallApiFunction(
ExternalReference function_reference = ExternalReference::Create(
&api_function, ExternalReference::DIRECT_API_CALL);
node->InsertInput(graph()->zone(), 0,
jsgraph()->HeapConstant(stub.GetCode()));
jsgraph()->HeapConstant(call_api_callback.code()));
node->ReplaceInput(1, context);
node->InsertInput(graph()->zone(), 2, jsgraph()->Constant(data));
node->InsertInput(graph()->zone(), 3, holder);
......
@@ -1736,9 +1736,9 @@ Node* JSNativeContextSpecialization::InlineApiCall(
// Only setters have a value.
int const argc = value == nullptr ? 0 : 1;
// The stub always expects the receiver as the first param on the stack.
CallApiCallbackStub stub(isolate(), argc);
Callable call_api_callback = CodeFactory::CallApiCallback(isolate(), argc);
CallInterfaceDescriptor call_interface_descriptor =
stub.GetCallInterfaceDescriptor();
call_api_callback.descriptor();
auto call_descriptor = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), call_interface_descriptor,
call_interface_descriptor.GetStackParameterCount() + argc +
@@ -1751,7 +1751,7 @@ Node* JSNativeContextSpecialization::InlineApiCall(
Node* function_reference =
graph()->NewNode(common()->ExternalConstant(ExternalReference::Create(
&function, ExternalReference::DIRECT_API_CALL)));
Node* code = jsgraph()->HeapConstant(stub.GetCode());
Node* code = jsgraph()->HeapConstant(call_api_callback.code());
// Add CallApiCallbackStub's register argument as well.
Node* context = jsgraph()->Constant(native_context());
......
@@ -223,6 +223,17 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
#ifdef V8_EMBEDDED_BUILTINS
if (masm->root_array_available() &&
isolate()->ShouldLoadConstantsFromRootList()) {
// This is basically an inlined version of Call(Handle<Code>) that loads the
// code object into kScratchReg instead of t9.
__ Move(t9, target);
__ LookupConstant(kScratchReg, GetCode());
__ Call(kScratchReg, Code::kHeaderSize - kHeapObjectTag);
return;
}
#endif
intptr_t loc =
reinterpret_cast<intptr_t>(GetCode().location());
__ Move(t9, target);
@@ -618,7 +629,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
__ li(t9, Operand(thunk_ref));
__ li(t9, thunk_ref);
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
@@ -626,7 +637,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ li(s5, Operand(next_address));
__ li(s5, next_address);
__ lw(s0, MemOperand(s5, kNextOffset));
__ lw(s1, MemOperand(s5, kLimitOffset));
__ lw(s2, MemOperand(s5, kLevelOffset));
......
@@ -222,6 +222,19 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
#ifdef V8_EMBEDDED_BUILTINS
if (masm->root_array_available() &&
isolate()->ShouldLoadConstantsFromRootList()) {
// This is basically an inlined version of Call(Handle<Code>) that loads the
// code object into kScratchReg instead of t9.
__ Move(t9, target);
__ LookupConstant(kScratchReg, GetCode());
__ Daddu(kScratchReg, kScratchReg,
Operand(Code::kHeaderSize - kHeapObjectTag));
__ Call(kScratchReg);
return;
}
#endif
intptr_t loc =
reinterpret_cast<intptr_t>(GetCode().location());
__ Move(t9, target);
@@ -620,7 +633,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
__ li(t9, Operand(thunk_ref));
__ li(t9, thunk_ref);
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
@@ -628,7 +641,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ li(s5, Operand(next_address));
__ li(s5, next_address);
__ Ld(s0, MemOperand(s5, kNextOffset));
__ Ld(s1, MemOperand(s5, kLimitOffset));
__ Lw(s2, MemOperand(s5, kLevelOffset));
......