Commit ee78d118 authored by serya@chromium.org

Port direct API function call to x64 (ia32 CL is http://codereview.chromium.org/4456002/).

Review URL: http://codereview.chromium.org/5004004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5833 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 84541236
@@ -506,7 +506,7 @@ static bool GenerateFastApiCall(MacroAssembler* masm,
   __ mov(ApiParameterOperand(2), eax);  // v8::Arguments::values_.
   __ Set(ApiParameterOperand(3), Immediate(argc));  // v8::Arguments::length_.
   // v8::Arguments::is_construct_call_.
-  __ mov(ApiParameterOperand(4), Immediate(0));
+  __ Set(ApiParameterOperand(4), Immediate(0));
   // v8::InvocationCallback's argument.
   __ lea(eax, ApiParameterOperand(1));
...
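Aside (not part of the commit): the ia32 hunk above and the x64 GenerateFastApiCall added later in this CL both fill four consecutive pointer-sized stack slots that the invocation callback reads as a v8::Arguments. A minimal sketch of that layout, using a stand-in struct whose field names simply mirror the comments in the diff (the real declaration lives in V8's public headers):

#include <cstdint>

// Stand-in sketch, not the real v8::Arguments: the four slots written above.
struct ArgumentsSlotsSketch {
  void** implicit_args_;        // ApiParameterOperand(1) / StackSpaceOperand(0)
  void** values_;               // ApiParameterOperand(2) / StackSpaceOperand(1)
  intptr_t length_;             // ApiParameterOperand(3) / StackSpaceOperand(2)
  intptr_t is_construct_call_;  // ApiParameterOperand(4) / StackSpaceOperand(3)
};

static_assert(sizeof(ArgumentsSlotsSketch) == 4 * sizeof(void*),
              "four pointer-sized slots, matching kApiStackSpace = 4");

The callback then receives a pointer to the first of these slots, which is what the final lea into the argument register computes.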
@@ -2535,18 +2535,18 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
 #ifdef _WIN64
   // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
   // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
-  __ movq(Operand(rsp, 4 * kPointerSize), r14);  // argc.
-  __ movq(Operand(rsp, 5 * kPointerSize), r12);  // argv.
+  __ movq(StackSpaceOperand(0), r14);  // argc.
+  __ movq(StackSpaceOperand(1), r12);  // argv.
   if (result_size_ < 2) {
     // Pass a pointer to the Arguments object as the first argument.
     // Return result in single register (rax).
-    __ lea(rcx, Operand(rsp, 4 * kPointerSize));
+    __ lea(rcx, StackSpaceOperand(0));
   } else {
     ASSERT_EQ(2, result_size_);
     // Pass a pointer to the result location as the first argument.
-    __ lea(rcx, Operand(rsp, 6 * kPointerSize));
+    __ lea(rcx, StackSpaceOperand(2));
     // Pass a pointer to the Arguments object as the second argument.
-    __ lea(rdx, Operand(rsp, 4 * kPointerSize));
+    __ lea(rdx, StackSpaceOperand(0));
   }
 #else  // _WIN64
@@ -2686,7 +2686,12 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // builtin once.
   // Enter the exit frame that transitions from JavaScript to C++.
-  __ EnterExitFrame(result_size_);
+#ifdef _WIN64
+  int arg_stack_space = (result_size_ < 2 ? 2 : 4);
+#else
+  int arg_stack_space = 0;
+#endif
+  __ EnterExitFrame(arg_stack_space);
   // rax: Holds the context at this point, but should not be used.
   // On entry to code generated by GenerateCore, it must hold
...
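Aside (not part of the commit): the new arg_stack_space value above can be read as plain arithmetic. On Win64 the runtime-call stub needs two scratch slots for the {argc, argv} pair it passes by pointer, and two more when a two-word result has to be returned through memory; on other platforms the C arguments travel in registers and no extra slots are requested. A hypothetical helper expressing just that choice:

// Sketch only: the slot count CEntryStub::Generate passes to EnterExitFrame,
// mirroring the #ifdef _WIN64 block in the hunk above.
int ExitFrameArgSlots(bool win64, int result_size) {
  if (!win64) return 0;            // arguments go in registers elsewhere
  return result_size < 2 ? 2 : 4;  // {argc, argv}, plus a 2-slot result area
}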
@@ -498,18 +498,17 @@ static int Offset(ExternalReference ref0, ExternalReference ref1) {
 }
-void MacroAssembler::PrepareCallApiFunction(int stack_space, int argc) {
+void MacroAssembler::PrepareCallApiFunction(int stack_space,
+                                            int arg_stack_space) {
 #ifdef _WIN64
   // We need to prepare a slot for result handle on stack and put
   // a pointer to it into 1st arg register.
-  int register_based_args = argc > 3 ? 3 : argc;
-  EnterApiExitFrame(stack_space, argc - register_based_args + 1);
-  int return_value_slot = (argc > 3 ? argc - 3 + 1 : 4);
+  EnterApiExitFrame(stack_space, arg_stack_space + 1);
   // rcx must be used to pass the pointer to the return value slot.
-  lea(rcx, Operand(rsp, return_value_slot * kPointerSize));
+  lea(rcx, StackSpaceOperand(arg_stack_space));
 #else
-  EnterApiExitFrame(stack_space, argc);
+  EnterApiExitFrame(stack_space, arg_stack_space);
 #endif
 }
@@ -1744,22 +1743,15 @@ void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
   store_rax(context_address);
 }
-void MacroAssembler::EnterExitFrameEpilogue(int result_size,
-                                            int argc) {
+void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) {
 #ifdef _WIN64
-  // Reserve space on stack for result and argument structures, if necessary.
-  int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
-  // Reserve space for the Arguments object. The Windows 64-bit ABI
-  // requires us to pass this structure as a pointer to its location on
-  // the stack. The structure contains 2 values.
-  int argument_stack_space = argc * kPointerSize;
-  // We also need backing space for 4 parameters, even though
-  // we only pass one or two parameter, and it is in a register.
-  int argument_mirror_space = 4 * kPointerSize;
-  int total_stack_space =
-      argument_mirror_space + argument_stack_space + result_stack_space;
-  subq(rsp, Immediate(total_stack_space));
+  const int kShaddowSpace = 4;
+  arg_stack_space += kShaddowSpace;
 #endif
+  if (arg_stack_space > 0) {
+    subq(rsp, Immediate(arg_stack_space * kPointerSize));
+  }
   // Get the required frame alignment for the OS.
   static const int kFrameAlignment = OS::ActivationFrameAlignment();
@@ -1774,7 +1766,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int result_size,
 }
-void MacroAssembler::EnterExitFrame(int result_size) {
+void MacroAssembler::EnterExitFrame(int arg_stack_space) {
   EnterExitFramePrologue(true);
   // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
@@ -1782,13 +1774,12 @@ void MacroAssembler::EnterExitFrame(int result_size) {
   int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
   lea(r12, Operand(rbp, r14, times_pointer_size, offset));
-  EnterExitFrameEpilogue(result_size, 2);
+  EnterExitFrameEpilogue(arg_stack_space);
 }
 void MacroAssembler::EnterApiExitFrame(int stack_space,
-                                       int argc,
-                                       int result_size) {
+                                       int arg_stack_space) {
   EnterExitFramePrologue(false);
   // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
@@ -1796,11 +1787,7 @@ void MacroAssembler::EnterApiExitFrame(int stack_space,
   int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
   lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset));
-#ifndef _WIN64
-  ASSERT(argc <= 6);  // EnterApiExitFrame supports only register based args.
-#endif
-  EnterExitFrameEpilogue(result_size, argc);
+  EnterExitFrameEpilogue(arg_stack_space);
 }
...
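Aside (not part of the commit): the rewritten EnterExitFrameEpilogue boils down to "add the Win64 shadow space, reserve the bytes, then align". The sketch below restates it as ordinary integer arithmetic; the final align-down step is only hinted at by the trailing kFrameAlignment context lines, so treat it as an assumption about the unshown remainder of the function.

#include <cassert>
#include <cstdint>

const int kPointerSize = 8;  // x64

// Sketch only: the stack adjustment performed by the new epilogue.
uintptr_t ExitFrameEpilogueSketch(uintptr_t rsp,
                                  int arg_stack_space,
                                  int frame_alignment,
                                  bool win64) {
  if (win64) {
    const int kShadowSpace = 4;    // home slots for rcx, rdx, r8, r9
    arg_stack_space += kShadowSpace;
  }
  if (arg_stack_space > 0) {
    rsp -= arg_stack_space * kPointerSize;
  }
  // Assumed: align rsp down to the OS activation-frame alignment.
  assert(frame_alignment > 0 && (frame_alignment & (frame_alignment - 1)) == 0);
  return rsp & ~static_cast<uintptr_t>(frame_alignment - 1);
}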
@@ -155,11 +155,13 @@ class MacroAssembler: public Assembler {
   // debug mode. Expects the number of arguments in register rax and
   // sets up the number of arguments in register rdi and the pointer
   // to the first argument in register rsi.
-  void EnterExitFrame(int result_size = 1);
+  //
+  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
+  // accessible via StackSpaceOperand.
+  void EnterExitFrame(int arg_stack_space = 0);
   void EnterApiExitFrame(int stack_space,
-                         int argc,
-                         int result_size = 1);
+                         int arg_stack_space);
   // Leave the current exit frame. Expects/provides the return value in
   // register rax:rdx (untouched) and the pointer to the first
@@ -838,7 +840,12 @@ class MacroAssembler: public Assembler {
   // (rcx must be preserverd until TryCallApiFunctionAndReturn). argc is number
   // of arguments to be passed in C-function. stack_space * kPointerSize bytes
   // will be removed from stack after the call. Saves context (rsi).
-  void PrepareCallApiFunction(int stack_space, int argc);
+  // Clobbers rax. Allocates arg_stack_space * kPointerSize inside the exit
+  // frame (not GCed).
+  //
+  // Assumes stack_space GCed references on top of the stack and return address.
+  // After call they will be removed.
+  void PrepareCallApiFunction(int stack_space, int arg_stack_space);
   // Calls an API function. Allocates HandleScope, extracts
   // returned value from handle and propagates exceptions.
@@ -935,7 +942,10 @@ class MacroAssembler: public Assembler {
   void LeaveFrame(StackFrame::Type type);
   void EnterExitFramePrologue(bool save_rax);
-  void EnterExitFrameEpilogue(int result_size, int argc);
+
+  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
+  // accessible via StackSpaceOperand.
+  void EnterExitFrameEpilogue(int arg_stack_space);
   // Allocation support helpers.
   // Loads the top of new-space into the result register.
@@ -1008,6 +1018,17 @@ static inline Operand GlobalObjectOperand() {
 }
+
+// Provides access to exit frame stack space (not GCed).
+static inline Operand StackSpaceOperand(int index) {
+#ifdef _WIN64
+  const int kShaddowSpace = 4;
+  return Operand(rsp, (index + kShaddowSpace) * kPointerSize);
+#else
+  return Operand(rsp, index * kPointerSize);
+#endif
+}
+
 #ifdef GENERATED_CODE_COVERAGE
 extern void LogGeneratedCodeCoverage(const char* file_line);
...
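Aside (not part of the commit): StackSpaceOperand, added in the header hunk above, is the pivot of this CL. Index i maps to rsp + i * kPointerSize on most platforms, but on Win64 it first skips the four-slot shadow (home) area, so index 0 lands at rsp + 32. A small runnable sketch of just the offset computation:

#include <cstdio>

const int kPointerSize = 8;  // x64

// Sketch only: the displacement that StackSpaceOperand(index) encodes.
int StackSpaceOffset(int index, bool win64) {
  const int kShadowSpace = win64 ? 4 : 0;  // Win64 home space for rcx, rdx, r8, r9
  return (index + kShadowSpace) * kPointerSize;
}

int main() {
  std::printf("index 0 on Win64:  rsp + %d\n", StackSpaceOffset(0, true));   // 32
  std::printf("index 0 elsewhere: rsp + %d\n", StackSpaceOffset(0, false));  //  0
  return 0;
}

This is also why EnterExitFrameEpilogue adds the same kShaddowSpace constant before reserving memory: the slots behind StackSpaceOperand(0..n-1) must lie beyond the area a Win64 callee is allowed to clobber.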
@@ -497,6 +497,8 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
   __ ret(0);
 }
+// Number of pointers to be reserved on stack for fast API call.
+static const int kFastApiCallArguments = 3;
 // Reserves space for the extra arguments to FastHandleApiCall in the
 // caller's frame.
@@ -508,48 +510,48 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
   //  -- rsp[8] : last argument in the internal frame of the caller
   // -----------------------------------
   __ movq(scratch, Operand(rsp, 0));
-  __ subq(rsp, Immediate(4 * kPointerSize));
+  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
   __ movq(Operand(rsp, 0), scratch);
   __ Move(scratch, Smi::FromInt(0));
-  __ movq(Operand(rsp, 1 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 2 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 3 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 4 * kPointerSize), scratch);
+  for (int i = 1; i <= kFastApiCallArguments; i++) {
+    __ movq(Operand(rsp, i * kPointerSize), scratch);
+  }
 }
 // Undoes the effects of ReserveSpaceForFastApiCall.
 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
   // ----------- S t a t e -------------
-  //  -- rsp[0] : return address
-  //  -- rsp[8] : last fast api call extra argument
+  //  -- rsp[0]  : return address.
+  //  -- rsp[8]  : last fast api call extra argument.
   //  -- ...
-  //  -- rsp[32] : first fast api call extra argument
-  //  -- rsp[40] : last argument in the internal frame
+  //  -- rsp[kFastApiCallArguments * 8]     : first fast api call extra argument.
+  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
+  //                                          frame.
   // -----------------------------------
   __ movq(scratch, Operand(rsp, 0));
-  __ movq(Operand(rsp, 4 * kPointerSize), scratch);
-  __ addq(rsp, Immediate(kPointerSize * 4));
+  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
 }
 // Generates call to FastHandleApiCall builtin.
-static void GenerateFastApiCall(MacroAssembler* masm,
+static bool GenerateFastApiCall(MacroAssembler* masm,
                                 const CallOptimization& optimization,
-                                int argc) {
+                                int argc,
+                                Failure** failure) {
   // ----------- S t a t e -------------
   //  -- rsp[0] : return address
   //  -- rsp[8] : object passing the type check
   //              (last fast api call extra argument,
   //               set by CheckPrototypes)
-  //  -- rsp[16] : api call data
-  //  -- rsp[24] : api callback
-  //  -- rsp[32] : api function
+  //  -- rsp[16] : api function
   //              (first fast api call extra argument)
-  //  -- rsp[40] : last argument
+  //  -- rsp[24] : api call data
+  //  -- rsp[32] : last argument
   //  -- ...
-  //  -- rsp[(argc + 5) * 8] : first argument
-  //  -- rsp[(argc + 6) * 8] : receiver
+  //  -- rsp[(argc + 3) * 8] : first argument
+  //  -- rsp[(argc + 4) * 8] : receiver
   // -----------------------------------
   // Get the function and setup the context.
@@ -558,37 +560,57 @@ static void GenerateFastApiCall(MacroAssembler* masm,
   __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   // Pass the additional arguments FastHandleApiCall expects.
-  __ movq(Operand(rsp, 4 * kPointerSize), rdi);
-  bool info_loaded = false;
-  Object* callback = optimization.api_call_info()->callback();
-  if (Heap::InNewSpace(callback)) {
-    info_loaded = true;
-    __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kCallbackOffset));
-    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
-  } else {
-    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(callback));
-  }
+  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
   Object* call_data = optimization.api_call_info()->data();
+  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
   if (Heap::InNewSpace(call_data)) {
-    if (!info_loaded) {
-      __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    }
+    __ Move(rcx, api_call_info_handle);
     __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
-    __ movq(Operand(rsp, 2 * kPointerSize), rbx);
+    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
   } else {
-    __ Move(Operand(rsp, 2 * kPointerSize), Handle<Object>(call_data));
+    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
   }
-  // Set the number of arguments.
-  __ movq(rax, Immediate(argc + 4));
-  // Jump to the fast api call builtin (tail call).
-  Handle<Code> code = Handle<Code>(
-      Builtins::builtin(Builtins::FastHandleApiCall));
-  ParameterCount expected(0);
-  __ InvokeCode(code, expected, expected,
-                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+  // Prepare arguments.
+  __ lea(rbx, Operand(rsp, 3 * kPointerSize));
+  Object* callback = optimization.api_call_info()->callback();
+  Address api_function_address = v8::ToCData<Address>(callback);
+  ApiFunction fun(api_function_address);
+#ifdef _WIN64
+  // Win64 uses first register--rcx--for returned value.
+  Register arguments_arg = rdx;
+#else
+  Register arguments_arg = rdi;
+#endif
+  // Allocate the v8::Arguments structure in the arguments' space since
+  // it's not controlled by GC.
+  const int kApiStackSpace = 4;
+  __ PrepareCallApiFunction(argc + kFastApiCallArguments + 1, kApiStackSpace);
+  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
+  __ addq(rbx, Immediate(argc * kPointerSize));
+  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
+  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
+  // v8::Arguments::is_construct_call_.
+  __ Set(StackSpaceOperand(3), 0);
+  // v8::InvocationCallback's argument.
+  __ lea(arguments_arg, StackSpaceOperand(0));
+  // Emitting a stub call may try to allocate (if the code is not
+  // already generated). Do not allow the assembler to perform a
+  // garbage collection but instead return the allocation failure
+  // object.
+  MaybeObject* result =
+      masm->TryCallApiFunctionAndReturn(&fun);
+  if (result->IsFailure()) {
+    *failure = Failure::cast(result);
+    return false;
+  }
+  return true;
 }
@@ -601,7 +623,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
         arguments_(arguments),
         name_(name) {}
-  void Compile(MacroAssembler* masm,
+  bool Compile(MacroAssembler* masm,
               JSObject* object,
               JSObject* holder,
              String* name,
@@ -610,7 +632,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
               Register scratch1,
               Register scratch2,
               Register scratch3,
-              Label* miss) {
+              Label* miss,
+              Failure** failure) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
@@ -620,17 +643,18 @@ class CallInterceptorCompiler BASE_EMBEDDED {
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
-      CompileCacheable(masm,
-                       object,
-                       receiver,
-                       scratch1,
-                       scratch2,
-                       scratch3,
-                       holder,
-                       lookup,
-                       name,
-                       optimization,
-                       miss);
+      return CompileCacheable(masm,
+                              object,
+                              receiver,
+                              scratch1,
+                              scratch2,
+                              scratch3,
+                              holder,
+                              lookup,
+                              name,
+                              optimization,
+                              miss,
+                              failure);
    } else {
      CompileRegular(masm,
                     object,
@@ -641,11 +665,12 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                     name,
                     holder,
                     miss);
+      return true;
    }
  }
 private:
-  void CompileCacheable(MacroAssembler* masm,
+  bool CompileCacheable(MacroAssembler* masm,
                        JSObject* object,
                        Register receiver,
                        Register scratch1,
@@ -655,7 +680,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                        LookupResult* lookup,
                        String* name,
                        const CallOptimization& optimization,
-                       Label* miss_label) {
+                       Label* miss_label,
+                       Failure** failure) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
@@ -717,7 +743,13 @@ class CallInterceptorCompiler BASE_EMBEDDED {
    // Invoke function.
    if (can_do_fast_api_call) {
-      GenerateFastApiCall(masm, optimization, arguments_.immediate());
+      bool success = GenerateFastApiCall(masm,
+                                         optimization,
+                                         arguments_.immediate(),
+                                         failure);
+      if (!success) {
+        return false;
+      }
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
@@ -735,6 +767,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
+    return true;
  }
  void CompileRegular(MacroAssembler* masm,
@@ -1036,7 +1070,11 @@ MaybeObject* CallStubCompiler::CompileCallConstant(
  }
  if (depth != kInvalidProtoDepth) {
-    GenerateFastApiCall(masm(), optimization, argc);
+    Failure* failure;
+    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
+    if (!success) {
+      return failure;
+    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }
@@ -1723,16 +1761,21 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  CallInterceptorCompiler compiler(this, arguments(), rcx);
-  compiler.Compile(masm(),
-                   object,
-                   holder,
-                   name,
-                   &lookup,
-                   rdx,
-                   rbx,
-                   rdi,
-                   rax,
-                   &miss);
+  Failure* failure;
+  bool success = compiler.Compile(masm(),
+                                  object,
+                                  holder,
+                                  name,
+                                  &lookup,
+                                  rdx,
+                                  rbx,
+                                  rdi,
+                                  rax,
+                                  &miss,
+                                  &failure);
+  if (!success) {
+    return failure;
+  }
  // Restore receiver.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
...
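Aside (not part of the commit): besides the stack-layout changes, this file threads a bool-plus-Failure** convention from GenerateFastApiCall up through CallInterceptorCompiler to the CallStubCompiler entry points, so that an allocation failure inside TryCallApiFunctionAndReturn is handed back to the caller instead of forcing a GC while the assembler is active. A stripped-down sketch of that pattern with stand-in types (not V8's real Failure/MaybeObject):

// Stand-in types for the sketch only.
struct Failure {};
struct MaybeObject { Failure* failure; };  // null means success

// In the role of GenerateFastApiCall: report failure through the out-param.
bool GenerateFastApiCallSketch(MaybeObject try_call_result, Failure** failure) {
  if (try_call_result.failure != nullptr) {
    *failure = try_call_result.failure;  // hand the failure back...
    return false;
  }
  return true;
}

// In the role of CallStubCompiler::CompileCallConstant.
MaybeObject CompileSketch(MaybeObject try_call_result) {
  Failure* failure = nullptr;
  if (!GenerateFastApiCallSketch(try_call_result, &failure)) {
    return MaybeObject{failure};  // ...and return it as a MaybeObject.
  }
  return MaybeObject{nullptr};
}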