Commit 31a9396e authored by mbrandy's avatar mbrandy Committed by Commit bot

PPC: [builtins] Unify the various versions of [[Call]] with a Call builtin.

Port ccbb4ff0

Original commit message:
    The new Call and CallFunction builtins supersede the current
    CallFunctionStub (and CallIC magic) and will be the single bottleneck
    for all calling, including the currently special Function.prototype.call
    and Function.prototype.apply builtins, which had handwritten (and
    not fully compliant) versions of CallFunctionStub, and also the
    CallIC(s), which were also slightly different.

    This also reduces the overhead for API function calls, which is still
    unnecessarily high, but let's do that step-by-step.

    This also fixes a bunch of cases where the implicit ToObject for
    sloppy receivers was done in the wrong context (in the caller
    context instead of the callee context), which basically meant
    that we allowed cross context access to %ObjectPrototype%.

    MIPS and MIPS64 ports contributed by akos.palfi@imgtec.com.

R=bmeurer@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com, dstence@us.ibm.com
BUG=v8:4413
LOG=n

Review URL: https://codereview.chromium.org/1327093002

Cr-Commit-Position: refs/heads/master@{#30656}
parent 50c6b031
...@@ -1263,6 +1263,7 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { ...@@ -1263,6 +1263,7 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
} }
// static
void Builtins::Generate_FunctionCall(MacroAssembler* masm) { void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 1. Make sure we have at least one argument. // 1. Make sure we have at least one argument.
// r3: actual number of arguments // r3: actual number of arguments
...@@ -1270,201 +1271,41 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1270,201 +1271,41 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
Label done; Label done;
__ cmpi(r3, Operand::Zero()); __ cmpi(r3, Operand::Zero());
__ bne(&done); __ bne(&done);
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex); __ PushRoot(Heap::kUndefinedValueRootIndex);
__ push(r5);
__ addi(r3, r3, Operand(1)); __ addi(r3, r3, Operand(1));
__ bind(&done); __ bind(&done);
} }
// 2. Get the function to call (passed as receiver) from the stack, check // 2. Get the callable to call (passed as receiver) from the stack.
// if it is a function.
// r3: actual number of arguments
Label slow, non_function;
__ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
__ add(r4, sp, r4);
__ LoadP(r4, MemOperand(r4));
__ JumpIfSmi(r4, &non_function);
__ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
__ bne(&slow);
// 3a. Patch the first argument if necessary when calling a function.
// r3: actual number of arguments
// r4: function
Label shift_arguments;
__ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION
{
Label convert_to_object, use_global_proxy, patch_receiver;
// Change context eagerly in case we need the global receiver.
__ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
// Do not transform the receiver for strict mode functions.
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
__ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
SharedFunctionInfo::kStrictModeFunction,
#else
SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
r0);
__ bne(&shift_arguments, cr0);
// Do not transform the receiver for native (Compilerhints already in r6).
__ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
SharedFunctionInfo::kNative,
#else
SharedFunctionInfo::kNative + kSmiTagSize,
#endif
r0);
__ bne(&shift_arguments, cr0);
// Compute the receiver in sloppy mode.
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
__ add(r5, sp, ip);
__ LoadP(r5, MemOperand(r5, -kPointerSize));
// r3: actual number of arguments
// r4: function
// r5: first argument
__ JumpIfSmi(r5, &convert_to_object);
__ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
__ cmp(r5, r6);
__ beq(&use_global_proxy);
__ LoadRoot(r6, Heap::kNullValueRootIndex);
__ cmp(r5, r6);
__ beq(&use_global_proxy);
STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
__ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
__ bge(&shift_arguments);
__ bind(&convert_to_object);
{
// Enter an internal frame in order to preserve argument count.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r3);
__ Push(r3);
__ mr(r3, r5);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ mr(r5, r3);
__ pop(r3);
__ SmiUntag(r3);
// Exit the internal frame.
}
// Restore the function to r4, and the flag to r7.
__ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
__ add(r7, sp, r7);
__ LoadP(r4, MemOperand(r7));
__ li(r7, Operand::Zero());
__ b(&patch_receiver);
__ bind(&use_global_proxy);
__ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
__ bind(&patch_receiver);
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
__ add(r6, sp, ip);
__ StoreP(r5, MemOperand(r6, -kPointerSize));
__ b(&shift_arguments);
}
// 3b. Check for function proxy.
__ bind(&slow);
__ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy
__ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
__ beq(&shift_arguments);
__ bind(&non_function);
__ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function
// 3c. Patch the first argument when calling a non-function. The
// CALL_NON_FUNCTION builtin expects the non-function callee as
// receiver, so overwrite the first argument which will ultimately
// become the receiver.
// r3: actual number of arguments // r3: actual number of arguments
// r4: function __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
// r7: call type (0: JS function, 1: function proxy, 2: non-function) __ LoadPX(r4, MemOperand(sp, r5));
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
__ add(r5, sp, ip);
__ StoreP(r4, MemOperand(r5, -kPointerSize));
// 4. Shift arguments and return address one slot down on the stack // 3. Shift arguments and return address one slot down on the stack
// (overwriting the original receiver). Adjust argument count to make // (overwriting the original receiver). Adjust argument count to make
// the original first argument the new receiver. // the original first argument the new receiver.
// r3: actual number of arguments // r3: actual number of arguments
// r4: function // r4: callable
// r7: call type (0: JS function, 1: function proxy, 2: non-function)
__ bind(&shift_arguments);
{ {
Label loop; Label loop;
// Calculate the copy start address (destination). Copy end address is sp. // Calculate the copy start address (destination). Copy end address is sp.
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); __ add(r5, sp, r5);
__ add(r5, sp, ip);
__ mtctr(r3);
__ bind(&loop); __ bind(&loop);
__ LoadP(ip, MemOperand(r5, -kPointerSize)); __ LoadP(ip, MemOperand(r5, -kPointerSize));
__ StoreP(ip, MemOperand(r5)); __ StoreP(ip, MemOperand(r5));
__ subi(r5, r5, Operand(kPointerSize)); __ subi(r5, r5, Operand(kPointerSize));
__ cmp(r5, sp); __ bdnz(&loop);
__ bne(&loop);
// Adjust the actual number of arguments and remove the top element // Adjust the actual number of arguments and remove the top element
// (which is a copy of the last argument). // (which is a copy of the last argument).
__ subi(r3, r3, Operand(1)); __ subi(r3, r3, Operand(1));
__ pop(); __ pop();
} }
// 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, // 4. Call the callable.
// or a function proxy via CALL_FUNCTION_PROXY. __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// r3: actual number of arguments
// r4: function
// r7: call type (0: JS function, 1: function proxy, 2: non-function)
{
Label function, non_proxy;
__ cmpi(r7, Operand::Zero());
__ beq(&function);
// Expected number of arguments is 0 for CALL_NON_FUNCTION.
__ li(r5, Operand::Zero());
__ cmpi(r7, Operand(1));
__ bne(&non_proxy);
__ push(r4); // re-add proxy object as additional argument
__ addi(r3, r3, Operand(1));
__ GetBuiltinFunction(r4, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&non_proxy);
__ GetBuiltinFunction(r4, Context::CALL_NON_FUNCTION_BUILTIN_INDEX);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&function);
}
// 5b. Get the code to call from the function and check that the number of
// expected arguments matches what we're providing. If so, jump
// (tail-call) to the code in register ip without checking arguments.
// r3: actual number of arguments
// r4: function
__ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadWordArith(
r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
__ SmiUntag(r5);
#endif
__ cmp(r5, r3); // Check formal and actual parameter counts.
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET, ne);
__ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
ParameterCount expected(0);
__ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
} }
...@@ -1528,9 +1369,8 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { ...@@ -1528,9 +1369,8 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
const int kFunctionOffset = kReceiverOffset + kPointerSize; const int kFunctionOffset = kReceiverOffset + kPointerSize;
__ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
__ push(r3); __ LoadP(r4, MemOperand(fp, kArgumentsOffset)); // get the args array
__ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array __ Push(r3, r4);
__ push(r3);
if (targetIsArgument) { if (targetIsArgument) {
__ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
CALL_FUNCTION); CALL_FUNCTION);
...@@ -1546,99 +1386,17 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { ...@@ -1546,99 +1386,17 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
const int kLimitOffset = const int kLimitOffset =
StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
__ li(r4, Operand::Zero()); __ li(r4, Operand::Zero());
__ Push(r3, r4); // limit and initial index. __ LoadP(r5, MemOperand(fp, kReceiverOffset));
__ Push(r3, r4, r5); // limit, initial index and receiver.
// Get the receiver.
__ LoadP(r3, MemOperand(fp, kReceiverOffset));
// Check that the function is a JS function (otherwise it must be a proxy).
Label push_receiver;
__ LoadP(r4, MemOperand(fp, kFunctionOffset));
__ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
__ bne(&push_receiver);
// Change context eagerly to get the right global object if necessary.
__ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
// Load the shared function info while the function is still in r4.
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
// Compute the receiver.
// Do not transform the receiver for strict mode functions.
Label call_to_object, use_global_proxy;
__ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
__ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
SharedFunctionInfo::kStrictModeFunction,
#else
SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
r0);
__ bne(&push_receiver, cr0);
// Do not transform the receiver for strict mode functions.
__ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
SharedFunctionInfo::kNative,
#else
SharedFunctionInfo::kNative + kSmiTagSize,
#endif
r0);
__ bne(&push_receiver, cr0);
// Compute the receiver in sloppy mode.
__ JumpIfSmi(r3, &call_to_object);
__ LoadRoot(r4, Heap::kNullValueRootIndex);
__ cmp(r3, r4);
__ beq(&use_global_proxy);
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ cmp(r3, r4);
__ beq(&use_global_proxy);
// Check if the receiver is already a JavaScript object.
// r3: receiver
STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
__ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
__ bge(&push_receiver);
// Convert the receiver to a regular object.
// r3: receiver
__ bind(&call_to_object);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ b(&push_receiver);
__ bind(&use_global_proxy);
__ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
__ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));
// Push the receiver.
// r3: receiver
__ bind(&push_receiver);
__ push(r3);
// Copy all arguments from the array to the stack. // Copy all arguments from the array to the stack.
Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
kLimitOffset); kLimitOffset);
// Call the function. // Call the callable.
Label call_proxy; // TODO(bmeurer): This should be a tail call according to ES6.
ParameterCount actual(r3);
__ LoadP(r4, MemOperand(fp, kFunctionOffset)); __ LoadP(r4, MemOperand(fp, kFunctionOffset));
__ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ bne(&call_proxy);
__ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
__ LeaveFrame(StackFrame::INTERNAL, kStackSize * kPointerSize);
__ blr();
// Call the function proxy.
__ bind(&call_proxy);
__ push(r4); // add function proxy as last argument
__ addi(r3, r3, Operand(1));
__ li(r5, Operand::Zero());
__ GetBuiltinFunction(r4, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
__ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
// Tear down the internal frame and remove function, receiver and args. // Tear down the internal frame and remove function, receiver and args.
} }
...@@ -1775,6 +1533,146 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { ...@@ -1775,6 +1533,146 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
} }
// static
// Calls a target already checked to be a JSFunction: performs the implicit
// sloppy-mode receiver conversion (ES6 9.2.1 [[Call]]) in the callee context,
// then tail-calls the function's code, going through the arguments adaptor
// (via InvokeCode) if the actual argument count differs from the expected one.
void Builtins::Generate_CallFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  Label convert, convert_global_proxy, convert_to_object, done_convert;
  __ AssertFunction(r4);
  // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
  // slot is "classConstructor".
  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // The native and strict-mode bits live in the same byte of the compiler
  // hints, so a single byte load + mask tests both at once.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  __ lbz(r6, FieldMemOperand(r5, SharedFunctionInfo::kNativeByteOffset));
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                          (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ bne(&done_convert, cr0);
  {
    // Load the receiver from the stack (it sits above the r3 arguments).
    __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r6, MemOperand(sp, r6));

    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- r6 : the receiver
    //  -- cp : the function context.
    // -----------------------------------

    Label convert_receiver;
    __ JumpIfSmi(r6, &convert_to_object);
    // Receivers that are already JS receivers (objects) need no conversion.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
    __ bge(&done_convert);
    // undefined/null map to the global proxy of the *callee* context.
    __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
    __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
    __ bind(&convert_global_proxy);
    {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    }
    __ b(&convert_receiver);
    __ bind(&convert_to_object);
    {
      // Convert receiver using ToObject.
      // TODO(bmeurer): Inline the allocation here to avoid building the frame
      // in the fast case? (fall back to AllocateInNewSpace?)
      // The frame preserves the (smi-tagged) argument count and the function
      // across the stub call.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r3);
      __ Push(r3, r4);
      __ mr(r3, r6);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r6, r3);
      __ Pop(r3, r4);
      __ SmiUntag(r3);
    }
    // r5 (shared function info) was clobbered above; reload it.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ bind(&convert_receiver);
    // Store the converted receiver back into its stack slot.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  // Tail-call the function's code; InvokeCode dispatches through the
  // arguments adaptor when expected (r5) != actual (r3).
  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  // On 32-bit the formal parameter count is stored as a Smi.
  __ SmiUntag(r5);
#endif
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeCode(r6, expected, actual, JUMP_FUNCTION, NullCallWrapper());
}
// static
// The single bottleneck for [[Call]]: dispatches on the target's type —
// JSFunction goes straight to CallFunction, a function proxy is unwrapped to
// its call trap, and anything else is routed through the runtime's function
// delegate (which raises a TypeError for non-callables).
void Builtins::Generate_Call(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_smi, non_function;
  __ JumpIfSmi(r4, &non_function);
  // Re-entry point after unwrapping a proxy's call trap below.
  __ bind(&non_smi);
  __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET,
          eq);
  // r5 still holds the instance type from CompareObjectType.
  __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ bne(&non_function);

  // 1. Call to function proxy.
  // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
  __ LoadP(r4, FieldMemOperand(r4, JSFunctionProxy::kCallTrapOffset));
  __ AssertNotSmi(r4);
  __ b(&non_smi);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
  // be awesome instead; i.e. a trivial improvement would be to call into the
  // runtime and just deal with the API function there instead of returning a
  // delegate from a runtime call that just jumps back to the runtime once
  // called. Or, bonus points, call directly into the C API function here, as
  // we do in some Crankshaft fast cases.
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r5));
  {
    // Determine the delegate for the target (if any).
    // The internal frame preserves the (smi-tagged) argument count across
    // the runtime call.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(r3);
    __ Push(r3, r4);
    __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
    __ mr(r4, r3);
    __ Pop(r3);
    __ SmiUntag(r3);
  }
  // The delegate is always a regular function.
  __ AssertFunction(r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r3 : actual number of arguments // -- r3 : actual number of arguments
......
...@@ -2567,30 +2567,9 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { ...@@ -2567,30 +2567,9 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
} }
static void EmitSlowCase(MacroAssembler* masm, int argc, Label* non_function) { static void EmitSlowCase(MacroAssembler* masm, int argc) {
// Check for function proxy. __ mov(r3, Operand(argc));
STATIC_ASSERT(JS_FUNCTION_PROXY_TYPE < 0xffffu); __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ cmpi(r7, Operand(JS_FUNCTION_PROXY_TYPE));
__ bne(non_function);
__ push(r4); // put proxy as additional argument
__ li(r3, Operand(argc + 1));
__ li(r5, Operand::Zero());
__ GetBuiltinFunction(r4, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
{
Handle<Code> adaptor =
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
// CALL_NON_FUNCTION expects the non-function callee as receiver (instead
// of the original receiver from the call site).
__ bind(non_function);
__ StoreP(r4, MemOperand(sp, argc * kPointerSize), r0);
__ li(r3, Operand(argc)); // Set up the number of arguments.
__ li(r5, Operand::Zero());
__ GetBuiltinFunction(r4, Context::CALL_NON_FUNCTION_BUILTIN_INDEX);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
} }
...@@ -2612,12 +2591,12 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { ...@@ -2612,12 +2591,12 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, static void CallFunctionNoFeedback(MacroAssembler* masm, int argc,
bool needs_checks, bool call_as_method) { bool needs_checks, bool call_as_method) {
// r4 : the function to call // r4 : the function to call
Label slow, non_function, wrap, cont; Label slow, wrap, cont;
if (needs_checks) { if (needs_checks) {
// Check that the function is really a JavaScript function. // Check that the function is really a JavaScript function.
// r4: pushed function (to be verified) // r4: pushed function (to be verified)
__ JumpIfSmi(r4, &non_function); __ JumpIfSmi(r4, &slow);
// Goto slow case if we do not have a function. // Goto slow case if we do not have a function.
__ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
...@@ -2652,7 +2631,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, ...@@ -2652,7 +2631,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc,
if (needs_checks) { if (needs_checks) {
// Slow-case: Non-function called. // Slow-case: Non-function called.
__ bind(&slow); __ bind(&slow);
EmitSlowCase(masm, argc, &non_function); EmitSlowCase(masm, argc);
} }
if (call_as_method) { if (call_as_method) {
...@@ -2794,10 +2773,8 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { ...@@ -2794,10 +2773,8 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
GenerateMiss(masm); GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss. // The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); __ mov(r3, Operand(arg_count()));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Unreachable.
__ stop("Unexpected code address");
} }
...@@ -2810,7 +2787,7 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2810,7 +2787,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
const int generic_offset = const int generic_offset =
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
Label extra_checks_or_miss, slow_start; Label extra_checks_or_miss, slow_start;
Label slow, non_function, wrap, cont; Label slow, wrap, cont;
Label have_js_function; Label have_js_function;
int argc = arg_count(); int argc = arg_count();
ParameterCount actual(argc); ParameterCount actual(argc);
...@@ -2864,7 +2841,7 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2864,7 +2841,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ InvokeFunction(r4, actual, JUMP_FUNCTION, NullCallWrapper()); __ InvokeFunction(r4, actual, JUMP_FUNCTION, NullCallWrapper());
__ bind(&slow); __ bind(&slow);
EmitSlowCase(masm, argc, &non_function); EmitSlowCase(masm, argc);
if (CallAsMethod()) { if (CallAsMethod()) {
__ bind(&wrap); __ bind(&wrap);
...@@ -2949,7 +2926,7 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2949,7 +2926,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&slow_start); __ bind(&slow_start);
// Check that the function is really a JavaScript function. // Check that the function is really a JavaScript function.
// r4: pushed function (to be verified) // r4: pushed function (to be verified)
__ JumpIfSmi(r4, &non_function); __ JumpIfSmi(r4, &slow);
// Goto slow case if we do not have a function. // Goto slow case if we do not have a function.
__ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
......
...@@ -2399,6 +2399,12 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { ...@@ -2399,6 +2399,12 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
} }
// Loads the global proxy of the current context into |dst|: first the global
// object from the context, then its global-proxy field.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  LoadP(dst, GlobalObjectOperand());
  LoadP(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset));
}
void MacroAssembler::LoadTransitionedArrayMapConditional( void MacroAssembler::LoadTransitionedArrayMapConditional(
ElementsKind expected_kind, ElementsKind transitioned_kind, ElementsKind expected_kind, ElementsKind transitioned_kind,
Register map_in_out, Register scratch, Label* no_map_match) { Register map_in_out, Register scratch, Label* no_map_match) {
...@@ -2575,6 +2581,17 @@ void MacroAssembler::AssertName(Register object) { ...@@ -2575,6 +2581,17 @@ void MacroAssembler::AssertName(Register object) {
} }
// Aborts if |object| is a Smi or not a JSFunction. Only emitted with
// --debug-code; a no-op in release builds. Clobbers r0 and cr0.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(ne, kOperandIsASmiAndNotAFunction, cr0);
    CompareObjectType(object, r0, r0, JS_FUNCTION_TYPE);
    Check(eq, kOperandIsNotAFunction);
  }
}
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
Register scratch) { Register scratch) {
if (emit_debug_code()) { if (emit_debug_code()) {
......
...@@ -415,6 +415,9 @@ class MacroAssembler : public Assembler { ...@@ -415,6 +415,9 @@ class MacroAssembler : public Assembler {
void LoadContext(Register dst, int context_chain_length); void LoadContext(Register dst, int context_chain_length);
// Load the global proxy from the current context.
void LoadGlobalProxy(Register dst);
// Conditionally load the cached Array transitioned map of type // Conditionally load the cached Array transitioned map of type
// transitioned_kind from the native context if the map in register // transitioned_kind from the native context if the map in register
// map_in_out is the cached Array map in the native context of // map_in_out is the cached Array map in the native context of
...@@ -781,7 +784,23 @@ class MacroAssembler : public Assembler { ...@@ -781,7 +784,23 @@ class MacroAssembler : public Assembler {
// Compare the object in a register to a value from the root list. // Compare the object in a register to a value from the root list.
// Uses the ip register as scratch. // Uses the ip register as scratch.
void CompareRoot(Register obj, Heap::RootListIndex index); void CompareRoot(Register obj, Heap::RootListIndex index);
// Push the root-list value at |index| onto the stack. Clobbers r0.
void PushRoot(Heap::RootListIndex index) {
  LoadRoot(r0, index);
  Push(r0);
}
// Compare the object in a register to a value and jump if they are equal.
// Uses the ip register as scratch (via CompareRoot).
void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) {
  CompareRoot(with, index);
  beq(if_equal);
}
// Compare the object in a register to a value and jump if they are not equal.
// Uses the ip register as scratch (via CompareRoot).
void JumpIfNotRoot(Register with, Heap::RootListIndex index,
                   Label* if_not_equal) {
  CompareRoot(with, index);
  bne(if_not_equal);
}
// Load and check the instance type of an object for being a string. // Load and check the instance type of an object for being a string.
// Loads the type into the second argument register. // Loads the type into the second argument register.
...@@ -1288,6 +1307,8 @@ class MacroAssembler : public Assembler { ...@@ -1288,6 +1307,8 @@ class MacroAssembler : public Assembler {
// Abort execution if argument is not a name, enabled via --debug-code. // Abort execution if argument is not a name, enabled via --debug-code.
void AssertName(Register object); void AssertName(Register object);
void AssertFunction(Register object);
// Abort execution if argument is not undefined or an AllocationSite, enabled // Abort execution if argument is not undefined or an AllocationSite, enabled
// via --debug-code. // via --debug-code.
void AssertUndefinedOrAllocationSite(Register object, Register scratch); void AssertUndefinedOrAllocationSite(Register object, Register scratch);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment