Commit 433db377 authored by erikcorry's avatar erikcorry

MIPS: port Implement function proxies (except for their use as constructors).

port r9258 (c8709a9)

Note on mips implementation: Arm reg r4 (call type) normally maps to mips
reg t0. We had already used t0 as a temp in Generate_FunctionCall() and
Generate_FunctionApply(), so I replaced that existing t0 usage with t3, and
now use t0 only for call type.

Original commit message:
Introduce new %Apply native.
Extend Execution::Call to optionally handle receiver rewriting (needed for %Apply).
Fix Function.prototype.bind for functions that have .apply modified.

Landing http://codereview.chromium.org/7891033/ for Paul Lind.


git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9316 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 4c64b160
...@@ -1200,19 +1200,20 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1200,19 +1200,20 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 2. Get the function to call (passed as receiver) from the stack, check // 2. Get the function to call (passed as receiver) from the stack, check
// if it is a function. // if it is a function.
// a0: actual number of arguments // a0: actual number of arguments
Label non_function; Label slow, non_function;
__ sll(at, a0, kPointerSizeLog2); __ sll(at, a0, kPointerSizeLog2);
__ addu(at, sp, at); __ addu(at, sp, at);
__ lw(a1, MemOperand(at)); __ lw(a1, MemOperand(at));
__ And(at, a1, Operand(kSmiTagMask)); __ And(at, a1, Operand(kSmiTagMask));
__ Branch(&non_function, eq, at, Operand(zero_reg)); __ Branch(&non_function, eq, at, Operand(zero_reg));
__ GetObjectType(a1, a2, a2); __ GetObjectType(a1, a2, a2);
__ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_TYPE)); __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
// 3a. Patch the first argument if necessary when calling a function. // 3a. Patch the first argument if necessary when calling a function.
// a0: actual number of arguments // a0: actual number of arguments
// a1: function // a1: function
Label shift_arguments; Label shift_arguments;
__ li(t0, Operand(0, RelocInfo::NONE)); // Indicate regular JS_FUNCTION.
{ Label convert_to_object, use_global_receiver, patch_receiver; { Label convert_to_object, use_global_receiver, patch_receiver;
// Change context eagerly in case we need the global receiver. // Change context eagerly in case we need the global receiver.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
...@@ -1220,13 +1221,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1220,13 +1221,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// Do not transform the receiver for strict mode functions. // Do not transform the receiver for strict mode functions.
__ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
__ And(t0, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
kSmiTagSize))); kSmiTagSize)));
__ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
// Do not transform the receiver for native (Compilerhints already in a3). // Do not transform the receiver for native (Compilerhints already in a3).
__ And(t0, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
__ Branch(&shift_arguments, ne, t0, Operand(zero_reg)); __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
// Compute the receiver in non-strict mode. // Compute the receiver in non-strict mode.
// Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
...@@ -1262,10 +1263,11 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1262,10 +1263,11 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ sra(a0, a0, kSmiTagSize); // Un-tag. __ sra(a0, a0, kSmiTagSize); // Un-tag.
// Leave internal frame. // Leave internal frame.
} }
// Restore the function to a1. // Restore the function to a1, and the flag to t0.
__ sll(at, a0, kPointerSizeLog2); __ sll(at, a0, kPointerSizeLog2);
__ addu(at, sp, at); __ addu(at, sp, at);
__ lw(a1, MemOperand(at)); __ lw(a1, MemOperand(at));
__ li(t0, Operand(0, RelocInfo::NONE));
__ Branch(&patch_receiver); __ Branch(&patch_receiver);
// Use the global receiver object from the called function as the // Use the global receiver object from the called function as the
...@@ -1286,25 +1288,31 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1286,25 +1288,31 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ Branch(&shift_arguments); __ Branch(&shift_arguments);
} }
// 3b. Patch the first argument when calling a non-function. The // 3b. Check for function proxy.
__ bind(&slow);
__ li(t0, Operand(1, RelocInfo::NONE)); // Indicate function proxy.
__ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
__ bind(&non_function);
__ li(t0, Operand(2, RelocInfo::NONE)); // Indicate non-function.
// 3c. Patch the first argument when calling a non-function. The
// CALL_NON_FUNCTION builtin expects the non-function callee as // CALL_NON_FUNCTION builtin expects the non-function callee as
// receiver, so overwrite the first argument which will ultimately // receiver, so overwrite the first argument which will ultimately
// become the receiver. // become the receiver.
// a0: actual number of arguments // a0: actual number of arguments
// a1: function // a1: function
__ bind(&non_function); // t0: call type (0: JS function, 1: function proxy, 2: non-function)
// Restore the function in case it has been modified.
__ sll(at, a0, kPointerSizeLog2); __ sll(at, a0, kPointerSizeLog2);
__ addu(a2, sp, at); __ addu(a2, sp, at);
__ sw(a1, MemOperand(a2, -kPointerSize)); __ sw(a1, MemOperand(a2, -kPointerSize));
// Clear a1 to indicate a non-function being called.
__ mov(a1, zero_reg);
// 4. Shift arguments and return address one slot down on the stack // 4. Shift arguments and return address one slot down on the stack
// (overwriting the original receiver). Adjust argument count to make // (overwriting the original receiver). Adjust argument count to make
// the original first argument the new receiver. // the original first argument the new receiver.
// a0: actual number of arguments // a0: actual number of arguments
// a1: function // a1: function
// t0: call type (0: JS function, 1: function proxy, 2: non-function)
__ bind(&shift_arguments); __ bind(&shift_arguments);
{ Label loop; { Label loop;
// Calculate the copy start address (destination). Copy end address is sp. // Calculate the copy start address (destination). Copy end address is sp.
...@@ -1322,14 +1330,26 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -1322,14 +1330,26 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ Pop(); __ Pop();
} }
// 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin. // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
// or a function proxy via CALL_FUNCTION_PROXY.
// a0: actual number of arguments // a0: actual number of arguments
// a1: function // a1: function
{ Label function; // t0: call type (0: JS function, 1: function proxy, 2: non-function)
__ Branch(&function, ne, a1, Operand(zero_reg)); { Label function, non_proxy;
__ mov(a2, zero_reg); // expected arguments is 0 for CALL_NON_FUNCTION __ Branch(&function, eq, t0, Operand(zero_reg));
__ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); // Expected number of arguments is 0 for CALL_NON_FUNCTION.
__ mov(a2, zero_reg);
__ SetCallKind(t1, CALL_AS_METHOD); __ SetCallKind(t1, CALL_AS_METHOD);
__ Branch(&non_proxy, ne, t0, Operand(1));
__ push(a1); // Re-add proxy object as additional argument.
__ Addu(a0, a0, Operand(1));
__ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&non_proxy);
__ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET); RelocInfo::CODE_TARGET);
__ bind(&function); __ bind(&function);
...@@ -1364,8 +1384,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -1364,8 +1384,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
const int kFunctionOffset = 4 * kPointerSize; const int kFunctionOffset = 4 * kPointerSize;
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope frame_scope(masm, StackFrame::INTERNAL);
__ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
__ push(a0); __ push(a0);
__ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
...@@ -1373,7 +1392,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -1373,7 +1392,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Returns (in v0) number of arguments to copy to stack as Smi. // Returns (in v0) number of arguments to copy to stack as Smi.
__ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
// Check the stack for overflow. We are not trying need to catch // Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack // interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked. // limit" is checked.
Label okay; Label okay;
...@@ -1382,8 +1401,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -1382,8 +1401,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// here which will cause a2 to become negative. // here which will cause a2 to become negative.
__ subu(a2, sp, a2); __ subu(a2, sp, a2);
// Check if the arguments will overflow the stack. // Check if the arguments will overflow the stack.
__ sll(t0, v0, kPointerSizeLog2 - kSmiTagSize); __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
__ Branch(&okay, gt, a2, Operand(t0)); // Signed comparison. __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison.
// Out of stack space. // Out of stack space.
__ lw(a1, MemOperand(fp, kFunctionOffset)); __ lw(a1, MemOperand(fp, kFunctionOffset));
...@@ -1398,29 +1417,35 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -1398,29 +1417,35 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ mov(a1, zero_reg); // Initial index. __ mov(a1, zero_reg); // Initial index.
__ push(a1); __ push(a1);
// Get the receiver.
__ lw(a0, MemOperand(fp, kRecvOffset));
// Check that the function is a JS function (otherwise it must be a proxy).
Label push_receiver;
__ lw(a1, MemOperand(fp, kFunctionOffset));
__ GetObjectType(a1, a2, a2);
__ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
// Change context eagerly to get the right global object if necessary. // Change context eagerly to get the right global object if necessary.
__ lw(a0, MemOperand(fp, kFunctionOffset)); __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
__ lw(cp, FieldMemOperand(a0, JSFunction::kContextOffset)); // Load the shared function info while the function is still in a1.
// Load the shared function info while the function is still in a0. __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a1, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
// Compute the receiver. // Compute the receiver.
Label call_to_object, use_global_receiver, push_receiver;
__ lw(a0, MemOperand(fp, kRecvOffset));
// Do not transform the receiver for strict mode functions. // Do not transform the receiver for strict mode functions.
__ lw(a2, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset)); Label call_to_object, use_global_receiver;
__ And(t0, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
__ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
kSmiTagSize))); kSmiTagSize)));
__ Branch(&push_receiver, ne, t0, Operand(zero_reg)); __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
// Do not transform the receiver for native (Compilerhints already in a2). // Do not transform the receiver for native (Compilerhints already in a2).
__ And(t0, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
__ Branch(&push_receiver, ne, t0, Operand(zero_reg)); __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
// Compute the receiver in non-strict mode. // Compute the receiver in non-strict mode.
__ And(t0, a0, Operand(kSmiTagMask)); __ And(t3, a0, Operand(kSmiTagMask));
__ Branch(&call_to_object, eq, t0, Operand(zero_reg)); __ Branch(&call_to_object, eq, t3, Operand(zero_reg));
__ LoadRoot(a1, Heap::kNullValueRootIndex); __ LoadRoot(a1, Heap::kNullValueRootIndex);
__ Branch(&use_global_receiver, eq, a0, Operand(a1)); __ Branch(&use_global_receiver, eq, a0, Operand(a1));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex); __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
...@@ -1481,17 +1506,37 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -1481,17 +1506,37 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ bind(&entry); __ bind(&entry);
__ lw(a1, MemOperand(fp, kLimitOffset)); __ lw(a1, MemOperand(fp, kLimitOffset));
__ Branch(&loop, ne, a0, Operand(a1)); __ Branch(&loop, ne, a0, Operand(a1));
// Invoke the function. // Invoke the function.
Label call_proxy;
ParameterCount actual(a0); ParameterCount actual(a0);
__ sra(a0, a0, kSmiTagSize); __ sra(a0, a0, kSmiTagSize);
__ lw(a1, MemOperand(fp, kFunctionOffset)); __ lw(a1, MemOperand(fp, kFunctionOffset));
__ GetObjectType(a1, a2, a2);
__ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
__ InvokeFunction(a1, actual, CALL_FUNCTION, __ InvokeFunction(a1, actual, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD); NullCallWrapper(), CALL_AS_METHOD);
frame_scope.GenerateLeaveFrame();
__ Ret(USE_DELAY_SLOT);
__ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
// Invoke the function proxy.
__ bind(&call_proxy);
__ push(a1); // Add function proxy as last argument.
__ Addu(a0, a0, Operand(1));
__ li(a2, Operand(0, RelocInfo::NONE));
__ SetCallKind(t1, CALL_AS_METHOD);
__ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
__ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
// Tear down the internal frame and remove function, receiver and args. // Tear down the internal frame and remove function, receiver and args.
} }
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret(); __ Ret(USE_DELAY_SLOT);
__ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
} }
......
...@@ -4956,7 +4956,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) { ...@@ -4956,7 +4956,7 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
void CallFunctionStub::Generate(MacroAssembler* masm) { void CallFunctionStub::Generate(MacroAssembler* masm) {
Label slow; Label slow, non_function;
// The receiver might implicitly be the global object. This is // The receiver might implicitly be the global object. This is
// indicated by passing the hole as the receiver to the call // indicated by passing the hole as the receiver to the call
...@@ -4982,7 +4982,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { ...@@ -4982,7 +4982,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Check that the function is really a JavaScript function. // Check that the function is really a JavaScript function.
// a1: pushed function (to be verified) // a1: pushed function (to be verified)
__ JumpIfSmi(a1, &slow); __ JumpIfSmi(a1, &non_function);
// Get the map of the function object. // Get the map of the function object.
__ GetObjectType(a1, a2, a2); __ GetObjectType(a1, a2, a2);
__ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
...@@ -5010,8 +5010,22 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { ...@@ -5010,8 +5010,22 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Slow-case: Non-function called. // Slow-case: Non-function called.
__ bind(&slow); __ bind(&slow);
// Check for function proxy.
__ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));
__ push(a1); // Put proxy as additional argument.
__ li(a0, Operand(argc_ + 1, RelocInfo::NONE));
__ li(a2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
__ SetCallKind(t1, CALL_AS_FUNCTION);
{
Handle<Code> adaptor =
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
// CALL_NON_FUNCTION expects the non-function callee as receiver (instead // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
// of the original receiver from the call site). // of the original receiver from the call site).
__ bind(&non_function);
__ sw(a1, MemOperand(sp, argc_ * kPointerSize)); __ sw(a1, MemOperand(sp, argc_ * kPointerSize));
__ li(a0, Operand(argc_)); // Setup the number of arguments. __ li(a0, Operand(argc_)); // Setup the number of arguments.
__ mov(a2, zero_reg); __ mov(a2, zero_reg);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment