Commit 30e5a737 authored by plind44@gmail.com

MIPS: CallICStub with a "never patch" approach by default.

Port r21093 (21e3836)

Original commit message:
Patching will occur only when custom feedback needs to be gathered (future CLs).

Now rebased on https://codereview.chromium.org/254623002/, which moves the type feedback vector to the SharedFunctionInfo.

BUG=
R=plind44@gmail.com

Review URL: https://codereview.chromium.org/260753004

Patch from Balazs Kilvady <kilvadyb@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21105 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 56d0b975
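For orientation, below is a minimal, hypothetical C++ model of the call-site state machine that the new CallICStub encodes in MIPS assembly in the diff that follows (uninitialized -> monomorphic -> megamorphic, with the stub code itself never being patched). The types and helper names here (FeedbackSlot, Sentinel, Dispatch) are illustrative only and are not part of V8's API.

#include <cstdio>
#include <string>
#include <variant>

// Sentinels that the feedback vector slot can hold besides a recorded target.
enum class Sentinel { kUninitialized, kMegamorphic };
using JSFunction = std::string;  // stand-in for a JSFunction heap object
using FeedbackSlot = std::variant<Sentinel, JSFunction>;

// One call site: consult the feedback slot, take the fast path on a
// monomorphic hit, otherwise update the slot and fall back to a generic call.
void Dispatch(FeedbackSlot& slot, const JSFunction& callee) {
  if (const JSFunction* target = std::get_if<JSFunction>(&slot)) {
    if (*target == callee) {
      std::printf("monomorphic fast call to %s\n", callee.c_str());
      return;
    }
    // Recorded target does not match: give up and go megamorphic.
    slot = Sentinel::kMegamorphic;
    std::printf("transition to megamorphic, generic call to %s\n", callee.c_str());
    return;
  }
  switch (std::get<Sentinel>(slot)) {
    case Sentinel::kMegamorphic:    // stay generic forever
      std::printf("megamorphic: generic call to %s\n", callee.c_str());
      return;
    case Sentinel::kUninitialized:  // miss: record the first target seen
      slot = callee;
      std::printf("miss: recorded %s, calling it\n", callee.c_str());
      return;
  }
}

int main() {
  FeedbackSlot slot = Sentinel::kUninitialized;
  Dispatch(slot, "f");  // miss -> slot becomes monomorphic on f
  Dispatch(slot, "f");  // fast path
  Dispatch(slot, "g");  // mismatch -> megamorphic sentinel written
  Dispatch(slot, "f");  // stays generic
}

The assembly in CallICStub::Generate below performs the same three-way check against the feedback vector slot loaded by EmitLoadTypeFeedbackVector.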
@@ -799,7 +799,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   if (is_construct) {
     // No type feedback cell is available
     __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-    CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+    CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
     __ CallStub(&stub);
   } else {
     ParameterCount actual(a0);
...
@@ -3017,11 +3017,61 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+  __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset));
+
+  // Do not transform the receiver for strict mode functions.
+  int32_t strict_mode_function_mask =
+      1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
+
+  // Do not transform the receiver for native (Compilerhints already in a3).
+  int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
+
+  __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
+  __ Branch(cont, ne, at, Operand(zero_reg));
+}
+
+
+static void EmitSlowCase(MacroAssembler* masm,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ Branch(non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ push(a1);  // put proxy as additional argument
+  __ li(a0, Operand(argc + 1, RelocInfo::NONE32));
+  __ mov(a2, zero_reg);
+  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ sw(a1, MemOperand(sp, argc * kPointerSize));
+  __ li(a0, Operand(argc));  // Set up the number of arguments.
+  __ mov(a2, zero_reg);
+  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+          RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(a1, a3);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ pop(a1);
+  }
+  __ Branch(USE_DELAY_SLOT, cont);
+  __ sw(v0, MemOperand(sp, argc * kPointerSize));
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // a1 : the function to call
-  // a2 : feedback vector
-  // a3 : (only if a2 is not the megamorphic symbol) slot in feedback
-  //      vector (Smi)
   Label slow, non_function, wrap, cont;
 
   if (NeedsChecks()) {
@@ -3032,34 +3082,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ GetObjectType(a1, t0, t0);
     __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in a2 we need
-      // to set a2 to undefined.
-      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-    }
   }
 
   // Fast-case: Invoke the function now.
   // a1: pushed function
-  ParameterCount actual(argc_);
+  int argc = argc_;
+  ParameterCount actual(argc);
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions and natives.
-      __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
-      __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset));
-      int32_t strict_mode_function_mask =
-          1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
-      int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
-      __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
-      __ Branch(&cont, ne, at, Operand(zero_reg));
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
 
     // Compute the receiver in sloppy mode.
-    __ lw(a3, MemOperand(sp, argc_ * kPointerSize));
+    __ lw(a3, MemOperand(sp, argc * kPointerSize));
 
     if (NeedsChecks()) {
       __ JumpIfSmi(a3, &wrap);
@@ -3071,56 +3107,19 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ bind(&cont);
   }
 
   __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());
 
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
-                isolate()->heap()->megamorphic_symbol());
-      __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
-      __ Addu(t1, a2, Operand(t1));
-      __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
-      __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize));
-    }
-    // Check for function proxy.
-    __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
-    __ push(a1);  // Put proxy as additional argument.
-    __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
-    __ li(a2, Operand(0, RelocInfo::NONE32));
-    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-          isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ Jump(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ sw(a1, MemOperand(sp, argc_ * kPointerSize));
-    __ li(a0, Operand(argc_));  // Set up the number of arguments.
-    __ li(a2, Operand(0, RelocInfo::NONE32));
-    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
-    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-            RelocInfo::CODE_TARGET);
+    EmitSlowCase(masm, argc, &non_function);
   }
 
   if (CallAsMethod()) {
     __ bind(&wrap);
     // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(a1, a3);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ pop(a1);
-    }
-    __ mov(a0, v0);
-    __ sw(a0, MemOperand(sp, argc_ * kPointerSize));
-    __ jmp(&cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }
@@ -3189,6 +3188,110 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
 
 
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ lw(vector, FieldMemOperand(vector,
+                                JSFunction::kSharedFunctionInfoOffset));
+  __ lw(vector, FieldMemOperand(vector,
+                                SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // a1 - function
+  // a3 - slot id (Smi)
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, a2);
+
+  // The checks. First, does a1 match the recorded monomorphic target?
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, a2, Operand(t0));
+  __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
+  __ Branch(&extra_checks_or_miss, ne, a1, Operand(t0));
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+
+    // Compute the receiver in sloppy mode.
+    __ lw(a3, MemOperand(sp, argc * kPointerSize));
+
+    __ JumpIfSmi(a3, &wrap);
+    __ GetObjectType(a3, t0, t0);
+    __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(masm, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
+  __ Branch(&slow_start, eq, t0, Operand(at));
+  __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
+  __ Branch(&miss, eq, t0, Operand(at));
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+    __ Addu(t0, a2, Operand(t0));
+    __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
+    __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
+    __ Branch(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // The slow case.
+  __ bind(&slow_start);
+  // Check that the function is really a JavaScript function.
+  // a1: pushed function (to be verified)
+  __ JumpIfSmi(a1, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ GetObjectType(a1, t0, t0);
+  __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
+  __ Branch(&have_js_function);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ lw(t0, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(t0, a1, a2, a3);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to a1 and exit the internal frame.
+    __ mov(a1, v0);
+  }
+}
+
+
 // StringCharCodeAtGenerator.
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   Label flat_string;
...
@@ -163,6 +163,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 }
 
 
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- a1 : function
+  //  -- a3 : slot in feedback array (smi)
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, a1.bit() | a3.bit(), 0);
+}
+
+
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC load (from ic-mips.cc).
   // ----------- S t a t e -------------
@@ -219,15 +229,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Calling convention for IC call (from ic-mips.cc).
-  // ----------- S t a t e -------------
-  //  -- a2: name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a2.bit(), 0);
-}
-
-
 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
   // In places other than IC call sites it is expected that v0 is TOS which
   // is an object - this is not generally the case so this should be used with
@@ -245,17 +246,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-mips.cc).
-  // ----------- S t a t e -------------
-  //  -- a1 : function
-  //  -- a2 : feedback array
-  //  -- a3 : slot in feedback array
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit() | a3.bit(), 0);
-}
-
-
 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
   // Calling convention for CallConstructStub (from code-stubs-mips.cc).
   // ----------- S t a t e -------------
...
@@ -2624,14 +2624,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
-  CallFunctionFlags flags;
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
+
   // Get the target function.
-  if (callee->IsVariableProxy()) {
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2639,7 +2640,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2650,39 +2650,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     __ lw(at, MemOperand(sp, 0));
     __ push(at);
     __ sw(v0, MemOperand(sp, kPointerSize));
-    flags = CALL_AS_METHOD;
   }
 
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, v0);
+  EmitCall(expr, call_type);
 }
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);
 
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2695,28 +2675,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   __ push(at);
   __ sw(v0, MemOperand(sp, kPointerSize));
 
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-  // Restore context register.
-  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, v0);
+  EmitCall(expr, CallIC::METHOD);
 }
 
 
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2724,16 +2688,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
 
-  __ li(a2, FeedbackVector());
+  // Record source position of the IC call.
+  SetSourcePosition(expr->position());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
   __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
-
-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);
   RecordJSReturnSite(expr);
   // Restore context register.
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2815,7 +2780,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, v0);
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
     VariableProxy* proxy = callee->AsVariableProxy();
@@ -2854,16 +2819,16 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     // The receiver is either the global receiver or an object found
     // by LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
     { PreservePositionScope scope(masm()->positions_recorder());
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
   } else {
     ASSERT(call_type == Call::OTHER_CALL);
@@ -2874,7 +2839,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
     __ push(a1);
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }
 
 #ifdef DEBUG
@@ -2920,7 +2885,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(v0);
...
@@ -3981,7 +3981,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   __ li(a0, Operand(instr->arity()));
   // No cell in a2 for construct type feedback in optimized code
   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 }
...