Commit 018ecfd1 authored by zhengxing.li's avatar zhengxing.li Committed by Commit bot

X87: Remove CallFunctionStub, always call through the Call builtin (also from CallIC).

  port 44c44521 (r31823).

  original commit message:
  This fixes receiver conversion since the Call builtin does it correctly.

BUG=

Review URL: https://codereview.chromium.org/1416673009

Cr-Commit-Position: refs/heads/master@{#31848}
parent 1db43a84
......@@ -4181,7 +4181,6 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
DCHECK(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
CallFunctionFlags flags = instr->hydrogen()->function_flags();
if (instr->hydrogen()->HasVectorAndSlot()) {
Register slot_register = ToRegister(instr->temp_slot());
Register vector_register = ToRegister(instr->temp_vector());
......@@ -4195,15 +4194,12 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
__ mov(vector_register, vector);
__ mov(slot_register, Immediate(Smi::FromInt(index)));
CallICState::CallType call_type =
(flags & CALL_AS_METHOD) ? CallICState::METHOD : CallICState::FUNCTION;
Handle<Code> ic =
CodeFactory::CallICInOptimizedCode(isolate(), arity, call_type).code();
CodeFactory::CallICInOptimizedCode(isolate(), arity).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
} else {
CallFunctionStub stub(isolate(), arity, flags);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ Set(eax, arity);
CallCode(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, instr);
}
}
......
......@@ -1729,7 +1729,6 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
}
......@@ -2041,8 +2040,8 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ mov(edi, eax);
__ mov(Operand(esp, 2 * kPointerSize), edi);
SetCallPosition(expr, 1);
CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ Set(eax, 1);
__ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ Drop(1); // The function is still on the stack; drop it.
......@@ -2711,10 +2710,8 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
Expression* callee = expr->expression();
CallICState::CallType call_type =
callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
// Get the target function.
if (call_type == CallICState::FUNCTION) {
if (callee->IsVariableProxy()) {
{ StackValueContext context(this);
EmitVariableLoad(callee->AsVariableProxy());
PrepareForBailout(callee, NO_REGISTERS);
......@@ -2734,7 +2731,7 @@ void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
__ mov(Operand(esp, kPointerSize), eax);
}
EmitCall(expr, call_type);
EmitCall(expr);
}
......@@ -2771,7 +2768,7 @@ void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
// Stack here:
// - target function
// - this (receiver)
EmitCall(expr, CallICState::METHOD);
EmitCall(expr);
}
......@@ -2794,7 +2791,7 @@ void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
__ push(Operand(esp, 0));
__ mov(Operand(esp, kPointerSize), eax);
EmitCall(expr, CallICState::METHOD);
EmitCall(expr);
}
......@@ -2829,11 +2826,11 @@ void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
// Stack here:
// - target function
// - this (receiver)
EmitCall(expr, CallICState::METHOD);
EmitCall(expr);
}
void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
void FullCodeGenerator::EmitCall(Call* expr) {
// Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
......@@ -2843,7 +2840,7 @@ void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr, arg_count);
Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count).code();
__ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
// Don't assign a type feedback id to the IC, since type feedback is provided
......@@ -2947,9 +2944,9 @@ void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
SetCallPosition(expr, arg_count);
CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ Set(eax, arg_count);
__ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
......@@ -4157,9 +4154,9 @@ void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
int arg_count = args->length();
SetCallPosition(expr, arg_count);
CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ Set(eax, arg_count);
__ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
......
......@@ -1736,121 +1736,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
}
// Jumps to |cont| (skipping receiver wrapping) when the callee must receive
// its receiver unmodified: strict-mode functions and natives never have a
// sloppy-mode receiver conversion applied.
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
// ----------- S t a t e -------------
// -- edi : the function to call
// -- edx : the function's shared function info
// -----------------------------------
// Do not transform the receiver for strict mode functions.
__ test_b(FieldOperand(edx, SharedFunctionInfo::kStrictModeByteOffset),
1 << SharedFunctionInfo::kStrictModeBitWithinByte);
__ j(not_equal, cont);
// Do not transform the receiver for natives.
// NOTE(review): the original comment claimed "shared already in ecx", but the
// SharedFunctionInfo is read from edx here — comment corrected.
__ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
1 << SharedFunctionInfo::kNativeBitWithinByte);
__ j(not_equal, cont);
}
// Slow path for a call whose target turned out not to be a JSFunction:
// put the argument count in eax (the Call builtin's calling convention)
// and tail-call the generic Call builtin, which handles non-function
// callees (and throws if the target is not callable).
static void EmitSlowCase(Isolate* isolate, MacroAssembler* masm, int argc) {
__ Set(eax, argc);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// Sloppy-mode receiver conversion: the receiver (in eax) is a primitive, so
// box it with ToObjectStub, patch the boxed value back into the receiver
// stack slot, and resume at |cont|.
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
// Wrap the receiver and patch it back onto the stack.
// edi (the callee) is caller-saved across the stub call, so preserve it
// around the ToObjectStub invocation.
{ FrameScope frame_scope(masm, StackFrame::INTERNAL);
__ push(edi);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ pop(edi);
}
// Overwrite the receiver slot (below the argc arguments) with the wrapped
// object returned in eax.
__ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
__ jmp(cont);
}
// ES6 9.2.1 [[Call]]: class constructors may not be invoked with a plain
// call. Detects that case and diverts to the CallFunction builtin, which
// raises the TypeError; ordinary functions fall through.
static void EmitClassConstructorCallCheck(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edi : the function to call
// -- edx : the function's shared function info
// -----------------------------------
// ClassConstructor Check: ES6 section 9.2.1 [[Call]]
Label non_class_constructor;
// Check whether the current function is a classConstructor.
__ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
SharedFunctionInfo::kClassConstructorBitsWithinByte);
__ j(zero, &non_class_constructor, Label::kNear);
// If we call a classConstructor Function throw a TypeError
// indirectly via the CallFunction builtin.
__ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
__ bind(&non_class_constructor);
}
// Emits a monomorphic-style call sequence without type feedback.
//   argc           - static argument count of the call site.
//   needs_checks   - emit the is-a-JSFunction guard and the slow path for
//                    non-function callees.
//   call_as_method - the receiver slot holds a real receiver that may need
//                    sloppy-mode boxing (see EmitWrapCase).
// On entry edi holds the callee; clobbers eax, ecx, edx.
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// edi : the function to call
Label slow, wrap, cont;
if (needs_checks) {
// Check that the function really is a JavaScript function.
__ JumpIfSmi(edi, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
__ j(not_equal, &slow);
}
// Load the SharedFunctionInfo into edx; EmitClassConstructorCallCheck and
// EmitContinueIfStrictOrNative both expect it there.
__ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
EmitClassConstructorCallCheck(masm);
// Fast-case: Just invoke the function.
ParameterCount actual(argc);
if (call_as_method) {
if (needs_checks) {
// Strict-mode / native callees skip receiver conversion entirely.
EmitContinueIfStrictOrNative(masm, &cont);
}
// Load the receiver from the stack.
__ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
if (needs_checks) {
// Primitive receivers (smis or objects below FIRST_SPEC_OBJECT_TYPE)
// must be boxed before the call.
__ JumpIfSmi(eax, &wrap);
__ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
__ j(below, &wrap);
} else {
// Without checks the receiver is unconditionally wrapped.
__ jmp(&wrap);
}
__ bind(&cont);
}
// Tail-invoke the callee with the static argument count.
__ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm->isolate(), masm, argc);
}
if (call_as_method) {
// Box the primitive receiver, then jump back to &cont above.
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}
// Stub entry point: forwards the stub's encoded parameters (argument count,
// whether type checks are required, and method-call receiver handling) to
// the shared no-feedback call emitter above.
void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
}
void CallConstructStub::Generate(MacroAssembler* masm) {
// eax : number of arguments
// ebx : feedback vector
......@@ -1943,9 +1828,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
const int generic_offset =
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
Label extra_checks_or_miss, slow_start;
Label slow, wrap, cont;
Label have_js_function;
Label extra_checks_or_miss, call;
int argc = arg_count();
ParameterCount actual(argc);
......@@ -1979,40 +1862,15 @@ void CallICStub::Generate(MacroAssembler* masm) {
FixedArray::kHeaderSize + kPointerSize),
Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ bind(&have_js_function);
__ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
EmitClassConstructorCallCheck(masm);
if (CallAsMethod()) {
EmitContinueIfStrictOrNative(masm, &cont);
// Load the receiver from the stack.
__ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
__ JumpIfSmi(eax, &wrap);
__ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
__ j(below, &wrap);
__ bind(&cont);
}
__ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
__ bind(&slow);
EmitSlowCase(isolate, masm, argc);
if (CallAsMethod()) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
__ bind(&call);
__ Set(eax, argc);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site;
__ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
__ j(equal, &slow_start);
__ j(equal, &call);
// Check if we have an allocation site.
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
......@@ -2044,7 +1902,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// We have to update statistics for runtime profiling.
__ sub(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1)));
__ add(FieldOperand(ebx, generic_offset), Immediate(Smi::FromInt(1)));
__ jmp(&slow_start);
__ jmp(&call);
__ bind(&uninitialized);
......@@ -2081,23 +1939,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ pop(edi);
}
__ jmp(&have_js_function);
__ jmp(&call);
// We are here because tracing is on or we encountered a MISS case we can't
// handle here.
__ bind(&miss);
GenerateMiss(masm);
// the slow case
__ bind(&slow_start);
// Check that the function really is a JavaScript function.
__ JumpIfSmi(edi, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
__ j(not_equal, &slow);
__ jmp(&have_js_function);
__ jmp(&call);
// Unreachable
__ int3();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment