Commit fecba0dc authored by bjaideep, committed by Commit bot

PPC/s390: [stubs] Also port the CallICStub to CSA.

Port d68dfe86

Original Commit Message:

    Port the Call feedback machinery from the interpreter to the CallICStub
    as a second step to unify the feedback collection. This removes a lot of
    hand-written native code and makes the runtime miss handler obsolete.
    The next step will be to use the CallICStub from the interpreter as
    well.

R=bmeurer@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:5049
LOG=N

Review-Url: https://codereview.chromium.org/2670003003
Cr-Commit-Position: refs/heads/master@{#42896}
parent 5805526d
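For orientation before the hunks: the hand-written stubs deleted below maintain a per-call-site invocation count in the feedback vector. Here is a minimal, self-contained C++ sketch of that bookkeeping — illustrative names and a simplified layout only, not V8's actual types or API:

```cpp
// Sketch of the call-count bookkeeping performed by the deleted
// IncrementCallCount helpers below. Assumed simplified layout: the
// feedback vector stores the call-site feedback at `slot` and the call
// count, as a Smi, at `slot + 1` (FixedArray::kHeaderSize + kPointerSize
// past the slot entry in the real stubs).
#include <cstdint>
#include <vector>

using Tagged = intptr_t;  // tagged word: a Smi carries its value shifted left

inline Tagged SmiFromInt(int value) { return static_cast<Tagged>(value) << 1; }
inline int SmiToInt(Tagged smi) { return static_cast<int>(smi >> 1); }

// Load the count one element past the feedback slot, add Smi(1), store back.
void IncrementCallCount(std::vector<Tagged>& feedback_vector, size_t slot) {
  Tagged count = feedback_vector[slot + 1];
  feedback_vector[slot + 1] = SmiFromInt(SmiToInt(count) + 1);
}
```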
@@ -1905,7 +1905,7 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   Handle<Code> code =
       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
           .code();
-  __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
+  __ mov(r6, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
   __ mov(r3, Operand(arg_count));
   CallIC(code);
@@ -1860,7 +1860,7 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   Handle<Code> code =
       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
           .code();
-  __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot()));
+  __ Load(r5, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
   __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
   __ mov(r2, Operand(arg_count));
   CallIC(code);
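Both full-codegen hunks above make the same change: the feedback slot is now passed to the CallICTrampoline as an untagged integer (IntFromSlot) rather than as a Smi (SmiFromSlot). A purely illustrative comparison of the two representations (not V8 code):

```cpp
// Illustrative only: the value the old vs. new instruction materializes.
#include <cstdint>

void SlotTaggingExample(int slot) {
  intptr_t smi_slot = static_cast<intptr_t>(slot) << 1;  // old: Smi-tagged slot
  intptr_t raw_slot = static_cast<intptr_t>(slot);       // new: untagged slot index
  (void)smi_slot;
  (void)raw_slot;
}
```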
@@ -1859,190 +1859,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
-// Note: feedback_vector and slot are clobbered after the call.
-static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
-                               Register slot, Register temp) {
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ SmiToPtrArrayOffset(temp, slot);
-  __ add(feedback_vector, feedback_vector, temp);
-  __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
-  __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
-  __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
-}
-
-void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
-  // r3 - number of arguments
-  // r4 - function
-  // r6 - slot id
-  // r5 - vector
-  // r7 - allocation site (loaded from vector[slot])
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
-  __ cmp(r4, r8);
-  __ bne(miss);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, r5, r6, r0);
-
-  __ mr(r5, r7);
-  __ mr(r6, r4);
-  ArrayConstructorStub stub(masm->isolate());
-  __ TailCallStub(&stub);
-}
-
-void CallICStub::Generate(MacroAssembler* masm) {
-  // r3 - number of arguments
-  // r4 - function
-  // r6 - slot id (Smi)
-  // r5 - vector
-  Label extra_checks_or_miss, call, call_function, call_count_incremented;
-
-  // The checks. First, does r4 match the recorded monomorphic target?
-  __ SmiToPtrArrayOffset(r9, r6);
-  __ add(r9, r5, r9);
-  __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize));
-
-  // We don't know that we have a weak cell. We might have a private symbol
-  // or an AllocationSite, but the memory is safe to examine.
-  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
-  // FixedArray.
-  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
-  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
-  // computed, meaning that it can't appear to be a pointer. If the low bit is
-  // 0, then hash is computed, but the 0 bit prevents the field from appearing
-  // to be a pointer.
-  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
-  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
-                    WeakCell::kValueOffset &&
-                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
-
-  __ LoadP(r8, FieldMemOperand(r7, WeakCell::kValueOffset));
-  __ cmp(r4, r8);
-  __ bne(&extra_checks_or_miss);
-
-  // The compare above could have been a SMI/SMI comparison. Guard against this
-  // convincing us that we have a monomorphic JSFunction.
-  __ JumpIfSmi(r4, &extra_checks_or_miss);
-
-  __ bind(&call_function);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, r5, r6, r0);
-
-  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
-                                                    tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&extra_checks_or_miss);
-  Label uninitialized, miss, not_allocation_site;
-
-  __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex);
-  __ beq(&call);
-
-  // Verify that r7 contains an AllocationSite
-  __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset));
-  __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
-  __ bne(&not_allocation_site);
-
-  // We have an allocation site.
-  HandleArrayCase(masm, &miss);
-
-  __ bind(&not_allocation_site);
-
-  // The following cases attempt to handle MISS cases without going to the
-  // runtime.
-  if (FLAG_trace_ic) {
-    __ b(&miss);
-  }
-
-  __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex);
-  __ beq(&uninitialized);
-
-  // We are going megamorphic. If the feedback is a JSFunction, it is fine
-  // to handle it here. More complex cases are dealt with in the runtime.
-  __ AssertNotSmi(r7);
-  __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE);
-  __ bne(&miss);
-  __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
-  __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0);
-
-  __ bind(&call);
-
-  // Increment the call count for megamorphic function calls.
-  IncrementCallCount(masm, r5, r6, r0);
-
-  __ bind(&call_count_incremented);
-  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&uninitialized);
-
-  // We are going monomorphic, provided we actually have a JSFunction.
-  __ JumpIfSmi(r4, &miss);
-
-  // Goto miss case if we do not have a function.
-  __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
-  __ bne(&miss);
-
-  // Make sure the function is not the Array() function, which requires special
-  // behavior on MISS.
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7);
-  __ cmp(r4, r7);
-  __ beq(&miss);
-
-  // Make sure the function belongs to the same native context.
-  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset));
-  __ LoadP(r7, ContextMemOperand(r7, Context::NATIVE_CONTEXT_INDEX));
-  __ LoadP(ip, NativeContextMemOperand());
-  __ cmp(r7, ip);
-  __ bne(&miss);
-
-  // Store the function. Use a stub since we need a frame for allocation.
-  // r5 - vector
-  // r6 - slot
-  // r4 - function
-  {
-    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-    CreateWeakCellStub create_stub(masm->isolate());
-    __ SmiTag(r3);
-    __ Push(r3, r5, r6, cp, r4);
-    __ CallStub(&create_stub);
-    __ Pop(r5, r6, cp, r4);
-    __ Pop(r3);
-    __ SmiUntag(r3);
-  }
-
-  __ b(&call_function);
-
-  // We are here because tracing is on or we encountered a MISS case we can't
-  // handle here.
-  __ bind(&miss);
-  GenerateMiss(masm);
-
-  __ b(&call_count_incremented);
-}
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-
-  // Preserve the number of arguments as Smi.
-  __ SmiTag(r3);
-
-  // Push the receiver and the function and feedback info.
-  __ Push(r3, r4, r5, r6);
-
-  // Call the entry.
-  __ CallRuntime(Runtime::kCallIC_Miss);
-
-  // Move result to r4 and exit the internal frame.
-  __ mr(r4, r3);
-
-  // Restore number of arguments.
-  __ Pop(r3);
-  __ SmiUntag(r3);
-}
-
 // StringCharCodeAtGenerator
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
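The CallICStub::Generate body removed above implements a small state machine over the feedback slot: an uninitialized site becomes monomorphic (a weak cell holding the target), a monomorphic site that sees a different target generalizes to megamorphic, and calls to Array() are tracked separately through an AllocationSite. A hedged sketch of the core transitions, with an illustrative enum rather than V8's actual representation (and omitting the Array()/AllocationSite special case):

```cpp
// Illustrative model of the CallIC feedback transitions implemented by the
// deleted CallICStub::Generate; names and types here are not V8's.
enum class CallFeedback { kUninitialized, kMonomorphic, kMegamorphic };

CallFeedback Transition(CallFeedback state, bool same_function_as_recorded) {
  switch (state) {
    case CallFeedback::kUninitialized:
      // First call: record the target in a weak cell -> monomorphic.
      return CallFeedback::kMonomorphic;
    case CallFeedback::kMonomorphic:
      // Same target stays monomorphic; a different target generalizes to
      // megamorphic (the "megamorphic" sentinel symbol in the slot).
      return same_function_as_recorded ? CallFeedback::kMonomorphic
                                       : CallFeedback::kMegamorphic;
    case CallFeedback::kMegamorphic:
      return CallFeedback::kMegamorphic;  // terminal state
  }
  return state;  // unreachable
}
```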
@@ -221,6 +221,13 @@ void AllocateHeapNumberDescriptor::InitializePlatformSpecific(
 SIMD128_TYPES(SIMD128_ALLOC_DESC)
 #undef SIMD128_ALLOC_DESC
 
+void ArrayConstructorDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  // kTarget, kNewTarget, kActualArgumentsCount, kAllocationSite
+  Register registers[] = {r4, r6, r3, r5};
+  data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
+}
+
 void ArrayNoArgumentConstructorDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   // register state
@@ -1860,188 +1860,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
-// Note: feedback_vector and slot are clobbered after the call.
-static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
-                               Register slot, Register temp) {
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ SmiToPtrArrayOffset(temp, slot);
-  __ AddP(feedback_vector, feedback_vector, temp);
-  __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
-  __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
-  __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
-}
-
-void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
-  // r2 - number of arguments
-  // r3 - function
-  // r5 - slot id
-  // r4 - vector
-  // r6 - allocation site (loaded from vector[slot])
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7);
-  __ CmpP(r3, r7);
-  __ bne(miss);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, r4, r5, r1);
-
-  __ LoadRR(r4, r6);
-  __ LoadRR(r5, r3);
-  ArrayConstructorStub stub(masm->isolate());
-  __ TailCallStub(&stub);
-}
-
-void CallICStub::Generate(MacroAssembler* masm) {
-  // r2 - number of arguments
-  // r3 - function
-  // r5 - slot id (Smi)
-  // r4 - vector
-  Label extra_checks_or_miss, call, call_function, call_count_incremented;
-
-  // The checks. First, does r3 match the recorded monomorphic target?
-  __ SmiToPtrArrayOffset(r8, r5);
-  __ AddP(r8, r4, r8);
-  __ LoadP(r6, FieldMemOperand(r8, FixedArray::kHeaderSize));
-
-  // We don't know that we have a weak cell. We might have a private symbol
-  // or an AllocationSite, but the memory is safe to examine.
-  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
-  // FixedArray.
-  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
-  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
-  // computed, meaning that it can't appear to be a pointer. If the low bit is
-  // 0, then hash is computed, but the 0 bit prevents the field from appearing
-  // to be a pointer.
-  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
-  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
-                    WeakCell::kValueOffset &&
-                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
-
-  __ LoadP(r7, FieldMemOperand(r6, WeakCell::kValueOffset));
-  __ CmpP(r3, r7);
-  __ bne(&extra_checks_or_miss, Label::kNear);
-
-  // The compare above could have been a SMI/SMI comparison. Guard against this
-  // convincing us that we have a monomorphic JSFunction.
-  __ JumpIfSmi(r3, &extra_checks_or_miss);
-
-  __ bind(&call_function);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, r4, r5, r1);
-
-  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
-                                                    tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&extra_checks_or_miss);
-  Label uninitialized, miss, not_allocation_site;
-
-  __ CompareRoot(r6, Heap::kmegamorphic_symbolRootIndex);
-  __ beq(&call);
-
-  // Verify that r6 contains an AllocationSite
-  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
-  __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex);
-  __ bne(&not_allocation_site);
-
-  // We have an allocation site.
-  HandleArrayCase(masm, &miss);
-
-  __ bind(&not_allocation_site);
-
-  // The following cases attempt to handle MISS cases without going to the
-  // runtime.
-  if (FLAG_trace_ic) {
-    __ b(&miss);
-  }
-
-  __ CompareRoot(r6, Heap::kuninitialized_symbolRootIndex);
-  __ beq(&uninitialized);
-
-  // We are going megamorphic. If the feedback is a JSFunction, it is fine
-  // to handle it here. More complex cases are dealt with in the runtime.
-  __ AssertNotSmi(r6);
-  __ CompareObjectType(r6, r7, r7, JS_FUNCTION_TYPE);
-  __ bne(&miss);
-  __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
-  __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0);
-
-  __ bind(&call);
-
-  // Increment the call count for megamorphic function calls.
-  IncrementCallCount(masm, r4, r5, r1);
-
-  __ bind(&call_count_incremented);
-  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&uninitialized);
-
-  // We are going monomorphic, provided we actually have a JSFunction.
-  __ JumpIfSmi(r3, &miss);
-
-  // Goto miss case if we do not have a function.
-  __ CompareObjectType(r3, r6, r6, JS_FUNCTION_TYPE);
-  __ bne(&miss);
-
-  // Make sure the function is not the Array() function, which requires special
-  // behavior on MISS.
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r6);
-  __ CmpP(r3, r6);
-  __ beq(&miss);
-
-  // Make sure the function belongs to the same native context.
-  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kContextOffset));
-  __ LoadP(r6, ContextMemOperand(r6, Context::NATIVE_CONTEXT_INDEX));
-  __ LoadP(ip, NativeContextMemOperand());
-  __ CmpP(r6, ip);
-  __ bne(&miss);
-
-  // Store the function. Use a stub since we need a frame for allocation.
-  // r4 - vector
-  // r5 - slot
-  // r3 - function
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    CreateWeakCellStub create_stub(masm->isolate());
-    __ SmiTag(r2);
-    __ Push(r2, r4, r5, cp, r3);
-    __ CallStub(&create_stub);
-    __ Pop(r4, r5, cp, r3);
-    __ Pop(r2);
-    __ SmiUntag(r2);
-  }
-
-  __ b(&call_function);
-
-  // We are here because tracing is on or we encountered a MISS case we can't
-  // handle here.
-  __ bind(&miss);
-  GenerateMiss(masm);
-
-  __ b(&call_count_incremented);
-}
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  FrameScope scope(masm, StackFrame::INTERNAL);
-
-  // Preserve the number of arguments as Smi.
-  __ SmiTag(r2);
-
-  // Push the receiver and the function and feedback info.
-  __ Push(r2, r3, r4, r5);
-
-  // Call the entry.
-  __ CallRuntime(Runtime::kCallIC_Miss);
-
-  // Move result to r3 and exit the internal frame.
-  __ LoadRR(r3, r2);
-
-  // Restore number of arguments.
-  __ Pop(r2);
-  __ SmiUntag(r2);
-}
-
 // StringCharCodeAtGenerator
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
@@ -207,6 +207,13 @@ void AllocateHeapNumberDescriptor::InitializePlatformSpecific(
 SIMD128_TYPES(SIMD128_ALLOC_DESC)
 #undef SIMD128_ALLOC_DESC
 
+void ArrayConstructorDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  // kTarget, kNewTarget, kActualArgumentsCount, kAllocationSite
+  Register registers[] = {r3, r5, r2, r4};
+  data->InitializePlatformSpecific(arraysize(registers), registers, NULL);
+}
+
 void ArrayNoArgumentConstructorDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   // register state