Commit e9f1f4d3 authored by mbrandy, committed by Commit bot

PPC: [ic] Record call counts for monomorphic calls made with an IC.

Port c1a4f747

Original commit message:
The idea is that TurboFan can use this information for more intelligent
inlining.

R=mvstanton@chromium.org, dstence@us.ibm.com, michael_dawson@ca.ibm.com
BUG=

Review URL: https://codereview.chromium.org/1208093002

Cr-Commit-Position: refs/heads/master@{#29303}
parent afb31199
...@@ -2884,15 +2884,21 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { ...@@ -2884,15 +2884,21 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
__ bne(&miss); __ bne(&miss);
__ mov(r3, Operand(arg_count())); __ mov(r3, Operand(arg_count()));
__ SmiToPtrArrayOffset(r7, r6); __ SmiToPtrArrayOffset(r9, r6);
__ add(r7, r5, r7); __ add(r9, r5, r9);
__ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize));
// Verify that r7 contains an AllocationSite // Verify that r7 contains an AllocationSite
__ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset));
__ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
__ bne(&miss); __ bne(&miss);
// Increment the call count for monomorphic function calls.
const int count_offset = FixedArray::kHeaderSize + kPointerSize;
__ LoadP(r6, FieldMemOperand(r9, count_offset));
__ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0);
__ StoreP(r6, FieldMemOperand(r9, count_offset));
__ mr(r5, r7); __ mr(r5, r7);
__ mr(r6, r4); __ mr(r6, r4);
ArrayConstructorStub stub(masm->isolate(), arg_count()); ArrayConstructorStub stub(masm->isolate(), arg_count());
...@@ -2924,9 +2930,9 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2924,9 +2930,9 @@ void CallICStub::Generate(MacroAssembler* masm) {
ParameterCount actual(argc); ParameterCount actual(argc);
// The checks. First, does r4 match the recorded monomorphic target? // The checks. First, does r4 match the recorded monomorphic target?
__ SmiToPtrArrayOffset(r7, r6); __ SmiToPtrArrayOffset(r9, r6);
__ add(r7, r5, r7); __ add(r9, r5, r9);
__ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize));
// We don't know that we have a weak cell. We might have a private symbol // We don't know that we have a weak cell. We might have a private symbol
// or an AllocationSite, but the memory is safe to examine. // or an AllocationSite, but the memory is safe to examine.
...@@ -2950,6 +2956,12 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2950,6 +2956,12 @@ void CallICStub::Generate(MacroAssembler* masm) {
// convincing us that we have a monomorphic JSFunction. // convincing us that we have a monomorphic JSFunction.
__ JumpIfSmi(r4, &extra_checks_or_miss); __ JumpIfSmi(r4, &extra_checks_or_miss);
// Increment the call count for monomorphic function calls.
const int count_offset = FixedArray::kHeaderSize + kPointerSize;
__ LoadP(r6, FieldMemOperand(r9, count_offset));
__ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0);
__ StoreP(r6, FieldMemOperand(r9, count_offset));
__ bind(&have_js_function); __ bind(&have_js_function);
if (CallAsMethod()) { if (CallAsMethod()) {
EmitContinueIfStrictOrNative(masm, &cont); EmitContinueIfStrictOrNative(masm, &cont);
...@@ -2993,10 +3005,8 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -2993,10 +3005,8 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ AssertNotSmi(r7); __ AssertNotSmi(r7);
__ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE); __ CompareObjectType(r7, r8, r8, JS_FUNCTION_TYPE);
__ bne(&miss); __ bne(&miss);
__ SmiToPtrArrayOffset(r7, r6);
__ add(r7, r5, r7);
__ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
__ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0); __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0);
// We have to update statistics for runtime profiling. // We have to update statistics for runtime profiling.
__ LoadP(r7, FieldMemOperand(r5, with_types_offset)); __ LoadP(r7, FieldMemOperand(r5, with_types_offset));
__ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0); __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0);
...@@ -3026,6 +3036,10 @@ void CallICStub::Generate(MacroAssembler* masm) { ...@@ -3026,6 +3036,10 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
__ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0);
// Initialize the call counter.
__ LoadSmiLiteral(r0, Smi::FromInt(CallICNexus::kCallCountIncrement));
__ StoreP(r0, FieldMemOperand(r9, count_offset));
// Store the function. Use a stub since we need a frame for allocation. // Store the function. Use a stub since we need a frame for allocation.
// r5 - vector // r5 - vector
// r6 - slot // r6 - slot
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment