Commit d2146f74 authored by bjaideep, committed by Commit bot

Revert of "PPC/s390: [TypeFeedbackVector] Root literal arrays in function literals slots"

Reason for revert:
Original CL was reverted, https://codereview.chromium.org/2597163002

Original issue's description:
> PPC/s390: [TypeFeedbackVector] Root literal arrays in function literals slots
>
> Port 93df0940
>
> Original Commit Message:
>
>     Literal arrays and feedback vectors for a function can be garbage
>     collected if we don't have a rooted closure for the function, which
>     happens often. It's expensive to come back from this (recreating
>     boilerplates and gathering feedback again), and the cost is
>     disproportionate if the function was inlined into optimized code.
>
>     To guard against losing these arrays when we need them, we'll now
>     create literal arrays when creating the feedback vector for the outer
>     closure, and root them strongly in that vector.
>
> R=mvstanton@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
> BUG=v8:5456
> LOG=N
>
> Review-Url: https://codereview.chromium.org/2592043003
> Cr-Commit-Position: refs/heads/master@{#41898}
> Committed: https://chromium.googlesource.com/v8/v8/+/19aa7a20b0c39ea9ef81d6e021863183732f82c0

R=mvstanton@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:5456
LOG=N

Review-Url: https://codereview.chromium.org/2601793002
Cr-Commit-Position: refs/heads/master@{#41966}
parent 2c3fda91
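
For readers skimming the diff below, here is a minimal, self-contained C++ sketch (not V8 code; every type and member name is illustrative) of the idea the reverted CL implemented: the outer closure's feedback vector holds a strong reference to each inner function's literal array, so the literals survive even when no live closure roots them.

#include <memory>
#include <vector>

struct LiteralArray { /* boilerplates, constants, ... */ };

// Stand-in for a feedback vector: each function-literal slot strongly
// roots the literal array created for that inner function.
struct FeedbackVector {
  std::vector<std::shared_ptr<LiteralArray>> literal_slots;
};

// Stand-in for a closure: it references its literals only weakly here,
// modeling the fact that a collected closure no longer keeps them alive.
struct Closure {
  std::weak_ptr<LiteralArray> literals;
};

int main() {
  FeedbackVector outer_vector;

  // Creating the outer vector also creates and roots the inner literals.
  auto inner_literals = std::make_shared<LiteralArray>();
  outer_vector.literal_slots.push_back(inner_literals);

  Closure inner_closure;
  inner_closure.literals = inner_literals;

  // Even if inner_closure goes away, outer_vector keeps the literals alive,
  // so recreating boilerplates and regathering feedback is avoided.
  return inner_closure.literals.expired() ? 1 : 0;
}
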
@@ -1369,6 +1369,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(2), r0);
__ blt(&gotta_call_runtime);
// Find literals.
// r10 : native context
// r5 : length / index
// r9 : optimized code map
@@ -1389,6 +1390,18 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ bne(&loop_bottom);
// Literals available?
__ LoadP(temp,
FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Code available?
Register entry = r7;
@@ -1398,7 +1411,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
// Store code entry in the closure.
__ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
@@ -1432,7 +1445,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(1), r0);
__ bgt(&loop_top);
// We found no code.
// We found neither literals nor code.
__ b(&gotta_call_runtime);
__ bind(&try_shared);
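
The PPC hunk above (and the matching s390 hunk below) extends the CompileLazy fast path. Roughly, and with all names being illustrative stand-ins rather than V8 declarations, the logic with the reverted change applied looks like this: walk the optimized code map, and for the entry matching the current native context install both the literals and the optimized code on the closure, falling back to the runtime if either weak cell has been cleared.

#include <vector>

struct NativeContext {};
struct LiteralArray {};
struct Code {};

// A weak cell's value may have been cleared by the GC; nullptr models the
// cleared (Smi) case that the assembly tests with JumpIfSmi.
template <typename T>
struct WeakCell { T* value = nullptr; };

struct CodeMapEntry {
  WeakCell<NativeContext> context;
  WeakCell<LiteralArray> literals;
  WeakCell<Code> code;
};

struct Closure {
  LiteralArray* literals = nullptr;
  Code* code_entry = nullptr;
};

// Returns true if both literals and optimized code were installed on the
// closure; returning false corresponds to the gotta_call_runtime /
// try_shared fallbacks in the builtin.
bool TryInstallFromOptimizedCodeMap(const std::vector<CodeMapEntry>& map,
                                    NativeContext* native_context,
                                    Closure* closure) {
  for (const CodeMapEntry& entry : map) {
    if (entry.context.value != native_context) continue;  // loop_bottom
    LiteralArray* literals = entry.literals.value;
    if (literals == nullptr) return false;                 // gotta_call_runtime
    closure->literals = literals;                          // save the literals
    Code* code = entry.code.value;
    if (code == nullptr) return false;                     // try_shared instead
    closure->code_entry = code;                            // install the code
    return true;
  }
  return false;  // we found neither literals nor code
}
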
@@ -1374,6 +1374,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(2), r0);
__ blt(&gotta_call_runtime);
// Find literals.
// r9 : native context
// r4 : length / index
// r8 : optimized code map
@@ -1394,6 +1395,18 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ CmpP(temp, native_context);
__ bne(&loop_bottom, Label::kNear);
// Literals available?
__ LoadP(temp,
FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Code available?
Register entry = r6;
@@ -1403,7 +1416,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
// Store code entry in the closure.
__ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
@@ -1437,7 +1450,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(1), r0);
__ bgt(&loop_top);
// We found no code.
// We found neither literals nor code.
__ b(&gotta_call_runtime);
__ bind(&try_shared);
@@ -63,7 +63,7 @@ const Register GrowArrayElementsDescriptor::KeyRegister() { return r6; }
void FastNewClosureDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r4, r5, r6};
Register registers[] = {r5};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
@@ -61,7 +61,7 @@ const Register GrowArrayElementsDescriptor::KeyRegister() { return r5; }
void FastNewClosureDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3, r4, r5};
Register registers[] = {r4};
data->InitializePlatformSpecific(arraysize(registers), registers);
}