Commit 7ad54344 authored by bjaideep, committed by Commit bot

PPC/s390: [stubs] Port FastNewObjectStub to TF

Port 5b02a98b

Original Commit Message:

    In the process, convert from a code stub into a builtin.

R=danno@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Review-Url: https://codereview.chromium.org/2601243003
Cr-Commit-Position: refs/heads/master@{#41992}
parent 5668ce39
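
Each call site below follows the same mechanical substitution: the platform CallStub() dispatch to FastNewObjectStub is replaced by a direct call to the code object of the new TurboFan builtin, obtained through CodeFactory. A minimal sketch of the pattern in the usual ACCESS_MASM style (the surrounding registers vary per call site; the snippet only illustrates the substitution and is not itself part of the patch):

  // Before: instantiate the platform code stub and dispatch through CallStub().
  FastNewObjectStub stub(masm->isolate());
  __ CallStub(&stub);

  // After: CodeFactory::FastNewObject() returns a Callable for the TF builtin;
  // its code() handle is emitted as a CODE_TARGET relocation, so the call goes
  // straight to the builtin's code object.
  __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
          RelocInfo::CODE_TARGET);
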
@@ -338,8 +338,8 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
-   FastNewObjectStub stub(masm->isolate());
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
@@ -490,8 +490,8 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
-   FastNewObjectStub stub(masm->isolate());
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
@@ -587,8 +587,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
    // Allocate the new receiver object.
    __ Push(r4, r6);
-   FastNewObjectStub stub(masm->isolate());
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ mr(r7, r3);
    __ Pop(r4, r6);
...
@@ -334,11 +334,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
-   FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
@@ -484,11 +484,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
-   FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
@@ -584,8 +584,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
    // Allocate the new receiver object.
    __ Push(r3, r5);
-   FastNewObjectStub stub(masm->isolate());
-   __ CallStub(&stub);
+   __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+           RelocInfo::CODE_TARGET);
    __ LoadRR(r6, r2);
    __ Pop(r3, r5);
...
@@ -3522,122 +3522,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  GenerateCase(masm, FAST_ELEMENTS);
}
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : target
// -- r6 : new target
// -- cp : context
// -- lr : return address
// -----------------------------------
__ AssertFunction(r4);
__ AssertReceiver(r6);
// Verify that the new target is a JSFunction.
Label new_object;
__ CompareObjectType(r6, r5, r5, JS_FUNCTION_TYPE);
__ bne(&new_object);
// Load the initial map and verify that it's in fact a map.
__ LoadP(r5, FieldMemOperand(r6, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(r5, &new_object);
__ CompareObjectType(r5, r3, r3, MAP_TYPE);
__ bne(&new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ LoadP(r3, FieldMemOperand(r5, Map::kConstructorOrBackPointerOffset));
__ cmp(r3, r4);
__ bne(&new_object);
// Allocate the JSObject on the heap.
Label allocate, done_allocate;
__ lbz(r7, FieldMemOperand(r5, Map::kInstanceSizeOffset));
__ Allocate(r7, r3, r8, r9, &allocate, SIZE_IN_WORDS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ StoreP(r5, FieldMemOperand(r3, JSObject::kMapOffset), r0);
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
__ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ addi(r4, r3, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- r3 : result (tagged)
// -- r4 : result fields (untagged)
// -- r8 : result end (untagged)
// -- r5 : initial map
// -- cp : context
// -- lr : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
__ lwz(r6, FieldMemOperand(r5, Map::kBitField3Offset));
__ DecodeField<Map::ConstructionCounter>(r10, r6, SetRC);
__ bne(&slack_tracking, cr0);
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(r4, r8, r9);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ Add(r6, r6, -(1 << Map::ConstructionCounter::kShift), r0);
__ stw(r6, FieldMemOperand(r5, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
__ lbz(r7, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
__ ShiftLeftImm(r7, r7, Operand(kPointerSizeLog2));
__ sub(r7, r8, r7);
__ InitializeFieldsWithFiller(r4, r7, r9);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(r9, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(r4, r8, r9);
// Check if we can finalize the instance size.
__ cmpi(r10, Operand(Map::kSlackTrackingCounterEnd));
__ Ret(ne);
// Finalize the instance size.
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r3, r5);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(r3);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
__ ShiftLeftImm(r7, r7,
Operand(kPointerSizeLog2 + kSmiTagSize + kSmiShiftSize));
__ Push(r5, r7);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(r5);
}
__ lbz(r8, FieldMemOperand(r5, Map::kInstanceSizeOffset));
__ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
__ add(r8, r3, r8);
__ subi(r8, r8, Operand(kHeapObjectTag));
__ b(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ Push(r4, r6);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r4 : function
...
@@ -67,12 +67,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
  data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r4, r6};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {r4};
...
@@ -3471,124 +3471,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  GenerateCase(masm, FAST_ELEMENTS);
}
void FastNewObjectStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : target
// -- r5 : new target
// -- cp : context
// -- lr : return address
// -----------------------------------
__ AssertFunction(r3);
__ AssertReceiver(r5);
// Verify that the new target is a JSFunction.
Label new_object;
__ CompareObjectType(r5, r4, r4, JS_FUNCTION_TYPE);
__ bne(&new_object);
// Load the initial map and verify that it's in fact a map.
__ LoadP(r4, FieldMemOperand(r5, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(r4, &new_object);
__ CompareObjectType(r4, r2, r2, MAP_TYPE);
__ bne(&new_object);
// Fall back to runtime if the target differs from the new target's
// initial map constructor.
__ LoadP(r2, FieldMemOperand(r4, Map::kConstructorOrBackPointerOffset));
__ CmpP(r2, r3);
__ bne(&new_object);
// Allocate the JSObject on the heap.
Label allocate, done_allocate;
__ LoadlB(r6, FieldMemOperand(r4, Map::kInstanceSizeOffset));
__ Allocate(r6, r2, r7, r8, &allocate, SIZE_IN_WORDS);
__ bind(&done_allocate);
// Initialize the JSObject fields.
__ StoreP(r4, FieldMemOperand(r2, JSObject::kMapOffset));
__ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r5, FieldMemOperand(r2, JSObject::kPropertiesOffset));
__ StoreP(r5, FieldMemOperand(r2, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ AddP(r3, r2, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
// -- r2 : result (tagged)
// -- r3 : result fields (untagged)
// -- r7 : result end (untagged)
// -- r4 : initial map
// -- cp : context
// -- lr : return address
// -----------------------------------
// Perform in-object slack tracking if requested.
Label slack_tracking;
STATIC_ASSERT(Map::kNoSlackTracking == 0);
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ LoadlW(r5, FieldMemOperand(r4, Map::kBitField3Offset));
__ DecodeField<Map::ConstructionCounter>(r9, r5);
__ LoadAndTestP(r9, r9);
__ bne(&slack_tracking);
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(r3, r7, r8);
__ Ret();
}
__ bind(&slack_tracking);
{
// Decrease generous allocation count.
STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
__ Add32(r5, r5, Operand(-(1 << Map::ConstructionCounter::kShift)));
__ StoreW(r5, FieldMemOperand(r4, Map::kBitField3Offset));
// Initialize the in-object fields with undefined.
__ LoadlB(r6, FieldMemOperand(r4, Map::kUnusedPropertyFieldsOffset));
__ ShiftLeftP(r6, r6, Operand(kPointerSizeLog2));
__ SubP(r6, r7, r6);
__ InitializeFieldsWithFiller(r3, r6, r8);
// Initialize the remaining (reserved) fields with one pointer filler map.
__ LoadRoot(r8, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(r3, r7, r8);
// Check if we can finalize the instance size.
__ CmpP(r9, Operand(Map::kSlackTrackingCounterEnd));
__ Ret(ne);
// Finalize the instance size.
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r2, r4);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(r2);
}
__ Ret();
}
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
STATIC_ASSERT(kSmiTag == 0);
__ ShiftLeftP(r6, r6,
Operand(kPointerSizeLog2 + kSmiTagSize + kSmiShiftSize));
__ Push(r4, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(r4);
}
__ LoadlB(r7, FieldMemOperand(r4, Map::kInstanceSizeOffset));
__ ShiftLeftP(r7, r7, Operand(kPointerSizeLog2));
__ AddP(r7, r2, r7);
__ SubP(r7, r7, Operand(kHeapObjectTag));
__ b(&done_allocate);
// Fall back to %NewObject.
__ bind(&new_object);
__ Push(r3, r5);
__ TailCallRuntime(Runtime::kNewObject);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : function
...
@@ -65,12 +65,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
  data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3, r5};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register registers[] = {r3};
...