Commit f6a325fb authored by bjaideep, committed by Commit bot

PPC/s390: [builtins] Port parameter and argument-related code stubs to CSA

Port c205c9b7

Original Commit Message:

    Includes the port of these three builtins: FastNewStrictArguments,
    FastNewSloppyArguments and FastNewRestParameter. Also inline
    the implementation of these into the corresponding interpreter
    byte codes.

R=danno@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:5269
LOG=N

Review-Url: https://codereview.chromium.org/2686623002
Cr-Commit-Position: refs/heads/master@{#43007}
parent d0bccc9c
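
Every full-codegen hunk below makes the same substitution, so it is worth seeing once in isolation: instead of instantiating a hand-written platform CodeStub, the call site now obtains a Callable from CodeFactory and calls its code object, then restores the context register. A minimal before/after sketch, using only names that appear in the diff itself:

    // Before (removed): instantiate and call the platform-specific stub.
    FastNewStrictArgumentsStub stub(isolate());
    __ CallStub(&stub);

    // After (added): call the CSA-generated builtin through CodeFactory;
    // RestoreContext() reloads the context register, which the call may
    // have clobbered.
    Callable callable = CodeFactory::FastNewStrictArguments(isolate());
    __ Call(callable.code(), RelocInfo::CODE_TARGET);
    RestoreContext();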
@@ -278,14 +278,16 @@ void FullCodeGenerator::Generate() {
         __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
       }
       if (is_strict(language_mode()) || !has_simple_parameters()) {
-        FastNewStrictArgumentsStub stub(isolate());
-        __ CallStub(&stub);
+        Callable callable = CodeFactory::FastNewStrictArguments(isolate());
+        __ Call(callable.code(), RelocInfo::CODE_TARGET);
+        RestoreContext();
       } else if (literal()->has_duplicate_parameters()) {
         __ Push(r4);
         __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
       } else {
-        FastNewSloppyArgumentsStub stub(isolate());
-        __ CallStub(&stub);
+        Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
+        __ Call(callable.code(), RelocInfo::CODE_TARGET);
+        RestoreContext();
       }
       SetVar(arguments, r3, r4, r5);
......
@@ -282,14 +282,16 @@ void FullCodeGenerator::Generate() {
         __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
       }
       if (is_strict(language_mode()) || !has_simple_parameters()) {
-        FastNewStrictArgumentsStub stub(isolate());
-        __ CallStub(&stub);
+        Callable callable = CodeFactory::FastNewStrictArguments(isolate());
+        __ Call(callable.code(), RelocInfo::CODE_TARGET);
+        RestoreContext();
       } else if (literal()->has_duplicate_parameters()) {
         __ Push(r3);
         __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
       } else {
-        FastNewSloppyArgumentsStub stub(isolate());
-        __ CallStub(&stub);
+        Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
+        __ Call(callable.code(), RelocInfo::CODE_TARGET);
+        RestoreContext();
       }
       SetVar(arguments, r2, r3, r4);
......
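
The next hunk deletes the three hand-written PPC stubs wholesale (549 lines, now generated from CSA). The fast-path decision that FastNewRestParameterStub encoded in assembly reduces to the following plain C++ sketch (names are illustrative, not V8 identifiers):

    #include <algorithm>

    // Hedged sketch, not V8 code: rest parameters can only exist when an
    // arguments adaptor frame sits below the function's frame, and only
    // when it supplies more actual arguments than the declared formals.
    int RestParameterCount(bool has_adaptor_frame, int actual_argument_count,
                           int formal_parameter_count) {
      if (!has_adaptor_frame) return 0;  // counts match exactly: no extras
      return std::max(0, actual_argument_count - formal_parameter_count);
    }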
@@ -3264,549 +3264,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r4);
// Make r5 point to the JavaScript frame.
__ mr(r5, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r5, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r4);
__ beq(&ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have rest parameters (only possible if we have an
// arguments adaptor frame below the function frame).
Label no_rest_parameters;
__ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ bne(&no_rest_parameters);
// Check if the arguments adaptor frame contains more arguments than
// specified by the function's internal formal parameter count.
Label rest_parameters;
__ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadWordArith(
r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_PPC64
__ SmiTag(r6);
#endif
__ sub(r3, r3, r6, LeaveOE, SetRC);
__ bgt(&rest_parameters, cr0);
// Return an empty rest parameter array.
__ bind(&no_rest_parameters);
{
// ----------- S t a t e -------------
// -- cp : context
// -- lr : return address
// -----------------------------------
// Allocate an empty rest parameter array.
Label allocate, done_allocate;
__ Allocate(JSArray::kSize, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the rest parameter array in r3.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r4);
__ StoreP(r4, FieldMemOperand(r3, JSArray::kMapOffset), r0);
__ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r4, FieldMemOperand(r3, JSArray::kPropertiesOffset), r0);
__ StoreP(r4, FieldMemOperand(r3, JSArray::kElementsOffset), r0);
__ li(r4, Operand::Zero());
__ StoreP(r4, FieldMemOperand(r3, JSArray::kLengthOffset), r0);
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(Smi::FromInt(JSArray::kSize));
__ CallRuntime(Runtime::kAllocateInNewSpace);
}
__ b(&done_allocate);
}
__ bind(&rest_parameters);
{
// Compute the pointer to the first rest parameter (skipping the receiver).
__ SmiToPtrArrayOffset(r9, r3);
__ add(r5, r5, r9);
__ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
// ----------- S t a t e -------------
// -- cp : context
// -- r3 : number of rest parameters (tagged)
// -- r4 : function
// -- r5 : pointer just past first rest parameters
// -- r9 : size of rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
__ mov(r10, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ add(r10, r10, r9);
__ Allocate(r10, r6, r7, r8, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r6.
__ LoadRoot(r4, Heap::kFixedArrayMapRootIndex);
__ StoreP(r4, FieldMemOperand(r6, FixedArray::kMapOffset), r0);
__ StoreP(r3, FieldMemOperand(r6, FixedArray::kLengthOffset), r0);
__ addi(r7, r6,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
{
Label loop;
__ SmiUntag(r0, r3);
__ mtctr(r0);
__ bind(&loop);
__ LoadPU(ip, MemOperand(r5, -kPointerSize));
__ StorePU(ip, MemOperand(r7, kPointerSize));
__ bdnz(&loop);
__ addi(r7, r7, Operand(kPointerSize));
}
// Setup the rest parameter array in r7.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r4);
__ StoreP(r4, MemOperand(r7, JSArray::kMapOffset));
__ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r4, MemOperand(r7, JSArray::kPropertiesOffset));
__ StoreP(r6, MemOperand(r7, JSArray::kElementsOffset));
__ StoreP(r3, MemOperand(r7, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ addi(r3, r7, Operand(kHeapObjectTag));
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r10);
__ Push(r3, r5, r10);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mr(r6, r3);
__ Pop(r3, r5);
}
__ b(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ push(r4);
__ TailCallRuntime(Runtime::kNewRestParameter);
}
}
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r4);
// Make r10 point to the JavaScript frame.
__ mr(r10, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r10, MemOperand(r10, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r10, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r4);
__ beq(&ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadWordArith(
r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_PPC64
__ SmiTag(r5);
#endif
__ SmiToPtrArrayOffset(r6, r5);
__ add(r6, r10, r6);
__ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
// r4 : function
// r5 : number of parameters (tagged)
// r6 : parameters pointer
// r10 : JavaScript frame pointer
// Registers used over whole function:
// r8 : arguments count (tagged)
// r9 : mapped parameter count (tagged)
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ LoadP(r7, MemOperand(r10, StandardFrameConstants::kCallerFPOffset));
__ LoadP(r3, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ beq(&adaptor_frame);
// No adaptor, parameter count = argument count.
__ mr(r8, r5);
__ mr(r9, r5);
__ b(&try_allocate);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiToPtrArrayOffset(r6, r8);
__ add(r6, r6, r7);
__ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
// r8 = argument count (tagged)
// r9 = parameter count (tagged)
// Compute the mapped parameter count = min(r5, r8) in r9.
__ cmp(r5, r8);
if (CpuFeatures::IsSupported(ISELECT)) {
__ isel(lt, r9, r5, r8);
} else {
Label skip;
__ mr(r9, r5);
__ blt(&skip);
__ mr(r9, r8);
__ bind(&skip);
}
__ bind(&try_allocate);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ CmpSmiLiteral(r9, Smi::kZero, r0);
if (CpuFeatures::IsSupported(ISELECT)) {
__ SmiToPtrArrayOffset(r11, r9);
__ addi(r11, r11, Operand(kParameterMapHeaderSize));
__ isel(eq, r11, r0, r11);
} else {
Label skip2, skip3;
__ bne(&skip2);
__ li(r11, Operand::Zero());
__ b(&skip3);
__ bind(&skip2);
__ SmiToPtrArrayOffset(r11, r9);
__ addi(r11, r11, Operand(kParameterMapHeaderSize));
__ bind(&skip3);
}
// 2. Backing store.
__ SmiToPtrArrayOffset(r7, r8);
__ add(r11, r11, r7);
__ addi(r11, r11, Operand(FixedArray::kHeaderSize));
// 3. Arguments object.
__ addi(r11, r11, Operand(JSSloppyArgumentsObject::kSize));
// Do the allocation of all three objects in one go.
__ Allocate(r11, r3, r11, r7, &runtime, NO_ALLOCATION_FLAGS);
// r3 = address of new object(s) (tagged)
// r5 = argument count (smi-tagged)
// Get the arguments boilerplate from the current native context into r4.
const int kNormalOffset =
Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
__ LoadP(r7, NativeContextMemOperand());
__ cmpi(r9, Operand::Zero());
if (CpuFeatures::IsSupported(ISELECT)) {
__ LoadP(r11, MemOperand(r7, kNormalOffset));
__ LoadP(r7, MemOperand(r7, kAliasedOffset));
__ isel(eq, r7, r11, r7);
} else {
Label skip4, skip5;
__ bne(&skip4);
__ LoadP(r7, MemOperand(r7, kNormalOffset));
__ b(&skip5);
__ bind(&skip4);
__ LoadP(r7, MemOperand(r7, kAliasedOffset));
__ bind(&skip5);
}
// r3 = address of new object (tagged)
// r5 = argument count (smi-tagged)
// r7 = address of arguments map (tagged)
// r9 = mapped parameter count (tagged)
__ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
__ LoadRoot(r11, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r11, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
__ StoreP(r11, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
// Set up the callee in-object property.
__ AssertNotSmi(r4);
__ StoreP(r4, FieldMemOperand(r3, JSSloppyArgumentsObject::kCalleeOffset),
r0);
// Use the length (smi tagged) and set that as an in-object property too.
__ AssertSmi(r8);
__ StoreP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset),
r0);
// Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, r7 will point there, otherwise
// it will point to the backing store.
__ addi(r7, r3, Operand(JSSloppyArgumentsObject::kSize));
__ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
// r3 = address of new object (tagged)
// r5 = argument count (tagged)
// r7 = address of parameter map or backing store (tagged)
// r9 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ CmpSmiLiteral(r9, Smi::kZero, r0);
if (CpuFeatures::IsSupported(ISELECT)) {
__ isel(eq, r4, r7, r4);
__ beq(&skip_parameter_map);
} else {
Label skip6;
__ bne(&skip6);
// Move backing store address to r4, because it is
// expected there when filling in the unmapped arguments.
__ mr(r4, r7);
__ b(&skip_parameter_map);
__ bind(&skip6);
}
__ LoadRoot(r8, Heap::kSloppyArgumentsElementsMapRootIndex);
__ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
__ AddSmiLiteral(r8, r9, Smi::FromInt(2), r0);
__ StoreP(r8, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
__ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize),
r0);
__ SmiToPtrArrayOffset(r8, r9);
__ add(r8, r8, r7);
__ addi(r8, r8, Operand(kParameterMapHeaderSize));
__ StoreP(r8, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize),
r0);
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
// The mapped parameters thus need to get indices
// MIN_CONTEXT_SLOTS+parameter_count-1 ..
// MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
// We loop from right to left.
Label parameters_loop;
__ mr(r8, r9);
__ AddSmiLiteral(r11, r5, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
__ sub(r11, r11, r9);
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ SmiToPtrArrayOffset(r4, r8);
__ add(r4, r4, r7);
__ addi(r4, r4, Operand(kParameterMapHeaderSize));
// r4 = address of backing store (tagged)
// r7 = address of parameter map (tagged)
// r8 = temporary scratch (a.o., for address calculation)
// r10 = temporary scratch (a.o., for address calculation)
// ip = the hole value
__ SmiUntag(r8);
__ mtctr(r8);
__ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
__ add(r10, r4, r8);
__ add(r8, r7, r8);
__ addi(r10, r10, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag));
__ bind(&parameters_loop);
__ StorePU(r11, MemOperand(r8, -kPointerSize));
__ StorePU(ip, MemOperand(r10, -kPointerSize));
__ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0);
__ bdnz(&parameters_loop);
// Restore r8 = argument count (tagged).
__ LoadP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset));
__ bind(&skip_parameter_map);
// r3 = address of new object (tagged)
// r4 = address of backing store (tagged)
// r8 = argument count (tagged)
// r9 = mapped parameter count (tagged)
// r11 = scratch
// Copy arguments header and remaining slots (if there are any).
__ LoadRoot(r11, Heap::kFixedArrayMapRootIndex);
__ StoreP(r11, FieldMemOperand(r4, FixedArray::kMapOffset), r0);
__ StoreP(r8, FieldMemOperand(r4, FixedArray::kLengthOffset), r0);
__ sub(r11, r8, r9, LeaveOE, SetRC);
__ Ret(eq, cr0);
Label arguments_loop;
__ SmiUntag(r11);
__ mtctr(r11);
__ SmiToPtrArrayOffset(r0, r9);
__ sub(r6, r6, r0);
__ add(r11, r4, r0);
__ addi(r11, r11,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
__ bind(&arguments_loop);
__ LoadPU(r7, MemOperand(r6, -kPointerSize));
__ StorePU(r7, MemOperand(r11, kPointerSize));
__ bdnz(&arguments_loop);
// Return.
__ Ret();
// Do the runtime call to allocate the arguments object.
// r8 = argument count (tagged)
__ bind(&runtime);
__ Push(r4, r6, r8);
__ TailCallRuntime(Runtime::kNewSloppyArguments);
}
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r4 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r4);
// Make r5 point to the JavaScript frame.
__ mr(r5, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r5, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r4);
__ beq(&ok);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ LoadP(r6, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ beq(&arguments_adaptor);
{
__ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadWordArith(
r3,
FieldMemOperand(r7, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_PPC64
__ SmiTag(r3);
#endif
__ SmiToPtrArrayOffset(r9, r3);
__ add(r5, r5, r9);
}
__ b(&arguments_done);
__ bind(&arguments_adaptor);
{
__ LoadP(r3, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiToPtrArrayOffset(r9, r3);
__ add(r5, r6, r9);
}
__ bind(&arguments_done);
__ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
// ----------- S t a t e -------------
// -- cp : context
// -- r3 : number of rest parameters (tagged)
// -- r4 : function
// -- r5 : pointer just past first rest parameters
// -- r9 : size of rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate;
__ mov(r10,
Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ add(r10, r10, r9);
__ Allocate(r10, r6, r7, r8, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r6.
__ LoadRoot(r4, Heap::kFixedArrayMapRootIndex);
__ StoreP(r4, FieldMemOperand(r6, FixedArray::kMapOffset), r0);
__ StoreP(r3, FieldMemOperand(r6, FixedArray::kLengthOffset), r0);
__ addi(r7, r6,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
{
Label loop, done_loop;
__ SmiUntag(r0, r3, SetRC);
__ beq(&done_loop, cr0);
__ mtctr(r0);
__ bind(&loop);
__ LoadPU(ip, MemOperand(r5, -kPointerSize));
__ StorePU(ip, MemOperand(r7, kPointerSize));
__ bdnz(&loop);
__ bind(&done_loop);
__ addi(r7, r7, Operand(kPointerSize));
}
// Setup the rest parameter array in r7.
__ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4);
__ StoreP(r4, MemOperand(r7, JSStrictArgumentsObject::kMapOffset));
__ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r4, MemOperand(r7, JSStrictArgumentsObject::kPropertiesOffset));
__ StoreP(r6, MemOperand(r7, JSStrictArgumentsObject::kElementsOffset));
__ StoreP(r3, MemOperand(r7, JSStrictArgumentsObject::kLengthOffset));
STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
__ addi(r3, r7, Operand(kHeapObjectTag));
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r10);
__ Push(r3, r5, r10);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mr(r6, r3);
__ Pop(r3, r5);
}
__ b(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ push(r4);
__ TailCallRuntime(Runtime::kNewStrictArguments);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
......
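
The deleted FastNewSloppyArgumentsStub sized three objects and allocated them with a single Allocate() call: an optional parameter map, the elements backing store, and the arguments object itself. A hedged C++ restatement of that size computation (not V8 code; constant values are assumptions for a 64-bit target):

    constexpr int kPointerSize = 8;                                 // assumed: 64-bit
    constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;         // map + length
    constexpr int kJSSloppyArgumentsObjectSize = 5 * kPointerSize;  // incl. length, callee
    constexpr int kParameterMapHeaderSize =
        kFixedArrayHeaderSize + 2 * kPointerSize;

    int SloppyArgumentsAllocationSize(int argument_count, int mapped_count) {
      // 1. Parameter map, present only if any parameters are mapped; it has
      //    2 extra words holding the context and the backing store.
      int size = mapped_count == 0
                     ? 0
                     : kParameterMapHeaderSize + mapped_count * kPointerSize;
      // 2. Elements backing store for all arguments.
      size += kFixedArrayHeaderSize + argument_count * kPointerSize;
      // 3. The arguments object itself.
      return size + kJSSloppyArgumentsObjectSize;
    }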
@@ -67,24 +67,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r4};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r4};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r4};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() { return r3; }
......
@@ -3216,537 +3216,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r3);
// Make r4 point to the JavaScript frame.
__ LoadRR(r4, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r4, StandardFrameConstants::kFunctionOffset));
__ CmpP(ip, r3);
__ beq(&ok, Label::kNear);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have rest parameters (only possible if we have an
// arguments adaptor frame below the function frame).
Label no_rest_parameters;
__ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ bne(&no_rest_parameters);
// Check if the arguments adaptor frame contains more arguments than
// specified by the function's internal formal parameter count.
Label rest_parameters;
__ LoadP(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ LoadW(
r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_S390X
__ SmiTag(r5);
#endif
__ SubP(r2, r2, r5);
__ bgt(&rest_parameters);
// Return an empty rest parameter array.
__ bind(&no_rest_parameters);
{
// ----------- S t a t e -------------
// -- cp : context
// -- lr : return address
// -----------------------------------
// Allocate an empty rest parameter array.
Label allocate, done_allocate;
__ Allocate(JSArray::kSize, r2, r3, r4, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the rest parameter array in r2.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r3);
__ StoreP(r3, FieldMemOperand(r2, JSArray::kMapOffset), r0);
__ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r3, FieldMemOperand(r2, JSArray::kPropertiesOffset), r0);
__ StoreP(r3, FieldMemOperand(r2, JSArray::kElementsOffset), r0);
__ LoadImmP(r3, Operand::Zero());
__ StoreP(r3, FieldMemOperand(r2, JSArray::kLengthOffset), r0);
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
// Fall back to %AllocateInNewSpace.
__ bind(&allocate);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(Smi::FromInt(JSArray::kSize));
__ CallRuntime(Runtime::kAllocateInNewSpace);
}
__ b(&done_allocate);
}
__ bind(&rest_parameters);
{
// Compute the pointer to the first rest parameter (skipping the receiver).
__ SmiToPtrArrayOffset(r8, r2);
__ AddP(r4, r4, r8);
__ AddP(r4, r4, Operand(StandardFrameConstants::kCallerSPOffset));
// ----------- S t a t e -------------
// -- cp : context
// -- r2 : number of rest parameters (tagged)
// -- r3 : function
// -- r4 : pointer just past first rest parameters
// -- r8 : size of rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
__ mov(r9, Operand(JSArray::kSize + FixedArray::kHeaderSize));
__ AddP(r9, r9, r8);
__ Allocate(r9, r5, r6, r7, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r5.
__ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
__ StoreP(r3, FieldMemOperand(r5, FixedArray::kMapOffset), r0);
__ StoreP(r2, FieldMemOperand(r5, FixedArray::kLengthOffset), r0);
__ AddP(r6, r5,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
{
Label loop;
__ SmiUntag(r1, r2);
// __ mtctr(r0);
__ bind(&loop);
__ lay(r4, MemOperand(r4, -kPointerSize));
__ LoadP(ip, MemOperand(r4));
__ la(r6, MemOperand(r6, kPointerSize));
__ StoreP(ip, MemOperand(r6));
// __ bdnz(&loop);
__ BranchOnCount(r1, &loop);
__ AddP(r6, r6, Operand(kPointerSize));
}
// Setup the rest parameter array in r6.
__ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r3);
__ StoreP(r3, MemOperand(r6, JSArray::kMapOffset));
__ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r3, MemOperand(r6, JSArray::kPropertiesOffset));
__ StoreP(r5, MemOperand(r6, JSArray::kElementsOffset));
__ StoreP(r2, MemOperand(r6, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ AddP(r2, r6, Operand(kHeapObjectTag));
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ CmpP(r9, Operand(kMaxRegularHeapObjectSize));
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r9);
__ Push(r2, r4, r9);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ LoadRR(r5, r2);
__ Pop(r2, r4);
}
__ b(&done_allocate);
// Fall back to %NewRestParameter.
__ bind(&too_big_for_new_space);
__ push(r3);
__ TailCallRuntime(Runtime::kNewRestParameter);
}
}
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r3);
// Make r9 point to the JavaScript frame.
__ LoadRR(r9, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r9, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r9, StandardFrameConstants::kFunctionOffset));
__ CmpP(ip, r3);
__ beq(&ok, Label::kNear);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
__ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ LoadW(
r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_S390X
__ SmiTag(r4);
#endif
__ SmiToPtrArrayOffset(r5, r4);
__ AddP(r5, r9, r5);
__ AddP(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
// r3 : function
// r4 : number of parameters (tagged)
// r5 : parameters pointer
// r9 : JavaScript frame pointer
// Registers used over whole function:
// r7 : arguments count (tagged)
// r8 : mapped parameter count (tagged)
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ LoadP(r6, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
__ LoadP(r2, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
__ LoadSmiLiteral(r0, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ CmpP(r2, r0);
__ beq(&adaptor_frame);
// No adaptor, parameter count = argument count.
__ LoadRR(r7, r4);
__ LoadRR(r8, r4);
__ b(&try_allocate);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ LoadP(r7, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiToPtrArrayOffset(r5, r7);
__ AddP(r5, r5, r6);
__ AddP(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
// r7 = argument count (tagged)
// r8 = parameter count (tagged)
// Compute the mapped parameter count = min(r4, r7) in r8.
__ CmpP(r4, r7);
Label skip;
__ LoadRR(r8, r4);
__ blt(&skip);
__ LoadRR(r8, r7);
__ bind(&skip);
__ bind(&try_allocate);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ CmpSmiLiteral(r8, Smi::kZero, r0);
Label skip2, skip3;
__ bne(&skip2);
__ LoadImmP(r1, Operand::Zero());
__ b(&skip3);
__ bind(&skip2);
__ SmiToPtrArrayOffset(r1, r8);
__ AddP(r1, r1, Operand(kParameterMapHeaderSize));
__ bind(&skip3);
// 2. Backing store.
__ SmiToPtrArrayOffset(r6, r7);
__ AddP(r1, r1, r6);
__ AddP(r1, r1, Operand(FixedArray::kHeaderSize));
// 3. Arguments object.
__ AddP(r1, r1, Operand(JSSloppyArgumentsObject::kSize));
// Do the allocation of all three objects in one go.
__ Allocate(r1, r2, r1, r6, &runtime, NO_ALLOCATION_FLAGS);
// r2 = address of new object(s) (tagged)
// r4 = argument count (smi-tagged)
// Get the arguments boilerplate from the current native context into r3.
const int kNormalOffset =
Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
__ LoadP(r6, NativeContextMemOperand());
__ CmpP(r8, Operand::Zero());
Label skip4, skip5;
__ bne(&skip4);
__ LoadP(r6, MemOperand(r6, kNormalOffset));
__ b(&skip5);
__ bind(&skip4);
__ LoadP(r6, MemOperand(r6, kAliasedOffset));
__ bind(&skip5);
// r2 = address of new object (tagged)
// r4 = argument count (smi-tagged)
// r6 = address of arguments map (tagged)
// r8 = mapped parameter count (tagged)
__ StoreP(r6, FieldMemOperand(r2, JSObject::kMapOffset), r0);
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r1, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
__ StoreP(r1, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
// Set up the callee in-object property.
__ AssertNotSmi(r3);
__ StoreP(r3, FieldMemOperand(r2, JSSloppyArgumentsObject::kCalleeOffset),
r0);
// Use the length (smi tagged) and set that as an in-object property too.
__ AssertSmi(r7);
__ StoreP(r7, FieldMemOperand(r2, JSSloppyArgumentsObject::kLengthOffset),
r0);
// Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, r6 will point there, otherwise
// it will point to the backing store.
__ AddP(r6, r2, Operand(JSSloppyArgumentsObject::kSize));
__ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
// r2 = address of new object (tagged)
// r4 = argument count (tagged)
// r6 = address of parameter map or backing store (tagged)
// r8 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ CmpSmiLiteral(r8, Smi::kZero, r0);
Label skip6;
__ bne(&skip6);
// Move backing store address to r3, because it is
// expected there when filling in the unmapped arguments.
__ LoadRR(r3, r6);
__ b(&skip_parameter_map);
__ bind(&skip6);
__ LoadRoot(r7, Heap::kSloppyArgumentsElementsMapRootIndex);
__ StoreP(r7, FieldMemOperand(r6, FixedArray::kMapOffset), r0);
__ AddSmiLiteral(r7, r8, Smi::FromInt(2), r0);
__ StoreP(r7, FieldMemOperand(r6, FixedArray::kLengthOffset), r0);
__ StoreP(cp, FieldMemOperand(r6, FixedArray::kHeaderSize + 0 * kPointerSize),
r0);
__ SmiToPtrArrayOffset(r7, r8);
__ AddP(r7, r7, r6);
__ AddP(r7, r7, Operand(kParameterMapHeaderSize));
__ StoreP(r7, FieldMemOperand(r6, FixedArray::kHeaderSize + 1 * kPointerSize),
r0);
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
// The mapped parameters thus need to get indices
// MIN_CONTEXT_SLOTS+parameter_count-1 ..
// MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
// We loop from right to left.
Label parameters_loop;
__ LoadRR(r7, r8);
__ AddSmiLiteral(r1, r4, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
__ SubP(r1, r1, r8);
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ SmiToPtrArrayOffset(r3, r7);
__ AddP(r3, r3, r6);
__ AddP(r3, r3, Operand(kParameterMapHeaderSize));
// r3 = address of backing store (tagged)
// r6 = address of parameter map (tagged)
// r7 = temporary scratch (a.o., for address calculation)
// r9 = temporary scratch (a.o., for address calculation)
// ip = the hole value
__ SmiUntag(r7);
__ push(r4);
__ LoadRR(r4, r7);
__ ShiftLeftP(r7, r7, Operand(kPointerSizeLog2));
__ AddP(r9, r3, r7);
__ AddP(r7, r6, r7);
__ AddP(r9, r9, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ AddP(r7, r7, Operand(kParameterMapHeaderSize - kHeapObjectTag));
__ bind(&parameters_loop);
__ StoreP(r1, MemOperand(r7, -kPointerSize));
__ lay(r7, MemOperand(r7, -kPointerSize));
__ StoreP(ip, MemOperand(r9, -kPointerSize));
__ lay(r9, MemOperand(r9, -kPointerSize));
__ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
__ BranchOnCount(r4, &parameters_loop);
__ pop(r4);
// Restore r7 = argument count (tagged).
__ LoadP(r7, FieldMemOperand(r2, JSSloppyArgumentsObject::kLengthOffset));
__ bind(&skip_parameter_map);
// r2 = address of new object (tagged)
// r3 = address of backing store (tagged)
// r7 = argument count (tagged)
// r8 = mapped parameter count (tagged)
// r1 = scratch
// Copy arguments header and remaining slots (if there are any).
__ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
__ StoreP(r1, FieldMemOperand(r3, FixedArray::kMapOffset), r0);
__ StoreP(r7, FieldMemOperand(r3, FixedArray::kLengthOffset), r0);
__ SubP(r1, r7, r8);
__ Ret(eq);
Label arguments_loop;
__ SmiUntag(r1);
__ LoadRR(r4, r1);
__ SmiToPtrArrayOffset(r0, r8);
__ SubP(r5, r5, r0);
__ AddP(r1, r3, r0);
__ AddP(r1, r1,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
__ bind(&arguments_loop);
__ LoadP(r6, MemOperand(r5, -kPointerSize));
__ lay(r5, MemOperand(r5, -kPointerSize));
__ StoreP(r6, MemOperand(r1, kPointerSize));
__ la(r1, MemOperand(r1, kPointerSize));
__ BranchOnCount(r4, &arguments_loop);
// Return.
__ Ret();
// Do the runtime call to allocate the arguments object.
// r7 = argument count (tagged)
__ bind(&runtime);
__ Push(r3, r5, r7);
__ TailCallRuntime(Runtime::kNewSloppyArguments);
}
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r3 : function
// -- cp : context
// -- fp : frame pointer
// -- lr : return address
// -----------------------------------
__ AssertFunction(r3);
// Make r4 point to the JavaScript frame.
__ LoadRR(r4, fp);
if (skip_stub_frame()) {
// For Ignition we need to skip the handler/stub frame to reach the
// JavaScript frame for the function.
__ LoadP(r4, MemOperand(r4, StandardFrameConstants::kCallerFPOffset));
}
if (FLAG_debug_code) {
Label ok;
__ LoadP(ip, MemOperand(r4, StandardFrameConstants::kFunctionOffset));
__ CmpP(ip, r3);
__ beq(&ok, Label::kNear);
__ Abort(kInvalidFrameForFastNewRestArgumentsStub);
__ bind(&ok);
}
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ LoadP(r5, MemOperand(r4, StandardFrameConstants::kCallerFPOffset));
__ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset));
__ CmpSmiLiteral(ip, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ beq(&arguments_adaptor);
{
__ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ LoadW(r2, FieldMemOperand(
r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_S390X
__ SmiTag(r2);
#endif
__ SmiToPtrArrayOffset(r8, r2);
__ AddP(r4, r4, r8);
}
__ b(&arguments_done);
__ bind(&arguments_adaptor);
{
__ LoadP(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiToPtrArrayOffset(r8, r2);
__ AddP(r4, r5, r8);
}
__ bind(&arguments_done);
__ AddP(r4, r4, Operand(StandardFrameConstants::kCallerSPOffset));
// ----------- S t a t e -------------
// -- cp : context
// -- r2 : number of rest parameters (tagged)
// -- r3 : function
// -- r4 : pointer just past first rest parameters
// -- r8 : size of rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate;
__ mov(r9, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
__ AddP(r9, r9, r8);
__ Allocate(r9, r5, r6, r7, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r5.
__ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
__ StoreP(r3, FieldMemOperand(r5, FixedArray::kMapOffset), r0);
__ StoreP(r2, FieldMemOperand(r5, FixedArray::kLengthOffset), r0);
__ AddP(r6, r5,
Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
{
Label loop, done_loop;
__ SmiUntag(r1, r2);
__ LoadAndTestP(r1, r1);
__ beq(&done_loop);
__ bind(&loop);
__ lay(r4, MemOperand(r4, -kPointerSize));
__ LoadP(ip, MemOperand(r4));
__ la(r6, MemOperand(r6, kPointerSize));
__ StoreP(ip, MemOperand(r6));
__ BranchOnCount(r1, &loop);
__ bind(&done_loop);
__ AddP(r6, r6, Operand(kPointerSize));
}
// Setup the rest parameter array in r6.
__ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r3);
__ StoreP(r3, MemOperand(r6, JSStrictArgumentsObject::kMapOffset));
__ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
__ StoreP(r3, MemOperand(r6, JSStrictArgumentsObject::kPropertiesOffset));
__ StoreP(r5, MemOperand(r6, JSStrictArgumentsObject::kElementsOffset));
__ StoreP(r2, MemOperand(r6, JSStrictArgumentsObject::kLengthOffset));
STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
__ AddP(r2, r6, Operand(kHeapObjectTag));
__ Ret();
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ CmpP(r9, Operand(kMaxRegularHeapObjectSize));
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r9);
__ Push(r2, r4, r9);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ LoadRR(r5, r2);
__ Pop(r2, r4);
}
__ b(&done_allocate);
// Fall back to %NewStrictArguments.
__ bind(&too_big_for_new_space);
__ push(r3);
__ TailCallRuntime(Runtime::kNewStrictArguments);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
......
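
Both deleted stub bodies lean on SmiToPtrArrayOffset to turn a tagged argument count into a byte offset. On the 64-bit targets here a Smi carries its payload in the upper 32 bits, so the conversion is a single arithmetic shift. A hedged sketch (shift constants are assumptions for PPC64/S390X, not taken from the diff):

    #include <cstdint>

    constexpr int kSmiShift = 32;        // assumed: payload in the upper word
    constexpr int kPointerSizeLog2 = 3;  // assumed: 8-byte pointers

    // Tagged smi count -> byte offset into a pointer-sized array:
    // (count << kSmiShift) >> (kSmiShift - kPointerSizeLog2) == count * 8.
    int64_t SmiToPtrArrayOffset(int64_t tagged_smi) {
      return tagged_smi >> (kSmiShift - kPointerSizeLog2);
    }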
@@ -65,24 +65,6 @@ void FastNewClosureDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
// static
const Register TypeConversionDescriptor::ArgumentRegister() { return r2; }
......