Commit 111c5735 authored by jgruber, committed by Commit Bot

[builtins] Move ArrayConstructorStub to builtin

Calls from embedded builtins to stubs are expensive due to the
indirection through the builtins constants table. This moves
the ArrayConstructorStub to a builtin.

Bug: v8:6666
Change-Id: Iff4bff99cd911a7f5f138819801c7812b75ea969
Reviewed-on: https://chromium-review.googlesource.com/1071519
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53357}
parent 0094defa
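
The change follows the same pattern in each architecture port shown in the hunks below: call sites that previously instantiated the platform code stub and tail-called it now jump straight to the code object of the new ArrayConstructorImpl builtin. A minimal sketch of the recurring before/after pattern, as it appears in the arm hunk:

    // Before: go through ArrayConstructorStub (from an embedded builtin this
    // implies an extra indirection through the builtins constants table).
    ArrayConstructorStub stub(masm->isolate());
    __ TailCallStub(&stub);

    // After: jump directly to the builtin code target.
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);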
@@ -318,97 +318,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
__ ldm(ia_w, sp, kSavedRegs | pc.bit());
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// r0 - number of arguments
// r1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ tst(r3, Operand(1));
__ b(ne, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ add(r3, r3, Operand(1));
if (FLAG_debug_code) {
__ ldr(r5, FieldMemOperand(r2, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store r3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ ldr(r4, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ str(r4, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
@@ -439,82 +348,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ tst(r0, r0);
__ b(ne, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmp(r0, Operand(1));
__ b(gt, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : argc (only if argument_count() == ANY)
// -- r1 : constructor
// -- r2 : AllocationSite or undefined
// -- r3 : new target
// -- sp[0] : return address
// -- sp[4] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ tst(r4, Operand(kSmiTagMask));
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(r4, r4, r5, MAP_TYPE);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in r2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(r2, r4);
}
// Enter the context of the Array function.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
Label subclassing;
__ cmp(r3, r1);
__ b(ne, &subclassing);
Label no_info;
// Get the elements kind and case on that.
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
__ b(eq, &no_info);
__ ldr(r3, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(r3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
__ bind(&subclassing);
__ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ add(r0, r0, Operand(3));
__ Push(r3, r2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
__ cmp(r0, Operand(1));
......
@@ -316,115 +316,6 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
__ Call(GetCode(), RelocInfo::CODE_TARGET);
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
ASM_LOCATION("CreateArrayDispatch");
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
Register kind = x3;
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
// TODO(jbramley): Is this the best way to handle this? Can we make the
// tail calls conditional, rather than hopping over each one?
__ CompareAndBranch(kind, candidate_kind, ne, &next);
T stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
// TODO(jbramley): If this needs to be a special case, make it a proper template
// specialization, and not a separate function.
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
ASM_LOCATION("CreateArrayDispatchOneArgument");
// x0 - argc
// x1 - constructor?
// x2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// x3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// sp[0] - last argument
Register allocation_site = x2;
Register kind = x3;
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, the array is holey.
Label normal_sequence;
__ Tbnz(kind, 0, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Orr(kind, kind, 1);
if (FLAG_debug_code) {
__ Ldr(x10, FieldMemOperand(allocation_site, 0));
__ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex,
&normal_sequence);
__ Assert(eq, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store 'kind'
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field; upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ Ldr(x11,
FieldMemOperand(allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Add(x11, x11, Smi::FromInt(kFastElementsKindPackedToHoley));
__ Str(x11,
FieldMemOperand(allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
__ CompareAndBranch(kind, candidate_kind, ne, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
@@ -455,93 +346,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Register argc = x0;
Label zero_case, n_case;
__ Cbz(argc, &zero_case);
__ Cmp(argc, 1);
__ B(ne, &n_case);
// One argument.
CreateArrayDispatchOneArgument(masm, mode);
__ Bind(&zero_case);
// No arguments.
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ Bind(&n_case);
// N arguments.
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("ArrayConstructorStub::Generate");
// ----------- S t a t e -------------
// -- x0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- x1 : constructor
// -- x2 : AllocationSite or undefined
// -- x3 : new target
// -- sp[0] : last argument
// -----------------------------------
Register constructor = x1;
Register allocation_site = x2;
Register new_target = x3;
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
Label unexpected_map, map_ok;
// Initial map for the builtin Array function should be a map.
__ Ldr(x10, FieldMemOperand(constructor,
JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ JumpIfSmi(x10, &unexpected_map);
__ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
__ Bind(&unexpected_map);
__ Abort(AbortReason::kUnexpectedInitialMapForArrayFunction);
__ Bind(&map_ok);
// We should either have undefined in the allocation_site register or a
// valid AllocationSite.
__ AssertUndefinedOrAllocationSite(allocation_site);
}
// Enter the context of the Array function.
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
Label subclassing;
__ Cmp(new_target, constructor);
__ B(ne, &subclassing);
Register kind = x3;
Label no_info;
// Get the elements kind and case on that.
__ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info);
__ Ldrsw(kind, UntagSmiFieldMemOperand(
allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ And(kind, kind, AllocationSite::ElementsKindBits::kMask);
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ Bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing support.
__ Bind(&subclassing);
__ Poke(constructor, Operand(x0, LSL, kPointerSizeLog2));
__ Add(x0, x0, Operand(3));
__ Push(new_target, allocation_site);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
Label zero_case, n_case;
......
@@ -141,9 +141,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
__ mov(r3, r1, LeaveCC, eq);
// Run the native code for the Array function called as a normal function.
// tail call a stub
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
@@ -1165,8 +1164,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the array construct stub (still in the caller
// context at this point).
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with r0, r1, and r3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -2913,6 +2912,169 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ Ret();
}
namespace {
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// r0 - number of arguments
// r1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ tst(r3, Operand(1));
__ b(ne, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ add(r3, r3, Operand(1));
if (FLAG_debug_code) {
__ ldr(r5, FieldMemOperand(r2, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store r3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ ldr(r4, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ str(r4, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ tst(r0, r0);
__ b(ne, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmp(r0, Operand(1));
__ b(gt, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
__ Jump(code, RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : argc (only if argument_count() == ANY)
// -- r1 : constructor
// -- r2 : AllocationSite or undefined
// -- r3 : new target
// -- sp[0] : return address
// -- sp[4] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ tst(r4, Operand(kSmiTagMask));
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(r4, r4, r5, MAP_TYPE);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in r2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(r2, r4);
}
// Enter the context of the Array function.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
Label subclassing;
__ cmp(r3, r1);
__ b(ne, &subclassing);
Label no_info;
// Get the elements kind and case on that.
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
__ b(eq, &no_info);
__ ldr(r3, FieldMemOperand(
r2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(r3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
__ bind(&subclassing);
__ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ add(r0, r0, Operand(3));
__ Push(r3, r2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ lsl(r5, r0, Operand(kPointerSizeLog2));
__ str(r1, MemOperand(sp, r5));
......
@@ -136,8 +136,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
__ CmovX(x3, x1, eq);
// Run the native code for the Array function called as a normal function.
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
@@ -1277,8 +1277,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the array construct stub (still in the caller
// context at this point).
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with x0, x1, and x3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -3444,6 +3444,199 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ Ret();
}
namespace {
// TODO(jbramley): If this needs to be a special case, make it a proper template
// specialization, and not a separate function.
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
ASM_LOCATION("CreateArrayDispatchOneArgument");
// x0 - argc
// x1 - constructor?
// x2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// x3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// sp[0] - last argument
Register allocation_site = x2;
Register kind = x3;
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, the array is holey.
Label normal_sequence;
__ Tbnz(kind, 0, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Orr(kind, kind, 1);
if (FLAG_debug_code) {
__ Ldr(x10, FieldMemOperand(allocation_site, 0));
__ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex,
&normal_sequence);
__ Assert(eq, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store 'kind'
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field; upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ Ldr(x11,
FieldMemOperand(allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Add(x11, x11, Smi::FromInt(kFastElementsKindPackedToHoley));
__ Str(x11,
FieldMemOperand(allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
__ CompareAndBranch(kind, candidate_kind, ne, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
ASM_LOCATION("CreateArrayDispatch");
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
Register kind = x3;
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
// TODO(jbramley): Is this the best way to handle this? Can we make the
// tail calls conditional, rather than hopping over each one?
__ CompareAndBranch(kind, candidate_kind, ne, &next);
T stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Register argc = x0;
Label zero_case, n_case;
__ Cbz(argc, &zero_case);
__ Cmp(argc, 1);
__ B(ne, &n_case);
// One argument.
CreateArrayDispatchOneArgument(masm, mode);
__ Bind(&zero_case);
// No arguments.
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ Bind(&n_case);
// N arguments.
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
__ Jump(code, RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
ASM_LOCATION("ArrayConstructorStub::Generate");
// ----------- S t a t e -------------
// -- x0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- x1 : constructor
// -- x2 : AllocationSite or undefined
// -- x3 : new target
// -- sp[0] : last argument
// -----------------------------------
Register constructor = x1;
Register allocation_site = x2;
Register new_target = x3;
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
Label unexpected_map, map_ok;
// Initial map for the builtin Array function should be a map.
__ Ldr(x10, FieldMemOperand(constructor,
JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ JumpIfSmi(x10, &unexpected_map);
__ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
__ Bind(&unexpected_map);
__ Abort(AbortReason::kUnexpectedInitialMapForArrayFunction);
__ Bind(&map_ok);
// We should either have undefined in the allocation_site register or a
// valid AllocationSite.
__ AssertUndefinedOrAllocationSite(allocation_site);
}
// Enter the context of the Array function.
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
Label subclassing;
__ Cmp(new_target, constructor);
__ B(ne, &subclassing);
Register kind = x3;
Label no_info;
// Get the elements kind and case on that.
__ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info);
__ Ldrsw(kind, UntagSmiFieldMemOperand(
allocation_site,
AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ And(kind, kind, AllocationSite::ElementsKindBits::kMask);
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ Bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing support.
__ Bind(&subclassing);
__ Poke(constructor, Operand(x0, LSL, kPointerSizeLog2));
__ Add(x0, x0, Operand(3));
__ Push(new_target, allocation_site);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ Mov(x5, Operand(x0, LSL, kPointerSizeLog2));
__ Poke(x1, Operand(x5));
......
@@ -236,6 +236,7 @@ namespace internal {
\
/* Array */ \
ASM(ArrayConstructor) \
ASM(ArrayConstructorImpl) \
ASM(ArrayNArgumentsConstructor) \
ASM(InternalArrayConstructor) \
CPP(ArrayConcat) \
......
@@ -1219,8 +1219,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the array construct stub (still in the caller
// context at this point).
__ AssertFunction(edi);
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with unmodified eax, edi, edx values.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -1951,8 +1951,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
// Run the native code for the Array function called as a normal function.
__ bind(&call);
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
@@ -3149,6 +3149,181 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ ret(0);
}
namespace {
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// edx - kind (if mode != DISABLE_ALLOCATION_SITES)
// eax - number of arguments
// edi - constructor?
// esp[0] - return address
// esp[4] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ test_b(edx, Immediate(1));
__ j(not_zero, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry.
__ inc(edx);
if (FLAG_debug_code) {
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
__ Assert(equal, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store r3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ add(
FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ test(eax, eax);
__ j(not_zero, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmp(eax, 1);
__ j(greater, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
__ Jump(code, RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- ebx : AllocationSite or undefined
// -- edi : constructor
// -- edx : Original constructor
// -- esp[0] : return address
// -- esp[4] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in ebx or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(ebx);
}
Label subclassing;
// Enter the context of the Array function.
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
__ cmp(edx, edi);
__ j(not_equal, &subclassing);
Label no_info;
// If the feedback vector is the undefined value call an array constructor
// that doesn't use AllocationSites.
__ cmp(ebx, masm->isolate()->factory()->undefined_value());
__ j(equal, &no_info);
// Only look at the lower 16 bits of the transition info.
__ mov(edx,
FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(edx);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
__ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
__ add(eax, Immediate(3));
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ Push(ebx);
__ PushReturnAddressFrom(ecx);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ pop(ecx);
__ mov(MemOperand(esp, eax, times_4, 0), edi);
......
@@ -142,8 +142,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
// Run the native code for the Array function called as a normal function.
__ bind(&call);
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
@@ -1163,8 +1163,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the array construct stub (still in the caller
// context at this point).
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with a0, a1, and a3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -3037,6 +3037,170 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ Ret();
}
namespace {
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// a0 - number of arguments
// a1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ And(kScratchReg, a3, Operand(1));
__ Branch(&normal_sequence, ne, kScratchReg, Operand(zero_reg));
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Addu(a3, a3, Operand(1));
if (FLAG_debug_code) {
__ lw(t1, FieldMemOperand(a2, 0));
__ LoadRoot(kScratchReg, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite, t1,
Operand(kScratchReg));
}
// Save the resulting elements kind in type info. We can't just store a3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ lw(t0, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ sw(t0, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ And(kScratchReg, a0, a0);
__ Branch(&not_zero_case, ne, kScratchReg, Operand(zero_reg));
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ Branch(&not_one_case, gt, a0, Operand(1));
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- a1 : constructor
// -- a2 : AllocationSite or undefined
// -- a3 : Original constructor
// -- sp[0] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ SmiTst(t0, kScratchReg);
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
kScratchReg, Operand(zero_reg));
__ GetObjectType(t0, t0, t1);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, t1,
Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(a2, t0);
}
// Enter the context of the Array function.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Label subclassing;
__ Branch(&subclassing, ne, a1, Operand(a3));
Label no_info;
// Get the elements kind and case on that.
__ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex);
__ Branch(&no_info, eq, a2, Operand(kScratchReg));
__ lw(a3, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(a3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
__ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
__ sw(a1, MemOperand(kScratchReg));
__ li(kScratchReg, Operand(3));
__ addu(a0, a0, kScratchReg);
__ Push(a3, a2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ sll(t9, a0, kPointerSizeLog2);
__ Addu(t9, sp, t9);
......
@@ -142,8 +142,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
// Run the native code for the Array function called as a normal function.
__ bind(&call);
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
@@ -1162,8 +1162,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with a0, a1, and a3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -3059,6 +3059,170 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ Ret();
}
namespace {
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// a0 - number of arguments
// a1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ And(kScratchReg, a3, Operand(1));
__ Branch(&normal_sequence, ne, kScratchReg, Operand(zero_reg));
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Daddu(a3, a3, Operand(1));
if (FLAG_debug_code) {
__ Ld(a5, FieldMemOperand(a2, 0));
__ LoadRoot(kScratchReg, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite, a5,
Operand(kScratchReg));
}
// Save the resulting elements kind in type info. We can't just store a3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ Ld(a4, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ Sd(a4, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ And(kScratchReg, a0, a0);
__ Branch(&not_zero_case, ne, kScratchReg, Operand(zero_reg));
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ Branch(&not_one_case, gt, a0, Operand(1));
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc (only if argument_count() == ANY)
// -- a1 : constructor
// -- a2 : AllocationSite or undefined
// -- a3 : new target
// -- sp[0] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ Ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
__ SmiTst(a4, kScratchReg);
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
kScratchReg, Operand(zero_reg));
__ GetObjectType(a4, a4, a5);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a5,
Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(a2, a4);
}
// Enter the context of the Array function.
__ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Label subclassing;
__ Branch(&subclassing, ne, a1, Operand(a3));
Label no_info;
// Get the elements kind and case on that.
__ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex);
__ Branch(&no_info, eq, a2, Operand(kScratchReg));
__ Ld(a3, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(a3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
__ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
__ Sd(a1, MemOperand(kScratchReg));
__ li(kScratchReg, Operand(3));
__ Daddu(a0, a0, kScratchReg);
__ Push(a3, a2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ dsll(t9, a0, kPointerSizeLog2);
__ Daddu(t9, sp, t9);
......
@@ -1194,8 +1194,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// context at this point).
__ AssertFunction(rdi);
// Jump to the constructor function (rax, rbx, rdx passed on).
ArrayConstructorStub array_constructor_stub(masm->isolate());
__ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor (rax, rdx, rdi passed on).
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -1932,8 +1932,8 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
// Run the native code for the Array function called as a normal function.
__ bind(&call);
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
__ Jump(code, RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
@@ -3096,6 +3096,183 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ ret(0);
}
namespace {
template <class T>
void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
// rax - number of arguments
// rdi - constructor?
// rsp[0] - return address
// rsp[8] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(
masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ testb(rdx, Immediate(1));
__ j(not_zero, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ incl(rdx);
if (FLAG_debug_code) {
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rbx, 0), allocation_site_map);
__ Assert(equal, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't just store r3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ SmiAddConstant(
FieldOperand(rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
Smi::FromInt(kFastElementsKindPackedToHoley));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ testp(rax, rax);
__ j(not_zero, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmpl(rax, Immediate(1));
__ j(greater, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
__ Jump(code, RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_ArrayConstructorImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : argc
// -- rbx : AllocationSite or undefined
// -- rdi : constructor
// -- rdx : new target
// -- rsp[0] : return address
// -- rsp[8] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
__ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
__ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in rbx or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(rbx);
}
// Enter the context of the Array function.
__ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Label subclassing;
__ cmpp(rdi, rdx);
__ j(not_equal, &subclassing);
Label no_info;
// If the feedback vector is the undefined value call an array constructor
// that doesn't use AllocationSites.
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(equal, &no_info);
// Only look at the lower 16 bits of the transition info.
__ movp(rdx, FieldOperand(
rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiToInteger32(rdx, rdx);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing
__ bind(&subclassing);
StackArgumentsAccessor args(rsp, rax);
__ movp(args.GetReceiverOperand(), rdi);
__ addp(rax, Immediate(3));
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ Push(rbx);
__ PushReturnAddressFrom(rcx);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void Builtins::Generate_ArrayNArgumentsConstructor(MacroAssembler* masm) {
__ popq(rcx);
__ movq(MemOperand(rsp, rax, times_8, 0), rdi);
......
@@ -333,12 +333,6 @@ Callable CodeFactory::InterpreterOnStackReplacement(Isolate* isolate) {
ContextOnlyDescriptor(isolate));
}
// static
Callable CodeFactory::ArrayConstructor(Isolate* isolate) {
ArrayConstructorStub stub(isolate);
return make_callable(stub);
}
// static
Callable CodeFactory::ArrayPop(Isolate* isolate) {
return Callable(BUILTIN_CODE(isolate, ArrayPop), BuiltinDescriptor(isolate));
......
@@ -85,7 +85,6 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1);
static Callable InterpreterOnStackReplacement(Isolate* isolate);
static Callable ArrayConstructor(Isolate* isolate);
static Callable ArrayPop(Isolate* isolate);
static Callable ArrayPush(Isolate* isolate);
static Callable ArrayShift(Isolate* isolate);
......
@@ -592,9 +592,6 @@ TF_STUB(InternalArraySingleArgumentConstructorStub, ArrayConstructorAssembler) {
stub->elements_kind(), DONT_TRACK_ALLOCATION_SITE);
}
ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
InternalArrayConstructorStub::InternalArrayConstructorStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
......
@@ -28,7 +28,6 @@ class Node;
// List of code stubs used on all platforms.
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
/* --- PlatformCodeStubs --- */ \
V(ArrayConstructor) \
V(CallApiCallback) \
V(CallApiGetter) \
V(InternalArrayConstructor) \
@@ -485,19 +484,6 @@ enum AllocationSiteOverrideMode {
LAST_ALLOCATION_SITE_OVERRIDE_MODE = DISABLE_ALLOCATION_SITES
};
// TODO(jgruber): Convert this stub into a builtin.
class ArrayConstructorStub: public PlatformCodeStub {
public:
explicit ArrayConstructorStub(Isolate* isolate);
private:
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode);
DEFINE_CALL_INTERFACE_DESCRIPTOR(ArrayConstructor);
DEFINE_PLATFORM_CODE_STUB(ArrayConstructor, PlatformCodeStub);
};
// TODO(jgruber): Convert this stub into a builtin.
class InternalArrayConstructorStub: public PlatformCodeStub {
public:
......
......@@ -118,7 +118,7 @@ DEFINE_GETTER(AllocateInOldSpaceStubConstant,
HeapConstant(BUILTIN_CODE(isolate(), AllocateInOldSpace)))
DEFINE_GETTER(ArrayConstructorStubConstant,
HeapConstant(ArrayConstructorStub(isolate()).GetCode()))
HeapConstant(BUILTIN_CODE(isolate(), ArrayConstructorImpl)))
DEFINE_GETTER(ToNumberBuiltinConstant,
HeapConstant(BUILTIN_CODE(isolate(), ToNumber)))
......
......@@ -163,105 +163,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
__ ret(0);
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(),
GetInitialFastElementsKind(),
mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
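// Probe the fast ElementsKinds in sequence order and tail call the stub
// specialized for the matching kind.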
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// edx - kind (if mode != DISABLE_ALLOCATION_SITES)
// eax - number of arguments
// edi - constructor?
// esp[0] - return address
// esp[4] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
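// Without an AllocationSite to track transitions, conservatively use the
// holey variant of the initial elements kind.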
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ test_b(edx, Immediate(1));
__ j(not_zero, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry.
__ inc(edx);
if (FLAG_debug_code) {
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
__ Assert(equal, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't simply overwrite
// the AllocationSite::transition_info field because the elements kind is
// restricted to a portion of the field; the upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ add(
FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
......@@ -293,85 +194,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm, AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
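// Dispatch on the argument count: zero arguments, one argument, or N.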
__ test(eax, eax);
__ j(not_zero, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmp(eax, 1);
__ j(greater, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- ebx : AllocationSite or undefined
// -- edi : constructor
// -- edx : Original constructor
// -- esp[0] : return address
// -- esp[4] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// A Smi check catches both a nullptr and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in ebx or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(ebx);
}
Label subclassing;
// Enter the context of the Array function.
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
__ cmp(edx, edi);
__ j(not_equal, &subclassing);
Label no_info;
// If there is no AllocationSite (ebx is undefined), call an array
// constructor that doesn't use AllocationSites.
__ cmp(ebx, isolate()->factory()->undefined_value());
__ j(equal, &no_info);
// Extract the elements kind from the transition info.
__ mov(edx,
FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(edx);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
__ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
__ add(eax, Immediate(3));
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ Push(ebx);
__ PushReturnAddressFrom(ecx);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
Label not_zero_case, not_one_case;
......
......@@ -693,6 +693,7 @@ class IteratingArrayBuiltinDescriptor : public BuiltinDescriptor {
DECLARE_BUILTIN_DESCRIPTOR(IteratingArrayBuiltinDescriptor)
};
// TODO(jgruber): Replace with generic TFS descriptor.
class ArrayConstructorDescriptor : public CallInterfaceDescriptor {
public:
DEFINE_PARAMETERS(kTarget, kNewTarget, kActualArgumentsCount, kAllocationSite)
......
......@@ -312,96 +312,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
__ Ret();
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// a0 - number of arguments
// a1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ And(kScratchReg, a3, Operand(1));
__ Branch(&normal_sequence, ne, kScratchReg, Operand(zero_reg));
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Addu(a3, a3, Operand(1));
if (FLAG_debug_code) {
__ lw(t1, FieldMemOperand(a2, 0));
__ LoadRoot(kScratchReg, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite, t1,
Operand(kScratchReg));
}
// Save the resulting elements kind in type info. We can't just store a3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ lw(t0, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ sw(t0, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
......@@ -432,84 +342,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
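// Copy argc into the scratch register so the branch below can compare it
// against zero.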
__ And(kScratchReg, a0, a0);
__ Branch(&not_zero_case, ne, kScratchReg, Operand(zero_reg));
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ Branch(&not_one_case, gt, a0, Operand(1));
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- a1 : constructor
// -- a2 : AllocationSite or undefined
// -- a3 : Original constructor
// -- sp[0] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// A Smi check catches both a nullptr and a Smi.
__ SmiTst(t0, kScratchReg);
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
kScratchReg, Operand(zero_reg));
__ GetObjectType(t0, t0, t1);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, t1,
Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(a2, t0);
}
// Enter the context of the Array function.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Label subclassing;
__ Branch(&subclassing, ne, a1, Operand(a3));
Label no_info;
// Get the elements kind and dispatch on it.
__ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex);
__ Branch(&no_info, eq, a2, Operand(kScratchReg));
__ lw(a3, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(a3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
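// Compute the address of the receiver slot (sp + argc * kPointerSize) and
// store the constructor there.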
__ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
__ sw(a1, MemOperand(kScratchReg));
__ li(kScratchReg, Operand(3));
__ addu(a0, a0, kScratchReg);
__ Push(a3, a2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
......
......@@ -314,96 +314,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
__ Ret();
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
// a0 - number of arguments
// a1 - constructor?
// sp[0] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ And(kScratchReg, a3, Operand(1));
__ Branch(&normal_sequence, ne, kScratchReg, Operand(zero_reg));
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ Daddu(a3, a3, Operand(1));
if (FLAG_debug_code) {
__ Ld(a5, FieldMemOperand(a2, 0));
__ LoadRoot(kScratchReg, Heap::kAllocationSiteMapRootIndex);
__ Assert(eq, AbortReason::kExpectedAllocationSite, a5,
Operand(kScratchReg));
}
// Save the resulting elements kind in type info. We can't just store a3
// in the AllocationSite::transition_info field because elements kind is
// restricted to a portion of the field...upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ Ld(a4, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ Sd(a4, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
......@@ -435,83 +345,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ And(kScratchReg, a0, a0);
__ Branch(&not_zero_case, ne, kScratchReg, Operand(zero_reg));
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ Branch(&not_one_case, gt, a0, Operand(1));
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc (only if argument_count() == ANY)
// -- a1 : constructor
// -- a2 : AllocationSite or undefined
// -- a3 : new target
// -- sp[0] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ Ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// A Smi check catches both a nullptr and a Smi.
__ SmiTst(a4, kScratchReg);
__ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
kScratchReg, Operand(zero_reg));
__ GetObjectType(a4, a4, a5);
__ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a5,
Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(a2, a4);
}
// Enter the context of the Array function.
__ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Label subclassing;
__ Branch(&subclassing, ne, a1, Operand(a3));
Label no_info;
// Get the elements kind and dispatch on it.
__ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex);
__ Branch(&no_info, eq, a2, Operand(kScratchReg));
__ Ld(a3, FieldMemOperand(
a2, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiUntag(a3);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
__ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
__ Sd(a1, MemOperand(kScratchReg));
__ li(kScratchReg, Operand(3));
__ Daddu(a0, a0, kScratchReg);
__ Push(a3, a2);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
......
......@@ -230,103 +230,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
}
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
// rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
// rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
// rax - number of arguments
// rdi - constructor?
// rsp[0] - return address
// rsp[8] - last argument
STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
STATIC_ASSERT(PACKED_ELEMENTS == 2);
STATIC_ASSERT(HOLEY_ELEMENTS == 3);
STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);
if (mode == DISABLE_ALLOCATION_SITES) {
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
} else if (mode == DONT_OVERRIDE) {
// Is the low bit set? If so, we are holey and that is good.
Label normal_sequence;
__ testb(rdx, Immediate(1));
__ j(not_zero, &normal_sequence);
// We are going to create a holey array, but our kind is non-holey.
// Fix kind and retry (only if we have an allocation site in the slot).
__ incl(rdx);
if (FLAG_debug_code) {
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rbx, 0), allocation_site_map);
__ Assert(equal, AbortReason::kExpectedAllocationSite);
}
// Save the resulting elements kind in type info. We can't simply overwrite
// the AllocationSite::transition_info field because the elements kind is
// restricted to a portion of the field; the upper bits need to be left alone.
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ SmiAddConstant(
FieldOperand(rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
Smi::FromInt(kFastElementsKindPackedToHoley));
__ bind(&normal_sequence);
int last_index =
GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
// If we reached this point there is a problem.
__ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
} else {
UNREACHABLE();
}
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index =
......@@ -358,86 +261,6 @@ void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
}
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
MacroAssembler* masm, AllocationSiteOverrideMode mode) {
Label not_zero_case, not_one_case;
__ testp(rax, rax);
__ j(not_zero, &not_zero_case);
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
__ bind(&not_zero_case);
__ cmpl(rax, Immediate(1));
__ j(greater, &not_one_case);
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
__ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
RelocInfo::CODE_TARGET);
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : argc
// -- rbx : AllocationSite or undefined
// -- rdi : constructor
// -- rdx : new target
// -- rsp[0] : return address
// -- rsp[8] : last argument
// -----------------------------------
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.
// Initial map for the builtin Array function should be a map.
__ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// A Smi check catches both a nullptr and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
__ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
__ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
// We should either have undefined in rbx or a valid AllocationSite
__ AssertUndefinedOrAllocationSite(rbx);
}
// Enter the context of the Array function.
__ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Label subclassing;
__ cmpp(rdi, rdx);
__ j(not_equal, &subclassing);
Label no_info;
// If there is no AllocationSite (rbx is undefined), call an array
// constructor that doesn't use AllocationSites.
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(equal, &no_info);
// Extract the elements kind from the transition info.
__ movp(rdx, FieldOperand(
rbx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
__ SmiToInteger32(rdx, rdx);
STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
__ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
// Subclassing.
__ bind(&subclassing);
StackArgumentsAccessor args(rsp, rax);
__ movp(args.GetReceiverOperand(), rdi);
__ addp(rax, Immediate(3));
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ Push(rbx);
__ PushReturnAddressFrom(rcx);
__ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray));
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
Label not_zero_case, not_one_case;
......