Commit 198c4769 authored by danno@chromium.org

Unify and simplify the FastCloneShallowArrayStub

- Don't bake the length/capacity into full-codegen calls of the stub,
  allowing boilerplates to increase their capacity without regenerating
  code.
- Unify all variants of the clone stub into a single,
  length-independent version.
- Various tweaks to make sure that the clone stub doesn't spill and
  therefore need an eager stack frame.
- Handle all lengths of array literals in the fast case.
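
For orientation, a minimal stand-alone sketch follows (it is not V8 code; the Boilerplate struct, ElementsMap enum, and CloneShallowArrayPath function are stand-ins invented for illustration). It models the run-time dispatch the unified stub now performs, per the Hydrogen builder changes in this diff: rather than baking an elements mode and length into the generated code, the stub inspects the boilerplate's elements map and capacity and picks the empty, copy-on-write, FAST_ELEMENTS, or FAST_DOUBLE_ELEMENTS clone path.

// Minimal stand-alone sketch, not V8 code: all types and names here are
// stand-ins used only to illustrate the dispatch described above.
#include <cstdio>

enum ElementsMap { FIXED_COW_ARRAY_MAP, FIXED_ARRAY_MAP, FIXED_DOUBLE_ARRAY_MAP };

struct Boilerplate {
  ElementsMap elements_map;  // map of the boilerplate's backing store
  int capacity;              // capacity of the backing FixedArray
};

// Mirrors the branches built by CodeStubGraphBuilder<FastCloneShallowArrayStub>:
// zero capacity -> empty clone, COW map -> share the elements, fixed-array map
// -> copy as FAST_ELEMENTS, otherwise copy as FAST_DOUBLE_ELEMENTS.
const char* CloneShallowArrayPath(const Boilerplate& b) {
  if (b.capacity == 0) return "BuildCloneShallowArrayEmpty";
  if (b.elements_map == FIXED_COW_ARRAY_MAP) return "BuildCloneShallowArrayCow";
  if (b.elements_map == FIXED_ARRAY_MAP)
    return "BuildCloneShallowArrayNonEmpty(FAST_ELEMENTS)";
  return "BuildCloneShallowArrayNonEmpty(FAST_DOUBLE_ELEMENTS)";
}

int main() {
  Boilerplate cases[] = {{FIXED_ARRAY_MAP, 0},
                         {FIXED_COW_ARRAY_MAP, 4},
                         {FIXED_ARRAY_MAP, 4},
                         {FIXED_DOUBLE_ARRAY_MAP, 4}};
  for (const Boilerplate& b : cases) std::printf("%s\n", CloneShallowArrayPath(b));
  return 0;
}

Because the choice is made at run time from the boilerplate itself, one stub body serves every literal length, which is what lets full codegen drop the mode and length arguments at its call sites below.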

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/257563004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20974 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 9a842209
@@ -81,6 +81,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { r3, r2, r1 };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -224,6 +229,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -251,6 +261,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
......
@@ -1821,33 +1821,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
} else if (expr->depth() > 1 || Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
......
@@ -88,6 +88,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { x3, x2, x1 };
descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -253,6 +258,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -299,6 +309,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
......
@@ -1824,35 +1824,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
__ Mov(x2, Smi::FromInt(expr->literal_index()));
__ Mov(x1, Operand(constant_elements));
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
} else if ((expr->depth() > 1) || Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ Mov(x0, Smi::FromInt(flags));
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
......
@@ -150,9 +150,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
HInstruction* stack_parameter_count = NULL;
for (int i = 0; i < param_count; ++i) {
Representation r = descriptor_->IsParameterCountRegister(i)
? Representation::Integer32()
: Representation::Tagged();
Representation r = descriptor_->register_param_representations_ == NULL
? Representation::Tagged()
: descriptor_->register_param_representations_[i];
HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
start_environment->Bind(i, param);
parameters_[i] = param;
@@ -353,8 +353,6 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
Factory* factory = isolate()->factory();
HValue* undefined = graph()->GetConstantUndefined();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
int length = casted_stub()->length();
HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
GetParameter(1),
@@ -369,46 +367,40 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
AllocationSite::kTransitionInfoOffset);
HInstruction* boilerplate = Add<HLoadNamedField>(
allocation_site, static_cast<HValue*>(NULL), access);
HValue* push_value;
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements = AddLoadElements(boilerplate);
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
0/*copy-on-write*/);
environment()->Push(push_value);
if_fixed_cow.Else();
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
length);
environment()->Push(push_value);
if_fixed.Else();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS,
length);
environment()->Push(push_value);
} else {
ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
elements_kind,
length);
environment()->Push(push_value);
}
HValue* elements = AddLoadElements(boilerplate);
HValue* capacity = AddLoadFixedArrayLength(elements);
IfBuilder zero_capacity(this);
zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
Token::EQ);
zero_capacity.Then();
Push(BuildCloneShallowArrayEmpty(boilerplate,
allocation_site,
alloc_site_mode));
zero_capacity.Else();
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
Push(BuildCloneShallowArrayCow(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS));
if_fixed_cow.Else();
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS));
if_fixed.Else();
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS));
if_fixed.End();
if_fixed_cow.End();
zero_capacity.End();
checker.ElseDeopt("Uninitialized boilerplate literals");
checker.End();
......
@@ -45,6 +45,7 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
register_param_representations_(NULL),
deoptimization_handler_(NULL),
handler_arguments_mode_(DONT_PASS_ARGUMENTS),
miss_handler_(),
@@ -752,9 +753,7 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
// static
void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
FastCloneShallowArrayStub stub(isolate,
FastCloneShallowArrayStub::CLONE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE, 0);
FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
InstallDescriptor(isolate, &stub);
}
......
@@ -300,6 +300,7 @@ struct CodeStubInterfaceDescriptor {
int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
Representation* register_param_representations_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
@@ -604,50 +605,18 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
static const int kMaximumClonedLength = 8;
enum Mode {
CLONE_ELEMENTS,
CLONE_DOUBLE_ELEMENTS,
COPY_ON_WRITE_ELEMENTS,
CLONE_ANY_ELEMENTS,
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
};
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
static const int kMaximumInlinedCloneLength = 8;
FastCloneShallowArrayStub(Isolate* isolate,
Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
AllocationSiteMode allocation_site_mode)
: HydrogenCodeStub(isolate),
mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedLength);
}
allocation_site_mode_(allocation_site_mode) {}
Mode mode() const { return mode_; }
int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
ElementsKind ComputeElementsKind() const {
switch (mode()) {
case CLONE_ELEMENTS:
case COPY_ON_WRITE_ELEMENTS:
return FAST_ELEMENTS;
case CLONE_DOUBLE_ELEMENTS:
return FAST_DOUBLE_ELEMENTS;
case CLONE_ANY_ELEMENTS:
/*fall-through*/;
}
UNREACHABLE();
return LAST_ELEMENTS_KIND;
}
virtual Handle<Code> GenerateCode() V8_OVERRIDE;
virtual Handle<Code> GenerateCode();
virtual void InitializeInterfaceDescriptor(
CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -655,22 +624,13 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
static void InstallDescriptors(Isolate* isolate);
private:
Mode mode_;
AllocationSiteMode allocation_site_mode_;
int length_;
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
class ModeBits: public BitField<Mode, 1, 4> {};
class LengthBits: public BitField<int, 5, 4> {};
// Ensure data fits within available bits.
STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
STATIC_ASSERT(kFastCloneModeCount < 16);
STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int NotMissMinorKey() {
return AllocationSiteModeBits::encode(allocation_site_mode_)
| ModeBits::encode(mode_)
| LengthBits::encode(length_);
return AllocationSiteModeBits::encode(allocation_site_mode_);
}
};
......
@@ -1421,7 +1421,8 @@ class HGraphBuilder {
store_map->SkipWriteBarrier();
return store_map;
}
HLoadNamedField* AddLoadElements(HValue* object);
HLoadNamedField* AddLoadElements(HValue* object,
HValue* dependency = NULL);
bool MatchRotateRight(HValue* left,
HValue* right,
@@ -1437,7 +1438,12 @@
Maybe<int> fixed_right_arg,
HAllocationMode allocation_mode);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
HValue *dependency = NULL);
HLoadNamedField* AddLoadArrayLength(HValue *object,
ElementsKind kind,
HValue *dependency = NULL);
HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
@@ -1780,18 +1786,33 @@
HValue* from,
HValue* to);
void BuildCopyElements(HValue* from_elements,
void BuildCopyElements(HValue* array,
HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity);
HValue* BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length);
HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
HValue* allocation_site,
HValue* extra_size,
HValue** return_elements,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildElementIndexHash(HValue* index);
......
@@ -86,6 +86,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { eax, ebx, ecx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -217,6 +222,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -244,6 +254,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
......
@@ -1758,24 +1758,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
@@ -1783,25 +1766,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
......
@@ -1845,7 +1845,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1, a1, a2);
} else if (expr->depth() > 1 || Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
length > FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
......
@@ -128,6 +128,7 @@ namespace internal {
SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
SC(call_normal_stubs, V8.CallNormalStubs) \
SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
SC(inlined_copyied_elements, V8.InlinedCopiedElements) \
SC(arguments_adaptors, V8.ArgumentsAdaptors) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \
......
@@ -82,6 +82,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { rax, rbx, rcx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -211,6 +216,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 3;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
descriptor->register_params_ = registers_variable_args;
}
@@ -239,6 +249,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
......
@@ -1796,24 +1796,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
@@ -1821,24 +1804,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode, length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
......