Commit df2578d2 authored by danno's avatar danno Committed by Commit bot

[stubs] Port builtin for Array.push fast-case from Crankshaft to TF

Improves performance in the simple, single-element case by 5% and in
multiple-element cases by 2%.

BUG=chromium:608675
LOG=N

Review-Url: https://codereview.chromium.org/2497243002
Cr-Commit-Position: refs/heads/master@{#41368}
parent f8b89839
......@@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// ARM descriptor for the (Hydrogen) FastArrayPushStub: r0 — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// ARM64 descriptor for the (Hydrogen) FastArrayPushStub: x0 — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(x0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -150,8 +150,9 @@ MUST_USE_RESULT static Object* CallJsIntrinsic(Isolate* isolate,
isolate,
Execution::Call(isolate, function, args.receiver(), argc, argv.start()));
}
} // namespace
Object* DoArrayPush(Isolate* isolate, BuiltinArguments args) {
BUILTIN(ArrayPush) {
HandleScope scope(isolate);
Handle<Object> receiver = args.receiver();
if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1)) {
......@@ -174,19 +175,163 @@ Object* DoArrayPush(Isolate* isolate, BuiltinArguments args) {
int new_length = accessor->Push(array, &args, to_add);
return Smi::FromInt(new_length);
}
} // namespace
BUILTIN(ArrayPush) { return DoArrayPush(isolate, args); }
// TODO(verwaest): This is a temporary helper until the FastArrayPush stub can
// tailcall to the builtin directly.
RUNTIME_FUNCTION(Runtime_ArrayPush) {
DCHECK_EQ(2, args.length());
Arguments* incoming = reinterpret_cast<Arguments*>(args[0]);
// Rewrap the arguments as builtins arguments.
int argc = incoming->length() + BuiltinArguments::kNumExtraArgsWithReceiver;
BuiltinArguments caller_args(argc, incoming->arguments() + 1);
return DoArrayPush(isolate, caller_args);
// TurboFan (CodeStubAssembler) fast path for Array.prototype.push.
// Fast-appends arguments in place for smi/object/double elements kinds;
// on a per-element kind mismatch it transitions via Runtime::kSetProperty
// and resumes, and for any non-fast receiver or failed guard it tail-calls
// the generic C++ ArrayPush builtin.
void Builtins::Generate_FastArrayPush(compiler::CodeAssemblerState* state) {
typedef compiler::Node Node;
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
CodeStubAssembler assembler(state);
// Index of the next unprocessed argument; carried through the transition
// labels so each fallback resumes exactly where the fast loop stopped.
Variable arg_index(&assembler, MachineType::PointerRepresentation());
Label default_label(&assembler, &arg_index);
Label smi_transition(&assembler);
Label object_push_pre(&assembler);
Label object_push(&assembler, &arg_index);
Label double_push(&assembler, &arg_index);
Label double_transition(&assembler);
Label runtime(&assembler, Label::kDeferred);
// BuiltinDescriptor parameter order here: 0 = new target, 1 = argument
// count, 2 = context (the target is reloaded from the frame when needed).
Node* argc = assembler.Parameter(1);
Node* context = assembler.Parameter(2);
Node* new_target = assembler.Parameter(0);
CodeStubArguments args(&assembler, argc);
Node* receiver = args.GetReceiver();
Node* kind = nullptr;
Label fast(&assembler);
{
assembler.BranchIfFastJSArray(
receiver, context, CodeStubAssembler::FastJSArrayAccessMode::ANY_ACCESS,
&fast, &runtime);
}
assembler.Bind(&fast);
{
// Disallow pushing onto prototypes. It might be the JSArray prototype.
// Disallow pushing onto non-extensible objects.
assembler.Comment("Disallow pushing onto prototypes");
Node* map = assembler.LoadMap(receiver);
Node* bit_field2 = assembler.LoadMapBitField2(map);
int mask = static_cast<int>(Map::IsPrototypeMapBits::kMask) |
(1 << Map::kIsExtensible);
// One masked compare enforces both: is_prototype bit clear AND the
// extensible bit set.
Node* test = assembler.Word32And(bit_field2, assembler.Int32Constant(mask));
assembler.GotoIf(
assembler.Word32NotEqual(
test, assembler.Int32Constant(1 << Map::kIsExtensible)),
&runtime);
// Disallow pushing onto arrays in dictionary named property mode. We need
// to figure out whether the length property is still writable.
assembler.Comment(
"Disallow pushing onto arrays in dictionary named property mode");
Node* bit_field3 = assembler.LoadMapBitField3(map);
assembler.GotoIf(assembler.IsSetWord32<Map::DictionaryMap>(bit_field3),
&runtime);
// Check whether the length property is writable. The length property is the
// only default named property on arrays. It's nonconfigurable, hence is
// guaranteed to stay the first property.
Node* descriptors = assembler.LoadMapDescriptors(map);
Node* details = assembler.LoadFixedArrayElement(
descriptors,
assembler.Int32Constant(DescriptorArray::ToDetailsIndex(0)));
mask = READ_ONLY << PropertyDetails::AttributesField::kShift;
Node* mask_node = assembler.SmiConstant(mask);
test = assembler.WordAnd(details, mask_node);
assembler.GotoIf(assembler.WordEqual(test, mask_node), &runtime);
arg_index.Bind(assembler.IntPtrConstant(0));
kind = assembler.DecodeWord32<Map::ElementsKindBits>(bit_field2);
// Kinds above FAST_HOLEY_SMI_ELEMENTS are object/double kinds; dispatch
// there, otherwise try the smi append loop first.
assembler.GotoIf(
assembler.IntPtrGreaterThan(
kind, assembler.IntPtrConstant(FAST_HOLEY_SMI_ELEMENTS)),
&object_push_pre);
Node* new_length = assembler.BuildAppendJSArray(
FAST_SMI_ELEMENTS, context, receiver, args, arg_index, &smi_transition);
args.PopAndReturn(new_length);
}
// If the argument is not a smi, then use a heavyweight SetProperty to
// transition the array for only the single next element. If the argument is
// a smi, the failure is due to some other reason and we should fall back on
// the most generic implementation for the rest of the array.
assembler.Bind(&smi_transition);
{
Node* arg = args.AtIndex(arg_index.value());
assembler.GotoIf(assembler.TaggedIsSmi(arg), &default_label);
Node* length = assembler.LoadJSArrayLength(receiver);
// TODO(danno): Use the KeyedStoreGeneric stub here when possible,
// calling into the runtime to do the elements transition is overkill.
assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
assembler.SmiConstant(STRICT));
assembler.Increment(arg_index);
// The store above transitioned the elements kind: numbers continue in the
// double loop, anything else in the object loop.
assembler.GotoIfNotNumber(arg, &object_push);
assembler.Goto(&double_push);
}
assembler.Bind(&object_push_pre);
{
assembler.Branch(assembler.IntPtrGreaterThan(
kind, assembler.IntPtrConstant(FAST_HOLEY_ELEMENTS)),
&double_push, &object_push);
}
assembler.Bind(&object_push);
{
Node* new_length = assembler.BuildAppendJSArray(
FAST_ELEMENTS, context, receiver, args, arg_index, &default_label);
args.PopAndReturn(new_length);
}
assembler.Bind(&double_push);
{
Node* new_length =
assembler.BuildAppendJSArray(FAST_DOUBLE_ELEMENTS, context, receiver,
args, arg_index, &double_transition);
args.PopAndReturn(new_length);
}
// If the argument is not a double, then use a heavyweight SetProperty to
// transition the array for only the single next element. If the argument is
// a double, the failure is due to some other reason and we should fall back
// on the most generic implementation for the rest of the array.
assembler.Bind(&double_transition);
{
Node* arg = args.AtIndex(arg_index.value());
assembler.GotoIfNumber(arg, &default_label);
Node* length = assembler.LoadJSArrayLength(receiver);
// TODO(danno): Use the KeyedStoreGeneric stub here when possible,
// calling into the runtime to do the elements transition is overkill.
assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
assembler.SmiConstant(STRICT));
assembler.Increment(arg_index);
assembler.Goto(&object_push);
}
// Fallback that stores un-processed arguments using the full, heavyweight
// SetProperty machinery.
assembler.Bind(&default_label);
{
args.ForEach(
[receiver, context, &arg_index](CodeStubAssembler* assembler,
Node* arg) {
Node* length = assembler->LoadJSArrayLength(receiver);
assembler->CallRuntime(Runtime::kSetProperty, context, receiver,
length, arg, assembler->SmiConstant(STRICT));
},
arg_index.value());
args.PopAndReturn(assembler.LoadJSArrayLength(receiver));
}
assembler.Bind(&runtime);
{
// Not a fast array, or a guard failed before any element was stored:
// tail call the generic ArrayPush C++ builtin with the original
// target/new_target/argc so it re-does the whole operation.
Node* target = assembler.LoadFromFrame(
StandardFrameConstants::kFunctionOffset, MachineType::TaggedPointer());
assembler.TailCallStub(CodeFactory::ArrayPush(assembler.isolate()), context,
target, new_target, argc);
}
}
BUILTIN(ArrayPop) {
......@@ -1294,7 +1439,9 @@ void Builtins::Generate_ArrayIncludes(compiler::CodeAssemblerState* state) {
// Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required.
assembler.BranchIfFastJSArray(array, context, &init_len, &call_runtime);
assembler.BranchIfFastJSArray(
array, context, CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
&init_len, &call_runtime);
assembler.Bind(&init_len);
{
......@@ -1735,7 +1882,9 @@ void Builtins::Generate_ArrayIndexOf(compiler::CodeAssemblerState* state) {
// Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required.
assembler.BranchIfFastJSArray(array, context, &init_len, &call_runtime);
assembler.BranchIfFastJSArray(
array, context, CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
&init_len, &call_runtime);
assembler.Bind(&init_len);
{
......
......@@ -218,6 +218,7 @@ namespace internal {
TFJ(ArrayIndexOf, 2) \
CPP(ArrayPop) \
CPP(ArrayPush) \
TFJ(FastArrayPush, -1) \
CPP(ArrayShift) \
CPP(ArraySlice) \
CPP(ArraySplice) \
......
......@@ -495,5 +495,10 @@ Callable CodeFactory::InterpreterOnStackReplacement(Isolate* isolate) {
ContextOnlyDescriptor(isolate));
}
// static
Callable CodeFactory::ArrayPush(Isolate* isolate) {
  // Pair the generic ArrayPush builtin's code object with the builtin call
  // descriptor (target, new_target, argument count) so stubs can tail-call it.
  Handle<Code> code = isolate->builtins()->ArrayPush();
  return Callable(code, BuiltinDescriptor(isolate));
}
} // namespace internal
} // namespace v8
......@@ -166,6 +166,8 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable InterpreterPushArgsAndConstructArray(Isolate* isolate);
static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1);
static Callable InterpreterOnStackReplacement(Isolate* isolate);
static Callable ArrayPush(Isolate* isolate);
};
} // namespace internal
......
This diff is collapsed.
......@@ -15,6 +15,7 @@ namespace v8 {
namespace internal {
class CallInterfaceDescriptor;
class CodeStubArguments;
class StatsCounter;
class StubCache;
......@@ -74,6 +75,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return Is64() ? INTPTR_PARAMETERS : SMI_PARAMETERS;
}
MachineRepresentation OptimalParameterRepresentation() const {
return OptimalParameterMode() == INTPTR_PARAMETERS
? MachineType::PointerRepresentation()
: MachineRepresentation::kTaggedSigned;
}
Node* UntagParameter(Node* value, ParameterMode mode) {
if (mode != SMI_PARAMETERS) value = SmiUntag(value);
return value;
......@@ -144,6 +151,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Smi | HeapNumber operations.
Node* NumberInc(Node* value);
void GotoIfNotNumber(Node* value, Label* is_not_number);
void GotoIfNumber(Node* value, Label* is_number);
// Allocate an object of the given size.
Node* Allocate(Node* size, AllocationFlags flags = kNone);
......@@ -158,6 +167,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Check a value for smi-ness
Node* TaggedIsSmi(Node* a);
Node* TaggedIsNotSmi(Node* a);
// Check that the value is a non-negative smi.
Node* WordIsPositiveSmi(Node* a);
// Check that a word has a word-aligned address.
......@@ -195,7 +205,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false);
void BranchIfJSObject(Node* object, Label* if_true, Label* if_false);
void BranchIfFastJSArray(Node* object, Node* context, Label* if_true,
enum class FastJSArrayAccessMode { INBOUNDS_READ, ANY_ACCESS };
void BranchIfFastJSArray(Node* object, Node* context,
FastJSArrayAccessMode mode, Label* if_true,
Label* if_false);
// Load value from current frame by given offset in bytes.
......@@ -349,6 +362,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* object, Node* index, Node* value,
ParameterMode parameter_mode = INTEGER_PARAMETERS);
Node* BuildAppendJSArray(ElementsKind kind, Node* context, Node* array,
CodeStubArguments& args, Variable& arg_index,
Label* bailout);
void StoreFieldsNoWriteBarrier(Node* start_address, Node* end_address,
Node* value);
......@@ -518,6 +535,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* ChangeFloat64ToTagged(Node* value);
Node* ChangeInt32ToTagged(Node* value);
Node* ChangeUint32ToTagged(Node* value);
Node* ChangeNumberToFloat64(Node* value);
// Type conversions.
// Throws a TypeError for {method_name} if {value} is not coercible to Object,
......@@ -663,6 +681,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void IncrementCounter(StatsCounter* counter, int delta);
void DecrementCounter(StatsCounter* counter, int delta);
void Increment(Variable& variable, int value = 1,
ParameterMode mode = INTPTR_PARAMETERS);
// Generates "if (false) goto label" code. Useful for marking a label as
// "live" to avoid assertion failures during graph building. In the resulting
// code this check will be eliminated.
......@@ -1018,13 +1039,15 @@ class CodeStubArguments {
CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS);
Node* GetReceiver();
Node* GetReceiver() const;
// |index| is zero-based and does not include the receiver
Node* AtIndex(Node* index, CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS);
CodeStubAssembler::INTPTR_PARAMETERS) const;
Node* AtIndex(int index) const;
Node* AtIndex(int index);
Node* GetLength() const { return argc_; }
typedef std::function<void(CodeStubAssembler* assembler, Node* arg)>
ForEachBodyFunction;
......
......@@ -375,158 +375,6 @@ HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
return new_length;
}
// Crankshaft (Hydrogen) implementation of the Array.push fast case, now
// superseded by the TurboFan Builtins::Generate_FastArrayPush. Every guard
// failure deopts with kFastPathFailed rather than falling back in-line.
template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
// TODO(verwaest): Fix deoptimizer messages.
HValue* argc = GetArgumentsLength();
HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
// The receiver lives one slot past the arguments (index -1 from the end).
HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
graph()->GetConstantMinus1());
BuildCheckHeapObject(object);
HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_ARRAY);
// Disallow pushing onto prototypes. It might be the JSArray prototype.
// Disallow pushing onto non-extensible objects.
{
HValue* bit_field2 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
HValue* mask =
Add<HConstant>(static_cast<int>(Map::IsPrototypeMapBits::kMask) |
(1 << Map::kIsExtensible));
HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field2, mask);
IfBuilder check(this);
// Require is_prototype clear AND extensible set in one masked compare.
check.If<HCompareNumericAndBranch>(
bits, Add<HConstant>(1 << Map::kIsExtensible), Token::NE);
check.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check.End();
}
// Disallow pushing onto arrays in dictionary named property mode. We need to
// figure out whether the length property is still writable.
{
HValue* bit_field3 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
IfBuilder check(this);
check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
check.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check.End();
}
// Check whether the length property is writable. The length property is the
// only default named property on arrays. It's nonconfigurable, hence is
// guaranteed to stay the first property.
{
HValue* descriptors =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());
HValue* details = Add<HLoadKeyed>(
descriptors, Add<HConstant>(DescriptorArray::ToDetailsIndex(0)),
nullptr, nullptr, FAST_SMI_ELEMENTS);
HValue* mask =
Add<HConstant>(READ_ONLY << PropertyDetails::AttributesField::kShift);
HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, details, mask);
IfBuilder readonly(this);
readonly.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
readonly.ThenDeopt(DeoptimizeReason::kFastPathFailed);
readonly.End();
}
HValue* null = Add<HLoadRoot>(Heap::kNullValueRootIndex);
HValue* empty = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
// Walk the prototype chain: any prototype with custom elements handling or
// its own elements forces the slow path (a push could hit an inherited
// accessor or element).
environment()->Push(map);
LoopBuilder check_prototypes(this);
check_prototypes.BeginBody(1);
{
HValue* parent_map = environment()->Pop();
HValue* prototype = Add<HLoadNamedField>(parent_map, nullptr,
HObjectAccess::ForPrototype());
IfBuilder is_null(this);
is_null.If<HCompareObjectEqAndBranch>(prototype, null);
is_null.Then();
check_prototypes.Break();
is_null.End();
HValue* prototype_map =
Add<HLoadNamedField>(prototype, nullptr, HObjectAccess::ForMap());
HValue* instance_type = Add<HLoadNamedField>(
prototype_map, nullptr, HObjectAccess::ForMapInstanceType());
IfBuilder check_instance_type(this);
check_instance_type.If<HCompareNumericAndBranch>(
instance_type, Add<HConstant>(LAST_CUSTOM_ELEMENTS_RECEIVER),
Token::LTE);
check_instance_type.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check_instance_type.End();
HValue* elements = Add<HLoadNamedField>(
prototype, nullptr, HObjectAccess::ForElementsPointer());
IfBuilder no_elements(this);
no_elements.IfNot<HCompareObjectEqAndBranch>(elements, empty);
no_elements.ThenDeopt(DeoptimizeReason::kFastPathFailed);
no_elements.End();
environment()->Push(prototype_map);
}
check_prototypes.EndBody();
HValue* bit_field2 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
HValue* kind = BuildDecodeField<Map::ElementsKindBits>(bit_field2);
// Below we only check the upper bound of the relevant ranges to include both
// holey and non-holey versions. We check them in order smi, object, double
// since smi < object < double.
STATIC_ASSERT(FAST_SMI_ELEMENTS < FAST_HOLEY_SMI_ELEMENTS);
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS < FAST_HOLEY_ELEMENTS);
STATIC_ASSERT(FAST_ELEMENTS < FAST_HOLEY_ELEMENTS);
STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
IfBuilder has_smi_elements(this);
has_smi_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_SMI_ELEMENTS), Token::LTE);
has_smi_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_SMI_ELEMENTS);
environment()->Push(new_length);
}
has_smi_elements.Else();
{
IfBuilder has_object_elements(this);
has_object_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_ELEMENTS), Token::LTE);
has_object_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_ELEMENTS);
environment()->Push(new_length);
}
has_object_elements.Else();
{
IfBuilder has_double_elements(this);
has_double_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_DOUBLE_ELEMENTS), Token::LTE);
has_double_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_DOUBLE_ELEMENTS);
environment()->Push(new_length);
}
// Any other elements kind (e.g. dictionary) is not handled here.
has_double_elements.ElseDeopt(DeoptimizeReason::kFastPathFailed);
has_double_elements.End();
}
has_object_elements.End();
}
has_smi_elements.End();
// The new length was pushed by exactly one of the branches above.
return environment()->Pop();
}
// Standard Hydrogen stub entry point: builds and returns the stub's code.
Handle<Code> FastArrayPushStub::GenerateCode() { return DoGenerateCode(this); }
template <>
HValue* CodeStubGraphBuilder<FastFunctionBindStub>::BuildCodeStub() {
// TODO(verwaest): Fix deoptimizer messages.
......
......@@ -62,7 +62,6 @@ class ObjectLiteral;
/* These builtins w/ JS linkage are */ \
/* just fast-cases of C++ builtins. They */ \
/* require varg support from TF */ \
V(FastArrayPush) \
V(FastFunctionBind) \
/* These will be ported/eliminated */ \
/* as part of the new IC system, ask */ \
......@@ -1011,15 +1010,6 @@ class GrowArrayElementsStub : public TurboFanCodeStub {
DEFINE_TURBOFAN_CODE_STUB(GrowArrayElements, TurboFanCodeStub);
};
class FastArrayPushStub : public HydrogenCodeStub {
public:
explicit FastArrayPushStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(VarArgFunction);
DEFINE_HYDROGEN_CODE_STUB(FastArrayPush, HydrogenCodeStub);
};
class FastFunctionBindStub : public HydrogenCodeStub {
public:
explicit FastFunctionBindStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
......
......@@ -34,11 +34,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// IA32 descriptor for the (Hydrogen) FastArrayPushStub: eax — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -390,15 +390,16 @@ void CallFunctionWithFeedbackAndVectorDescriptor::InitializePlatformIndependent(
void BuiltinDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
MachineType machine_types[] = {MachineType::AnyTagged(),
MachineType::Int32()};
MachineType machine_types[] = {
MachineType::AnyTagged(), MachineType::AnyTagged(), MachineType::Int32()};
data->InitializePlatformIndependent(arraysize(machine_types), 0,
machine_types);
}
void BuiltinDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {NewTargetRegister(), ArgumentsCountRegister()};
Register registers[] = {TargetRegister(), NewTargetRegister(),
ArgumentsCountRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
......@@ -409,6 +410,10 @@ const Register BuiltinDescriptor::NewTargetRegister() {
return kJavaScriptCallNewTargetRegister;
}
// Register carrying the JSFunction (target) under the builtin calling
// convention, alongside the new-target and argument-count registers.
const Register BuiltinDescriptor::TargetRegister() {
return kJSFunctionRegister;
}
void ArrayNoArgumentConstructorDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
// kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter
......
......@@ -618,6 +618,7 @@ class BuiltinDescriptor : public CallInterfaceDescriptor {
CallInterfaceDescriptor)
static const Register ArgumentsCountRegister();
static const Register NewTargetRegister();
static const Register TargetRegister();
};
class ArrayNoArgumentConstructorDescriptor : public CallInterfaceDescriptor {
......
......@@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// MIPS descriptor for the (Hydrogen) FastArrayPushStub: a0 — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -32,11 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// MIPS64 descriptor for the (Hydrogen) FastArrayPushStub: a0 — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -65,12 +65,7 @@ RUNTIME_FUNCTION(Runtime_SpecialArrayFunctions) {
isolate->factory()->NewJSObject(isolate->object_function());
InstallBuiltin(isolate, holder, "pop", Builtins::kArrayPop);
if (FLAG_minimal) {
InstallBuiltin(isolate, holder, "push", Builtins::kArrayPush);
} else {
FastArrayPushStub stub(isolate);
InstallCode(isolate, holder, "push", stub.GetCode());
}
InstallBuiltin(isolate, holder, "push", Builtins::kFastArrayPush);
InstallBuiltin(isolate, holder, "shift", Builtins::kArrayShift);
InstallBuiltin(isolate, holder, "unshift", Builtins::kArrayUnshift);
InstallBuiltin(isolate, holder, "slice", Builtins::kArraySlice);
......
......@@ -45,7 +45,6 @@ namespace internal {
F(EstimateNumberOfElements, 1, 1) \
F(GetArrayKeys, 2, 1) \
F(NewArray, -1 /* >= 3 */, 1) \
F(ArrayPush, -1, 1) \
F(FunctionBind, -1, 1) \
F(NormalizeElements, 1, 1) \
F(GrowArrayElements, 2, 1) \
......
......@@ -32,11 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
// X64 descriptor for the (Hydrogen) FastArrayPushStub: rax — presumably the
// stack-parameter-count register, verify against CodeStubDescriptor — with a
// variable argument count (-1) and Runtime::kArrayPush as the deopt fallback.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
......
......@@ -1797,5 +1797,136 @@ TEST(IsDebugActive) {
*debug_is_active = false;
}
// cctest helper that exercises CodeStubAssembler::BuildAppendJSArray for a
// given elements kind: it builds a snippet that appends the 4 test-function
// arguments to a pre-seeded 2-element JSArray, then checks the resulting
// length, contents, and that the array's elements kind did NOT change.
class AppendJSArrayCodeStubAssembler : public CodeStubAssembler {
public:
AppendJSArrayCodeStubAssembler(compiler::CodeAssemblerState* state,
ElementsKind kind)
: CodeStubAssembler(state), kind_(kind) {}
// Builds, generates and runs the test code.
// o1..o4: the four values pushed; initial_size: the array's starting
// capacity; result_size: the expected final length (appends past a value
// incompatible with kind_ bail out, so result_size may be < 6).
void TestAppendJSArrayImpl(Isolate* isolate, CodeAssemblerTester* tester,
Object* o1, Object* o2, Object* o3, Object* o4,
int initial_size, int result_size) {
typedef CodeStubAssembler::Variable Variable;
typedef CodeStubAssembler::Label Label;
// Seed the array with smis 1 and 2 at indices 0 and 1.
Handle<JSArray> array = isolate->factory()->NewJSArray(
kind_, 2, initial_size, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
JSObject::SetElement(isolate, array, 0,
Handle<Smi>(Smi::FromInt(1), isolate), SLOPPY)
.Check();
JSObject::SetElement(isolate, array, 1,
Handle<Smi>(Smi::FromInt(2), isolate), SLOPPY)
.Check();
CodeStubArguments args(this, IntPtrConstant(kNumParams));
Variable arg_index(this, MachineType::PointerRepresentation());
Label bailout(this);
arg_index.Bind(IntPtrConstant(0));
Node* length = BuildAppendJSArray(
kind_, HeapConstant(Handle<HeapObject>(isolate->context(), isolate)),
HeapConstant(array), args, arg_index, &bailout);
Return(length);
// On bailout, report how far we got: appended count + the 2 seed elements.
Bind(&bailout);
Return(SmiTag(IntPtrAdd(arg_index.value(), IntPtrConstant(2))));
Handle<Code> code = tester->GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result =
ft.Call(Handle<Object>(o1, isolate), Handle<Object>(o2, isolate),
Handle<Object>(o3, isolate), Handle<Object>(o4, isolate))
.ToHandleChecked();
// BuildAppendJSArray must never transition the elements kind itself.
CHECK_EQ(kind_, array->GetElementsKind());
CHECK_EQ(result_size, Handle<Smi>::cast(result)->value());
CHECK_EQ(result_size, Smi::cast(array->length())->value());
// Elements beyond the final length must read back as undefined.
Object* obj = *JSObject::GetElement(isolate, array, 2).ToHandleChecked();
CHECK_EQ(result_size < 3 ? isolate->heap()->undefined_value() : o1, obj);
obj = *JSObject::GetElement(isolate, array, 3).ToHandleChecked();
CHECK_EQ(result_size < 4 ? isolate->heap()->undefined_value() : o2, obj);
obj = *JSObject::GetElement(isolate, array, 4).ToHandleChecked();
CHECK_EQ(result_size < 5 ? isolate->heap()->undefined_value() : o3, obj);
obj = *JSObject::GetElement(isolate, array, 5).ToHandleChecked();
CHECK_EQ(result_size < 6 ? isolate->heap()->undefined_value() : o4, obj);
}
// Convenience entry point used by the TEST() macros below.
static void TestAppendJSArray(Isolate* isolate, ElementsKind kind, Object* o1,
Object* o2, Object* o3, Object* o4,
int initial_size, int result_size) {
CodeAssemblerTester data(isolate, kNumParams);
AppendJSArrayCodeStubAssembler m(data.state(), kind);
m.TestAppendJSArrayImpl(isolate, &data, o1, o2, o3, o4, initial_size,
result_size);
}
private:
// Number of pushed arguments the generated snippet receives.
static const int kNumParams = 4;
// Elements kind under test.
ElementsKind kind_;
};
// The tests below cover each fast elements kind, with and without capacity
// for the appended elements ("Grow"), and with one incompatible value
// (undefined) to check the bailout path stops at the right index
// (result_size 4 = 2 seed elements + 2 successfully appended values).
// FAST_ELEMENTS, capacity already sufficient.
TEST(BuildAppendJSArrayFastElement) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4), Smi::FromInt(5),
Smi::FromInt(6), 6, 6);
}
// FAST_ELEMENTS, backing store must grow from 2 to hold 6.
TEST(BuildAppendJSArrayFastElementGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4), Smi::FromInt(5),
Smi::FromInt(6), 2, 6);
}
// FAST_SMI_ELEMENTS, capacity sufficient, all-smi input.
TEST(BuildAppendJSArrayFastSmiElement) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 6, 6);
}
// FAST_SMI_ELEMENTS with growth.
TEST(BuildAppendJSArrayFastSmiElementGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 2, 6);
}
// FAST_SMI_ELEMENTS: third value is undefined (non-smi) -> bails at 4.
TEST(BuildAppendJSArrayFastSmiElementObject) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 6, 4);
}
// Same bailout as above, but the store must also grow first.
TEST(BuildAppendJSArrayFastSmiElementObjectGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 2, 4);
}
// FAST_DOUBLE_ELEMENTS, capacity sufficient.
TEST(BuildAppendJSArrayFastDoubleElements) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 6, 6);
}
// FAST_DOUBLE_ELEMENTS with growth.
TEST(BuildAppendJSArrayFastDoubleElementsGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 2, 6);
}
// FAST_DOUBLE_ELEMENTS: third value is undefined (non-number) -> bails at 4.
TEST(BuildAppendJSArrayFastDoubleElementsObject) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 6, 4);
}
} // namespace internal
} // namespace v8
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
// Smoke-test the fast Array.prototype.push path.
[1].push(1);
// Pushing a double whose raw bits collide with V8's "hole" NaN encoding
// must be stored as a genuine NaN, not be misread as a hole in the array.
(function PushHoleBitPattern() {
function g(src, dst, i) {
dst[i] = src[i];
}
var b = new ArrayBuffer(8);
var i32 = new Int32Array(b);
// 0xFFF7FFFF in both 32-bit halves builds a signalling-NaN bit pattern —
// presumably matching V8's internal hole NaN; confirm against kHoleNan*.
i32[0] = 0xFFF7FFFF;
i32[1] = 0xFFF7FFFF;
var f64 = new Float64Array(b);
// Holey double array so stores go through the hole-aware double path.
var a = [,2.5];
a.push(f64[0]);
assertTrue(Number.isNaN(a[2]));
})();
......@@ -3,13 +3,16 @@
// found in the LICENSE file.
function __f_17(__v_9) {
var __v_10 = 0;
var count = 100000;
while (count-- != 0) {
var l = __v_9.push(0);
if (++__v_10 >= 2) return __v_9;
__v_10 = {};
}
var __v_10 = 0;
var count = 100000;
while (count-- != 0) {
var l = __v_9.push(0);
if (++__v_10 >= 2) return __v_9;
__v_10 = {};
}
return __v_9;
}
__f_17([]);
let a = __f_17([]);
assertEquals(a[0], 0);
assertEquals(a[10000], 0);
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Push values that force elements-kind transitions mid-stream:
// smi (3) -> double (2.5) -> non-number (true), exercising the fast
// Array.push transition/bailout paths across many iterations.
function __f_17(__v_9) {
for (var count = 0; count < 20000; ++count) {
if (count < 100) {
__v_9.push(3);
} else if (count < 2500) {
__v_9.push(2.5);
} else {
__v_9.push(true);
}
}
return __v_9;
}
let a = __f_17([]);
// Spot-check one value from each pushed region.
assertEquals(a[0], 3);
assertEquals(a[10], 3);
assertEquals(a[2499], 2.5);
assertEquals(a[10000], true);
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Regression test: repeatedly push a double onto a double-elements array,
// growing the backing store many times; the odd __v_10 dance keeps the loop
// shape from the original fuzzer-reduced test case.
function __f_17(__v_9) {
var __v_10 = 0;
var count = 100000;
while (count-- != 0) {
var l = __v_9.push(0.5);
if (++__v_10 >= 2) return __v_9;
__v_10 = {};
}
return __v_9;
}
let a = __f_17([2.2]);
// The seed element survives and pushed values land after it.
assertEquals(a[0], 2.2);
assertEquals(a[10000], 0.5);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.