Commit 44bed6a8 authored by Vaclav Brozek's avatar Vaclav Brozek Committed by Commit Bot

TF stubs out of ArrayIndexOf and ArrayIncludes builtins

ArrayIndexOf is a monolithic builtin which does some checking and then handles
three groups of fast arrays: those with holey doubles, compacted doubles and
SMIs+objects. TF cannot reuse this efficiently, because calling the TFJ
ArrayIndexOf duplicates some checks and also does not allow passing arguments
through registers. Similarly for ArrayIncludes.

This CL splits the three different types of fast array handling into separate
TF stubs, and makes the parent TFJ as well as TurboFan itself use them where
appropriate.

The TODOs not tackled in this CL include:
* passing an empty context to spare a register when possible
* inlining the search loop if there is any performance gain to it

(This is the continuation of http://crrev.com/2757853002, moved due to Rietveld
deprecation.)

BUG=v8:5985

Change-Id: I00c97b71be4892f8bc7e1ed6d72e02087618a9a6
Reviewed-on: https://chromium-review.googlesource.com/573020
Commit-Queue: Vaclav Brozek <vabr@chromium.org>
Reviewed-by: 's avatarSigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51729}
parent efed5562
...@@ -2988,6 +2988,15 @@ class ArrayIncludesIndexofAssembler : public CodeStubAssembler { ...@@ -2988,6 +2988,15 @@ class ArrayIncludesIndexofAssembler : public CodeStubAssembler {
enum SearchVariant { kIncludes, kIndexOf }; enum SearchVariant { kIncludes, kIndexOf };
void Generate(SearchVariant variant); void Generate(SearchVariant variant);
void GenerateSmiOrObject(SearchVariant variant, Node* context, Node* elements,
Node* search_element, Node* array_length,
Node* from_index);
void GeneratePackedDoubles(SearchVariant variant, Node* elements,
Node* search_element, Node* array_length,
Node* from_index);
void GenerateHoleyDoubles(SearchVariant variant, Node* elements,
Node* search_element, Node* array_length,
Node* from_index);
}; };
void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
...@@ -3005,8 +3014,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3005,8 +3014,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
Node* intptr_zero = IntPtrConstant(0); Node* intptr_zero = IntPtrConstant(0);
Label init_index(this), return_found(this), return_not_found(this), Label init_index(this), return_not_found(this), call_runtime(this);
call_runtime(this);
// Take slow path if not a JSArray, if retrieving elements requires // Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required. // traversing prototype, or if access checks are required.
...@@ -3018,7 +3026,8 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3018,7 +3026,8 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
// JSArray length is always a positive Smi for fast arrays. // JSArray length is always a positive Smi for fast arrays.
CSA_ASSERT(this, TaggedIsPositiveSmi(LoadJSArrayLength(array))); CSA_ASSERT(this, TaggedIsPositiveSmi(LoadJSArrayLength(array)));
Node* array_length = SmiUntag(LoadFastJSArrayLength(array)); Node* array_length = LoadFastJSArrayLength(array);
Node* array_length_untagged = SmiUntag(array_length);
{ {
// Initialize fromIndex. // Initialize fromIndex.
...@@ -3046,7 +3055,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3046,7 +3055,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), intptr_zero), &done); GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), intptr_zero), &done);
// The fromIndex is negative: add it to the array's length. // The fromIndex is negative: add it to the array's length.
index_var.Bind(IntPtrAdd(array_length, index_var.value())); index_var.Bind(IntPtrAdd(array_length_untagged, index_var.value()));
// Clamp negative results at zero. // Clamp negative results at zero.
GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), intptr_zero), &done); GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), intptr_zero), &done);
index_var.Bind(intptr_zero); index_var.Bind(intptr_zero);
...@@ -3056,7 +3065,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3056,7 +3065,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
} }
// Fail early if startIndex >= array.length. // Fail early if startIndex >= array.length.
GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), array_length), GotoIf(IntPtrGreaterThanOrEqual(index_var.value(), array_length_untagged),
&return_not_found); &return_not_found);
Label if_smiorobjects(this), if_packed_doubles(this), if_holey_doubles(this); Label if_smiorobjects(this), if_packed_doubles(this), if_holey_doubles(this);
...@@ -3077,179 +3086,139 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3077,179 +3086,139 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
BIND(&if_smiorobjects); BIND(&if_smiorobjects);
{ {
VARIABLE(search_num, MachineRepresentation::kFloat64); Callable callable =
Label ident_loop(this, &index_var), heap_num_loop(this, &search_num), (variant == kIncludes)
string_loop(this), bigint_loop(this, &index_var), ? Builtins::CallableFor(isolate(),
undef_loop(this, &index_var), not_smi(this), not_heap_num(this); Builtins::kArrayIncludesSmiOrObject)
: Builtins::CallableFor(isolate(),
GotoIfNot(TaggedIsSmi(search_element), &not_smi); Builtins::kArrayIndexOfSmiOrObject);
search_num.Bind(SmiToFloat64(CAST(search_element))); Node* result = CallStub(callable, context, elements, search_element,
Goto(&heap_num_loop); array_length, SmiTag(index_var.value()));
args.PopAndReturn(result);
}
BIND(&not_smi); BIND(&if_packed_doubles);
if (variant == kIncludes) { {
GotoIf(IsUndefined(search_element), &undef_loop); Callable callable =
} (variant == kIncludes)
Node* map = LoadMap(CAST(search_element)); ? Builtins::CallableFor(isolate(),
GotoIfNot(IsHeapNumberMap(map), &not_heap_num); Builtins::kArrayIncludesPackedDoubles)
search_num.Bind(LoadHeapNumberValue(CAST(search_element))); : Builtins::CallableFor(isolate(),
Goto(&heap_num_loop); Builtins::kArrayIndexOfPackedDoubles);
Node* result = CallStub(callable, context, elements, search_element,
BIND(&not_heap_num); array_length, SmiTag(index_var.value()));
Node* search_type = LoadMapInstanceType(map); args.PopAndReturn(result);
GotoIf(IsStringInstanceType(search_type), &string_loop); }
GotoIf(IsBigIntInstanceType(search_type), &bigint_loop);
Goto(&ident_loop);
BIND(&ident_loop); BIND(&if_holey_doubles);
{ {
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), Callable callable =
&return_not_found); (variant == kIncludes)
Node* element_k = LoadFixedArrayElement(elements, index_var.value()); ? Builtins::CallableFor(isolate(),
GotoIf(WordEqual(element_k, search_element), &return_found); Builtins::kArrayIncludesHoleyDoubles)
: Builtins::CallableFor(isolate(),
Builtins::kArrayIndexOfHoleyDoubles);
Node* result = CallStub(callable, context, elements, search_element,
array_length, SmiTag(index_var.value()));
args.PopAndReturn(result);
}
Increment(&index_var); BIND(&return_not_found);
Goto(&ident_loop); if (variant == kIncludes) {
} args.PopAndReturn(FalseConstant());
} else {
args.PopAndReturn(NumberConstant(-1));
}
if (variant == kIncludes) { BIND(&call_runtime);
BIND(&undef_loop); {
Node* start_from =
args.GetOptionalArgumentValue(kFromIndexArg, UndefinedConstant());
Runtime::FunctionId function = variant == kIncludes
? Runtime::kArrayIncludes_Slow
: Runtime::kArrayIndexOf;
args.PopAndReturn(
CallRuntime(function, context, array, search_element, start_from));
}
}
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
&return_not_found); SearchVariant variant, Node* context, Node* elements, Node* search_element,
Node* element_k = LoadFixedArrayElement(elements, index_var.value()); Node* array_length, Node* from_index) {
GotoIf(IsUndefined(element_k), &return_found); VARIABLE(index_var, MachineType::PointerRepresentation(),
GotoIf(IsTheHole(element_k), &return_found); SmiUntag(from_index));
VARIABLE(search_num, MachineRepresentation::kFloat64);
Node* array_length_untagged = SmiUntag(array_length);
Increment(&index_var); Label ident_loop(this, &index_var), heap_num_loop(this, &search_num),
Goto(&undef_loop); string_loop(this), bigint_loop(this, &index_var),
} undef_loop(this, &index_var), not_smi(this), not_heap_num(this),
return_found(this), return_not_found(this);
BIND(&heap_num_loop); GotoIfNot(TaggedIsSmi(search_element), &not_smi);
{ search_num.Bind(SmiToFloat64(search_element));
Label nan_loop(this, &index_var), not_nan_loop(this, &index_var); Goto(&heap_num_loop);
Label* nan_handling =
variant == kIncludes ? &nan_loop : &return_not_found;
BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop);
BIND(&not_nan_loop); BIND(&not_smi);
{ if (variant == kIncludes) {
Label continue_loop(this), not_smi(this); GotoIf(IsUndefined(search_element), &undef_loop);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), }
&return_not_found); Node* map = LoadMap(search_element);
Node* element_k = LoadFixedArrayElement(elements, index_var.value()); GotoIfNot(IsHeapNumberMap(map), &not_heap_num);
GotoIfNot(TaggedIsSmi(element_k), &not_smi); search_num.Bind(LoadHeapNumberValue(search_element));
Branch(Float64Equal(search_num.value(), SmiToFloat64(element_k)), Goto(&heap_num_loop);
&return_found, &continue_loop);
BIND(&not_smi);
GotoIfNot(IsHeapNumber(element_k), &continue_loop);
Branch(Float64Equal(search_num.value(), LoadHeapNumberValue(element_k)),
&return_found, &continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&not_nan_loop);
}
// Array.p.includes uses SameValueZero comparisons, where NaN == NaN. BIND(&not_heap_num);
if (variant == kIncludes) { Node* search_type = LoadMapInstanceType(map);
BIND(&nan_loop); GotoIf(IsStringInstanceType(search_type), &string_loop);
Label continue_loop(this); GotoIf(IsBigIntInstanceType(search_type), &bigint_loop);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), Goto(&ident_loop);
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIfNot(IsHeapNumber(element_k), &continue_loop);
BranchIfFloat64IsNaN(LoadHeapNumberValue(element_k), &return_found,
&continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&nan_loop);
}
}
BIND(&string_loop); BIND(&ident_loop);
{ {
TNode<String> search_element_string = CAST(search_element); GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
Label continue_loop(this), next_iteration(this, &index_var), &return_not_found);
slow_compare(this), runtime(this, Label::kDeferred); Node* element_k = LoadFixedArrayElement(elements, index_var.value());
TNode<IntPtrT> search_length = GotoIf(WordEqual(element_k, search_element), &return_found);
LoadStringLengthAsWord(search_element_string);
Goto(&next_iteration);
BIND(&next_iteration);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIf(WordEqual(search_element_string, element_k), &return_found);
Node* element_k_type = LoadInstanceType(element_k);
GotoIfNot(IsStringInstanceType(element_k_type), &continue_loop);
Branch(WordEqual(search_length, LoadStringLengthAsWord(element_k)),
&slow_compare, &continue_loop);
BIND(&slow_compare);
StringBuiltinsAssembler string_asm(state());
string_asm.StringEqual_Core(context, search_element_string, search_type,
element_k, element_k_type, search_length,
&return_found, &continue_loop, &runtime);
BIND(&runtime);
TNode<Object> result = CallRuntime(Runtime::kStringEqual, context,
search_element_string, element_k);
Branch(WordEqual(result, TrueConstant()), &return_found, &continue_loop);
BIND(&continue_loop); Increment(&index_var);
Increment(&index_var); Goto(&ident_loop);
Goto(&next_iteration); }
}
BIND(&bigint_loop); if (variant == kIncludes) {
{ BIND(&undef_loop);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value()); GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
Label continue_loop(this); &return_not_found);
GotoIf(TaggedIsSmi(element_k), &continue_loop); Node* element_k = LoadFixedArrayElement(elements, index_var.value());
GotoIfNot(IsBigInt(element_k), &continue_loop); GotoIf(IsUndefined(element_k), &return_found);
TNode<Object> result = CallRuntime(Runtime::kBigIntEqualToBigInt, context, GotoIf(IsTheHole(element_k), &return_found);
search_element, element_k);
Branch(WordEqual(result, TrueConstant()), &return_found, &continue_loop);
BIND(&continue_loop); Increment(&index_var);
Increment(&index_var); Goto(&undef_loop);
Goto(&bigint_loop);
}
} }
BIND(&if_packed_doubles); BIND(&heap_num_loop);
{ {
Label nan_loop(this, &index_var), not_nan_loop(this, &index_var), Label nan_loop(this, &index_var), not_nan_loop(this, &index_var);
hole_loop(this, &index_var), search_notnan(this);
VARIABLE(search_num, MachineRepresentation::kFloat64);
GotoIfNot(TaggedIsSmi(search_element), &search_notnan);
search_num.Bind(SmiToFloat64(CAST(search_element)));
Goto(&not_nan_loop);
BIND(&search_notnan);
GotoIfNot(IsHeapNumber(search_element), &return_not_found);
search_num.Bind(LoadHeapNumberValue(CAST(search_element)));
Label* nan_handling = variant == kIncludes ? &nan_loop : &return_not_found; Label* nan_handling = variant == kIncludes ? &nan_loop : &return_not_found;
BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop); BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop);
BIND(&not_nan_loop); BIND(&not_nan_loop);
{ {
Label continue_loop(this); Label continue_loop(this), not_smi(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found); &return_not_found);
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(), Node* element_k = LoadFixedArrayElement(elements, index_var.value());
MachineType::Float64()); GotoIfNot(TaggedIsSmi(element_k), &not_smi);
Branch(Float64Equal(element_k, search_num.value()), &return_found, Branch(Float64Equal(search_num.value(), SmiToFloat64(element_k)),
&continue_loop); &return_found, &continue_loop);
BIND(&not_smi);
GotoIfNot(IsHeapNumber(element_k), &continue_loop);
Branch(Float64Equal(search_num.value(), LoadHeapNumberValue(element_k)),
&return_found, &continue_loop);
BIND(&continue_loop); BIND(&continue_loop);
Increment(&index_var); Increment(&index_var);
Goto(&not_nan_loop); Goto(&not_nan_loop);
...@@ -3259,112 +3228,249 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) { ...@@ -3259,112 +3228,249 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant) {
if (variant == kIncludes) { if (variant == kIncludes) {
BIND(&nan_loop); BIND(&nan_loop);
Label continue_loop(this); Label continue_loop(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found); &return_not_found);
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(), Node* element_k = LoadFixedArrayElement(elements, index_var.value());
MachineType::Float64()); GotoIf(TaggedIsSmi(element_k), &continue_loop);
BranchIfFloat64IsNaN(element_k, &return_found, &continue_loop); GotoIfNot(IsHeapNumber(element_k), &continue_loop);
BranchIfFloat64IsNaN(LoadHeapNumberValue(element_k), &return_found,
&continue_loop);
BIND(&continue_loop); BIND(&continue_loop);
Increment(&index_var); Increment(&index_var);
Goto(&nan_loop); Goto(&nan_loop);
} }
} }
BIND(&if_holey_doubles); BIND(&string_loop);
{ {
Label nan_loop(this, &index_var), not_nan_loop(this, &index_var), TNode<String> search_element_string = CAST(search_element);
hole_loop(this, &index_var), search_notnan(this); Label continue_loop(this), next_iteration(this, &index_var),
VARIABLE(search_num, MachineRepresentation::kFloat64); slow_compare(this), runtime(this, Label::kDeferred);
TNode<IntPtrT> search_length =
LoadStringLengthAsWord(search_element_string);
Goto(&next_iteration);
BIND(&next_iteration);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIf(WordEqual(search_element_string, element_k), &return_found);
Node* element_k_type = LoadInstanceType(element_k);
GotoIfNot(IsStringInstanceType(element_k_type), &continue_loop);
Branch(WordEqual(search_length, LoadStringLengthAsWord(element_k)),
&slow_compare, &continue_loop);
BIND(&slow_compare);
StringBuiltinsAssembler string_asm(state());
string_asm.StringEqual_Core(context, search_element_string, search_type,
element_k, element_k_type, search_length,
&return_found, &continue_loop, &runtime);
BIND(&runtime);
TNode<Object> result = CallRuntime(Runtime::kStringEqual, context,
search_element_string, element_k);
Branch(WordEqual(result, TrueConstant()), &return_found, &continue_loop);
GotoIfNot(TaggedIsSmi(search_element), &search_notnan); BIND(&continue_loop);
search_num.Bind(SmiToFloat64(CAST(search_element))); Increment(&index_var);
Goto(&not_nan_loop); Goto(&next_iteration);
}
BIND(&search_notnan); BIND(&bigint_loop);
if (variant == kIncludes) { {
GotoIf(IsUndefined(search_element), &hole_loop); GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
} &return_not_found);
GotoIfNot(IsHeapNumber(search_element), &return_not_found);
search_num.Bind(LoadHeapNumberValue(CAST(search_element))); Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Label continue_loop(this);
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIfNot(IsBigInt(element_k), &continue_loop);
TNode<Object> result = CallRuntime(Runtime::kBigIntEqualToBigInt, context,
search_element, element_k);
Branch(WordEqual(result, TrueConstant()), &return_found, &continue_loop);
Label* nan_handling = variant == kIncludes ? &nan_loop : &return_not_found; BIND(&continue_loop);
BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop); Increment(&index_var);
Goto(&bigint_loop);
}
BIND(&return_found);
if (variant == kIncludes) {
Return(TrueConstant());
} else {
Return(SmiTag(index_var.value()));
}
BIND(&not_nan_loop); BIND(&return_not_found);
{ if (variant == kIncludes) {
Label continue_loop(this); Return(FalseConstant());
GotoIfNot(UintPtrLessThan(index_var.value(), array_length), } else {
&return_not_found); Return(NumberConstant(-1));
}
}
// No need for hole checking here; the following Float64Equal will void ArrayIncludesIndexofAssembler::GeneratePackedDoubles(SearchVariant variant,
// return 'not equal' for holes anyway. Node* elements,
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(), Node* search_element,
MachineType::Float64()); Node* array_length,
Node* from_index) {
VARIABLE(index_var, MachineType::PointerRepresentation(),
SmiUntag(from_index));
Node* array_length_untagged = SmiUntag(array_length);
Branch(Float64Equal(element_k, search_num.value()), &return_found, Label nan_loop(this, &index_var), not_nan_loop(this, &index_var),
&continue_loop); hole_loop(this, &index_var), search_notnan(this), return_found(this),
BIND(&continue_loop); return_not_found(this);
Increment(&index_var); VARIABLE(search_num, MachineRepresentation::kFloat64);
Goto(&not_nan_loop); search_num.Bind(Float64Constant(0));
}
// Array.p.includes uses SameValueZero comparisons, where NaN == NaN. GotoIfNot(TaggedIsSmi(search_element), &search_notnan);
if (variant == kIncludes) { search_num.Bind(SmiToFloat64(search_element));
BIND(&nan_loop); Goto(&not_nan_loop);
Label continue_loop(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length),
&return_not_found);
// Load double value or continue if it's the hole NaN. BIND(&search_notnan);
Node* element_k = LoadFixedDoubleArrayElement( GotoIfNot(IsHeapNumber(search_element), &return_not_found);
elements, index_var.value(), MachineType::Float64(), 0,
INTPTR_PARAMETERS, &continue_loop);
BranchIfFloat64IsNaN(element_k, &return_found, &continue_loop); search_num.Bind(LoadHeapNumberValue(search_element));
BIND(&continue_loop);
Increment(&index_var);
Goto(&nan_loop);
}
// Array.p.includes treats the hole as undefined. Label* nan_handling = variant == kIncludes ? &nan_loop : &return_not_found;
if (variant == kIncludes) { BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop);
BIND(&hole_loop);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length),
&return_not_found);
// Check if the element is a double hole, but don't load it. BIND(&not_nan_loop);
LoadFixedDoubleArrayElement(elements, index_var.value(), {
MachineType::None(), 0, INTPTR_PARAMETERS, Label continue_loop(this);
&return_found); GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(),
MachineType::Float64());
Branch(Float64Equal(element_k, search_num.value()), &return_found,
&continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&not_nan_loop);
}
Increment(&index_var); // Array.p.includes uses SameValueZero comparisons, where NaN == NaN.
Goto(&hole_loop); if (variant == kIncludes) {
} BIND(&nan_loop);
Label continue_loop(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(),
MachineType::Float64());
BranchIfFloat64IsNaN(element_k, &return_found, &continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&nan_loop);
} }
BIND(&return_found); BIND(&return_found);
if (variant == kIncludes) { if (variant == kIncludes) {
args.PopAndReturn(TrueConstant()); Return(TrueConstant());
} else { } else {
args.PopAndReturn(SmiTag(index_var.value())); Return(SmiTag(index_var.value()));
} }
BIND(&return_not_found); BIND(&return_not_found);
if (variant == kIncludes) { if (variant == kIncludes) {
args.PopAndReturn(FalseConstant()); Return(FalseConstant());
} else { } else {
args.PopAndReturn(NumberConstant(-1)); Return(NumberConstant(-1));
} }
}
BIND(&call_runtime); void ArrayIncludesIndexofAssembler::GenerateHoleyDoubles(SearchVariant variant,
Node* elements,
Node* search_element,
Node* array_length,
Node* from_index) {
VARIABLE(index_var, MachineType::PointerRepresentation(),
SmiUntag(from_index));
Node* array_length_untagged = SmiUntag(array_length);
Label nan_loop(this, &index_var), not_nan_loop(this, &index_var),
hole_loop(this, &index_var), search_notnan(this), return_found(this),
return_not_found(this);
VARIABLE(search_num, MachineRepresentation::kFloat64);
search_num.Bind(Float64Constant(0));
GotoIfNot(TaggedIsSmi(search_element), &search_notnan);
search_num.Bind(SmiToFloat64(search_element));
Goto(&not_nan_loop);
BIND(&search_notnan);
if (variant == kIncludes) {
GotoIf(IsUndefined(search_element), &hole_loop);
}
GotoIfNot(IsHeapNumber(search_element), &return_not_found);
search_num.Bind(LoadHeapNumberValue(search_element));
Label* nan_handling = variant == kIncludes ? &nan_loop : &return_not_found;
BranchIfFloat64IsNaN(search_num.value(), nan_handling, &not_nan_loop);
BIND(&not_nan_loop);
{ {
Node* start_from = args.GetOptionalArgumentValue(kFromIndexArg); Label continue_loop(this);
Runtime::FunctionId function = variant == kIncludes GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
? Runtime::kArrayIncludes_Slow &return_not_found);
: Runtime::kArrayIndexOf;
args.PopAndReturn( // No need for hole checking here; the following Float64Equal will
CallRuntime(function, context, array, search_element, start_from)); // return 'not equal' for holes anyway.
Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(),
MachineType::Float64());
Branch(Float64Equal(element_k, search_num.value()), &return_found,
&continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&not_nan_loop);
}
// Array.p.includes uses SameValueZero comparisons, where NaN == NaN.
if (variant == kIncludes) {
BIND(&nan_loop);
Label continue_loop(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
// Load double value or continue if it's the hole NaN.
Node* element_k = LoadFixedDoubleArrayElement(
elements, index_var.value(), MachineType::Float64(), 0,
INTPTR_PARAMETERS, &continue_loop);
BranchIfFloat64IsNaN(element_k, &return_found, &continue_loop);
BIND(&continue_loop);
Increment(&index_var);
Goto(&nan_loop);
}
// Array.p.includes treats the hole as undefined.
if (variant == kIncludes) {
BIND(&hole_loop);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
// Check if the element is a double hole, but don't load it.
LoadFixedDoubleArrayElement(elements, index_var.value(),
MachineType::None(), 0, INTPTR_PARAMETERS,
&return_found);
Increment(&index_var);
Goto(&hole_loop);
}
BIND(&return_found);
if (variant == kIncludes) {
Return(TrueConstant());
} else {
Return(SmiTag(index_var.value()));
}
BIND(&return_not_found);
if (variant == kIncludes) {
Return(FalseConstant());
} else {
Return(NumberConstant(-1));
} }
} }
...@@ -3372,8 +3478,70 @@ TF_BUILTIN(ArrayIncludes, ArrayIncludesIndexofAssembler) { ...@@ -3372,8 +3478,70 @@ TF_BUILTIN(ArrayIncludes, ArrayIncludesIndexofAssembler) {
Generate(kIncludes); Generate(kIncludes);
} }
TF_BUILTIN(ArrayIncludesSmiOrObject, ArrayIncludesIndexofAssembler) {
Node* context = Parameter(Descriptor::kContext);
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GenerateSmiOrObject(kIncludes, context, elements, search_element,
array_length, from_index);
}
TF_BUILTIN(ArrayIncludesPackedDoubles, ArrayIncludesIndexofAssembler) {
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GeneratePackedDoubles(kIncludes, elements, search_element, array_length,
from_index);
}
TF_BUILTIN(ArrayIncludesHoleyDoubles, ArrayIncludesIndexofAssembler) {
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GenerateHoleyDoubles(kIncludes, elements, search_element, array_length,
from_index);
}
TF_BUILTIN(ArrayIndexOf, ArrayIncludesIndexofAssembler) { Generate(kIndexOf); } TF_BUILTIN(ArrayIndexOf, ArrayIncludesIndexofAssembler) { Generate(kIndexOf); }
TF_BUILTIN(ArrayIndexOfSmiOrObject, ArrayIncludesIndexofAssembler) {
Node* context = Parameter(Descriptor::kContext);
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GenerateSmiOrObject(kIndexOf, context, elements, search_element, array_length,
from_index);
}
TF_BUILTIN(ArrayIndexOfPackedDoubles, ArrayIncludesIndexofAssembler) {
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GeneratePackedDoubles(kIndexOf, elements, search_element, array_length,
from_index);
}
TF_BUILTIN(ArrayIndexOfHoleyDoubles, ArrayIncludesIndexofAssembler) {
Node* elements = Parameter(Descriptor::kElements);
Node* search_element = Parameter(Descriptor::kSearchElement);
Node* array_length = Parameter(Descriptor::kLength);
Node* from_index = Parameter(Descriptor::kFromIndex);
GenerateHoleyDoubles(kIndexOf, elements, search_element, array_length,
from_index);
}
// ES #sec-array.prototype.values // ES #sec-array.prototype.values
TF_BUILTIN(ArrayPrototypeValues, CodeStubAssembler) { TF_BUILTIN(ArrayPrototypeValues, CodeStubAssembler) {
TNode<Context> context = CAST(Parameter(Descriptor::kContext)); TNode<Context> context = CAST(Parameter(Descriptor::kContext));
......
...@@ -247,8 +247,19 @@ namespace internal { ...@@ -247,8 +247,19 @@ namespace internal {
/* ES6 #sec-array.of */ \ /* ES6 #sec-array.of */ \
TFJ(ArrayOf, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \ TFJ(ArrayOf, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \
/* ES7 #sec-array.prototype.includes */ \ /* ES7 #sec-array.prototype.includes */ \
TFS(ArrayIncludesSmiOrObject, kElements, kSearchElement, kLength, \
kFromIndex) \
TFS(ArrayIncludesPackedDoubles, kElements, kSearchElement, kLength, \
kFromIndex) \
TFS(ArrayIncludesHoleyDoubles, kElements, kSearchElement, kLength, \
kFromIndex) \
TFJ(ArrayIncludes, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \ TFJ(ArrayIncludes, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \
/* ES6 #sec-array.prototype.indexof */ \ /* ES6 #sec-array.prototype.indexof */ \
TFS(ArrayIndexOfSmiOrObject, kElements, kSearchElement, kLength, kFromIndex) \
TFS(ArrayIndexOfPackedDoubles, kElements, kSearchElement, kLength, \
kFromIndex) \
TFS(ArrayIndexOfHoleyDoubles, kElements, kSearchElement, kLength, \
kFromIndex) \
TFJ(ArrayIndexOf, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \ TFJ(ArrayIndexOf, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \
/* ES6 #sec-array.prototype.pop */ \ /* ES6 #sec-array.prototype.pop */ \
CPP(ArrayPop) \ CPP(ArrayPop) \
......
...@@ -2439,6 +2439,109 @@ Reduction JSCallReducer::ReduceArrayEvery(Node* node, ...@@ -2439,6 +2439,109 @@ Reduction JSCallReducer::ReduceArrayEvery(Node* node,
return Replace(return_value); return Replace(return_value);
} }
namespace {
// Returns the correct Callable for Array's indexOf based on the receiver's
// |elements_kind| and |isolate|. Assumes that |elements_kind| is a fast one.
Callable GetCallableForArrayIndexOf(ElementsKind elements_kind,
Isolate* isolate) {
switch (elements_kind) {
case PACKED_SMI_ELEMENTS:
case HOLEY_SMI_ELEMENTS:
case PACKED_ELEMENTS:
case HOLEY_ELEMENTS:
return Builtins::CallableFor(isolate, Builtins::kArrayIndexOfSmiOrObject);
case PACKED_DOUBLE_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIndexOfPackedDoubles);
default:
DCHECK_EQ(HOLEY_DOUBLE_ELEMENTS, elements_kind);
return Builtins::CallableFor(isolate,
Builtins::kArrayIndexOfHoleyDoubles);
}
}
// Returns the correct Callable for Array's includes based on the receiver's
// |elements_kind| and |isolate|. Assumes that |elements_kind| is a fast one.
Callable GetCallableForArrayIncludes(ElementsKind elements_kind,
Isolate* isolate) {
switch (elements_kind) {
case PACKED_SMI_ELEMENTS:
case HOLEY_SMI_ELEMENTS:
case PACKED_ELEMENTS:
case HOLEY_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesSmiOrObject);
case PACKED_DOUBLE_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesPackedDoubles);
default:
DCHECK_EQ(HOLEY_DOUBLE_ELEMENTS, elements_kind);
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesHoleyDoubles);
}
}
} // namespace
// For search_variant == kIndexOf:
// ES6 Array.prototype.indexOf(searchElement[, fromIndex])
// #sec-array.prototype.indexof
// For search_variant == kIncludes:
// ES7 Array.prototype.includes(searchElement[, fromIndex])
// #sec-array.prototype.includes
Reduction JSCallReducer::ReduceArrayIndexOfIncludes(
SearchVariant search_variant, Node* node) {
CallParameters const& p = CallParametersOf(node->op());
if (p.speculation_mode() == SpeculationMode::kDisallowSpeculation) {
return NoChange();
}
Handle<Map> receiver_map;
if (!NodeProperties::GetMapWitness(node).ToHandle(&receiver_map))
return NoChange();
if (!IsFastElementsKind(receiver_map->elements_kind())) return NoChange();
Callable const callable =
search_variant == SearchVariant::kIndexOf
? GetCallableForArrayIndexOf(receiver_map->elements_kind(), isolate())
: GetCallableForArrayIncludes(receiver_map->elements_kind(),
isolate());
CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(), 0,
CallDescriptor::kNoFlags, Operator::kEliminatable);
// The stub expects the following arguments: the receiver array, its elements,
// the search_element, the array length, and the index to start searching
// from.
Node* receiver = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* elements = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()), receiver,
effect, control);
Node* search_element = (node->op()->ValueInputCount() >= 3)
? NodeProperties::GetValueInput(node, 2)
: jsgraph()->UndefinedConstant();
Node* length = effect = graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForJSArrayLength(receiver_map->elements_kind())),
receiver, effect, control);
Node* new_from_index = jsgraph()->ZeroConstant();
if (node->op()->ValueInputCount() >= 4) {
Node* from_index = NodeProperties::GetValueInput(node, 3);
new_from_index = effect = graph()->NewNode(
simplified()->CheckSmi(p.feedback()), from_index, effect, control);
}
Node* context = NodeProperties::GetContextInput(node);
Node* replacement_node = effect = graph()->NewNode(
common()->Call(desc), jsgraph()->HeapConstant(callable.code()), elements,
search_element, length, new_from_index, context, effect);
ReplaceWithValue(node, replacement_node, effect);
return Replace(replacement_node);
}
Reduction JSCallReducer::ReduceArraySome(Node* node, Reduction JSCallReducer::ReduceArraySome(Node* node,
Handle<SharedFunctionInfo> shared) { Handle<SharedFunctionInfo> shared) {
if (!FLAG_turbo_inline_array_builtins) return NoChange(); if (!FLAG_turbo_inline_array_builtins) return NoChange();
...@@ -3228,6 +3331,10 @@ Reduction JSCallReducer::ReduceJSCall(Node* node, ...@@ -3228,6 +3331,10 @@ Reduction JSCallReducer::ReduceJSCall(Node* node,
return ReduceArrayFind(node, ArrayFindVariant::kFindIndex, shared); return ReduceArrayFind(node, ArrayFindVariant::kFindIndex, shared);
case Builtins::kArrayEvery: case Builtins::kArrayEvery:
return ReduceArrayEvery(node, shared); return ReduceArrayEvery(node, shared);
case Builtins::kArrayIndexOf:
return ReduceArrayIndexOfIncludes(SearchVariant::kIndexOf, node);
case Builtins::kArrayIncludes:
return ReduceArrayIndexOfIncludes(SearchVariant::kIncludes, node);
case Builtins::kArraySome: case Builtins::kArraySome:
return ReduceArraySome(node, shared); return ReduceArraySome(node, shared);
case Builtins::kArrayPrototypePush: case Builtins::kArrayPrototypePush:
......
...@@ -83,6 +83,9 @@ class V8_EXPORT_PRIVATE JSCallReducer final : public AdvancedReducer { ...@@ -83,6 +83,9 @@ class V8_EXPORT_PRIVATE JSCallReducer final : public AdvancedReducer {
Reduction ReduceArrayFind(Node* node, ArrayFindVariant variant, Reduction ReduceArrayFind(Node* node, ArrayFindVariant variant,
Handle<SharedFunctionInfo> shared); Handle<SharedFunctionInfo> shared);
Reduction ReduceArrayEvery(Node* node, Handle<SharedFunctionInfo> shared); Reduction ReduceArrayEvery(Node* node, Handle<SharedFunctionInfo> shared);
enum class SearchVariant { kIncludes, kIndexOf };
Reduction ReduceArrayIndexOfIncludes(SearchVariant search_variant,
Node* node);
Reduction ReduceArraySome(Node* node, Handle<SharedFunctionInfo> shared); Reduction ReduceArraySome(Node* node, Handle<SharedFunctionInfo> shared);
Reduction ReduceArrayPrototypePush(Node* node); Reduction ReduceArrayPrototypePush(Node* node);
Reduction ReduceArrayPrototypePop(Node* node); Reduction ReduceArrayPrototypePop(Node* node);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment