Commit 6148cbfa authored by Daniel Clifford, committed by Commit Bot

Add FixedArray extraction and cloning utils to CSA

This adds a single bottleneck that properly handles the copying of empty, COW
and FixedDoubleArray arrays under the control of flags. This is in preparation
for adding new CSA-based array builtins on Array.prototype.
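
As an illustration, the bottleneck lets callers express the copy as a
flag-driven clone (this sketch is lifted from the CopyFastSmiOrObjectElements
change below):

  ExtractFixedArrayFlags flags;
  flags |= ExtractFixedArrayFlag::kFixedArrays;
  flags |= ExtractFixedArrayFlag::kForceCOWCopy;
  Node* target = CloneFixedArray(source, flags);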

Drive-by: fix SmiConstant handling when ENABLE_VERIFY_CSA is not active, and
make the use of constant detection/folding in the CSA consistent with the
ParameterMode.

Change-Id: If1889ab8cbff1805286b7b4344c29ffbe7191b39
Reviewed-on: https://chromium-review.googlesource.com/715798
Commit-Queue: Daniel Clifford <danno@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48624}
parent e0e08d52
......@@ -78,7 +78,7 @@ ArgumentsBuiltinsAssembler::AllocateArgumentsObject(Node* map,
base_size += FixedArray::kHeaderSize;
element_count = IntPtrOrSmiAdd(element_count, parameter_map_count, mode);
}
bool empty = IsIntPtrOrSmiConstantZero(arguments_count);
bool empty = IsIntPtrOrSmiConstantZero(arguments_count, mode);
DCHECK_IMPLIES(empty, parameter_map_count == nullptr);
Node* size =
empty ? IntPtrConstant(base_size)
......
......@@ -55,38 +55,6 @@ TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
typedef compiler::Node Node;
Node* ConstructorBuiltinsAssembler::CopyFixedArrayBase(Node* fixed_array) {
Label if_fixed_array(this), if_fixed_double_array(this), done(this);
VARIABLE(result, MachineRepresentation::kTagged);
Node* capacity = LoadAndUntagFixedArrayBaseLength(fixed_array);
Branch(IsFixedDoubleArrayMap(LoadMap(fixed_array)), &if_fixed_double_array,
&if_fixed_array);
BIND(&if_fixed_double_array);
{
ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
Node* copy = AllocateFixedArray(kind, capacity);
CopyFixedArrayElements(kind, fixed_array, kind, copy, capacity, capacity,
SKIP_WRITE_BARRIER);
result.Bind(copy);
Goto(&done);
}
BIND(&if_fixed_array);
{
ElementsKind kind = PACKED_ELEMENTS;
Node* copy = AllocateFixedArray(kind, capacity);
CopyFixedArrayElements(kind, fixed_array, kind, copy, capacity, capacity,
UPDATE_WRITE_BARRIER);
result.Bind(copy);
Goto(&done);
}
BIND(&done);
// Manually copy over the map of the incoming array to preserve the elements
// kind.
StoreMap(result.value(), LoadMap(fixed_array));
return result.value();
}
Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
Node* feedback_vector,
Node* slot,
......@@ -614,7 +582,11 @@ Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
BIND(&if_copy_elements);
CSA_ASSERT(this, Word32BinaryNot(
IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
var_elements.Bind(CopyFixedArrayBase(boilerplate_elements));
ExtractFixedArrayFlags flags;
flags |= ExtractFixedArrayFlag::kAllFixedArrays;
flags |= ExtractFixedArrayFlag::kForceCOWCopy;
flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
Goto(&done);
BIND(&done);
}
......
......@@ -45,7 +45,6 @@ class ConstructorBuiltinsAssembler : public CodeStubAssembler {
Node* NonEmptyShallowClone(Node* boilerplate, Node* boilerplate_map,
Node* boilerplate_elements, Node* allocation_site,
Node* capacity, ElementsKind kind);
Node* CopyFixedArrayBase(Node* elements);
Node* NotHasBoilerplate(Node* literal_site);
Node* LoadAllocationSiteBoilerplate(Node* allocation_site);
......
......@@ -31,37 +31,13 @@ TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
Node* object = Parameter(Descriptor::kObject);
// Load the {object}s elements.
Node* source = LoadElements(object);
ParameterMode mode = OptimalParameterMode();
Node* length = TaggedToParameter(LoadFixedArrayBaseLength(source), mode);
// Check if we can allocate in new space.
ElementsKind kind = PACKED_ELEMENTS;
int max_elements = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind);
Label if_newspace(this), if_lospace(this, Label::kDeferred);
Branch(UintPtrOrSmiLessThan(length, IntPtrOrSmiConstant(max_elements, mode),
mode),
&if_newspace, &if_lospace);
BIND(&if_newspace);
{
Node* target = AllocateFixedArray(kind, length, mode);
CopyFixedArrayElements(kind, source, target, length, SKIP_WRITE_BARRIER,
mode);
StoreObjectField(object, JSObject::kElementsOffset, target);
Return(target);
}
BIND(&if_lospace);
{
Node* target =
AllocateFixedArray(kind, length, mode, kAllowLargeObjectAllocation);
CopyFixedArrayElements(kind, source, target, length, UPDATE_WRITE_BARRIER,
mode);
StoreObjectField(object, JSObject::kElementsOffset, target);
Return(target);
}
Node* source = LoadObjectField(object, JSObject::kElementsOffset);
ExtractFixedArrayFlags flags;
flags |= ExtractFixedArrayFlag::kFixedArrays;
flags |= ExtractFixedArrayFlag::kForceCOWCopy;
Node* target = CloneFixedArray(source, flags);
StoreObjectField(object, JSObject::kElementsOffset, target);
Return(target);
}
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
......
......@@ -330,16 +330,15 @@ void PromiseBuiltinsAssembler::AppendPromiseCallback(int offset, Node* promise,
Node* delta = IntPtrOrSmiConstant(1, mode);
Node* new_capacity = IntPtrOrSmiAdd(length, delta, mode);
const ElementsKind kind = PACKED_ELEMENTS;
const WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER;
const CodeStubAssembler::AllocationFlags flags =
CodeStubAssembler::kAllowLargeObjectAllocation;
int additional_offset = 0;
Node* new_elements = AllocateFixedArray(kind, new_capacity, mode, flags);
ExtractFixedArrayFlags flags;
flags |= ExtractFixedArrayFlag::kFixedArrays;
flags |= ExtractFixedArrayFlag::kForceCOWCopy;
Node* new_elements =
ExtractFixedArray(elements, nullptr, length, new_capacity, flags, mode);
CopyFixedArrayElements(kind, elements, new_elements, length, barrier_mode,
mode);
StoreFixedArrayElement(new_elements, length, value, barrier_mode,
additional_offset, mode);
......
......@@ -1956,18 +1956,13 @@ class GrowableFixedArray {
CSA_ASSERT(a, a->IntPtrGreaterThan(new_capacity, a->IntPtrConstant(0)));
CSA_ASSERT(a, a->IntPtrGreaterThanOrEqual(new_capacity, element_count));
const ElementsKind kind = PACKED_ELEMENTS;
const WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER;
const CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS;
const CodeStubAssembler::AllocationFlags flags =
CodeStubAssembler::kAllowLargeObjectAllocation;
Node* const from_array = var_array_.value();
Node* const to_array =
a->AllocateFixedArray(kind, new_capacity, mode, flags);
a->CopyFixedArrayElements(kind, from_array, kind, to_array, element_count,
new_capacity, barrier_mode, mode);
CodeStubAssembler::ExtractFixedArrayFlags flags;
flags |= CodeStubAssembler::ExtractFixedArrayFlag::kFixedArrays;
flags |= CodeStubAssembler::ExtractFixedArrayFlag::kForceCOWCopy;
Node* to_array = a->ExtractFixedArray(from_array, nullptr, element_count,
new_capacity, flags);
return to_array;
}
......
......@@ -217,27 +217,39 @@ Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
}
}
bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test) {
bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
ParameterMode mode) {
int32_t constant_test;
Smi* smi_test;
if ((ToInt32Constant(test, constant_test) && constant_test == 0) ||
(ToSmiConstant(test, smi_test) && smi_test->value() == 0)) {
return true;
if (mode == INTPTR_PARAMETERS) {
if (ToInt32Constant(test, constant_test) && constant_test == 0) {
return true;
}
} else {
DCHECK_EQ(mode, SMI_PARAMETERS);
if (ToSmiConstant(test, smi_test) && smi_test->value() == 0) {
return true;
}
}
return false;
}
bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
int* value) {
int* value,
ParameterMode mode) {
int32_t int32_constant;
if (ToInt32Constant(maybe_constant, int32_constant)) {
*value = int32_constant;
return true;
}
Smi* smi_constant;
if (ToSmiConstant(maybe_constant, smi_constant)) {
*value = Smi::ToInt(smi_constant);
return true;
if (mode == INTPTR_PARAMETERS) {
if (ToInt32Constant(maybe_constant, int32_constant)) {
*value = int32_constant;
return true;
}
} else {
DCHECK_EQ(mode, SMI_PARAMETERS);
Smi* smi_constant;
if (ToSmiConstant(maybe_constant, smi_constant)) {
*value = Smi::ToInt(smi_constant);
return true;
}
}
return false;
}
......@@ -2551,14 +2563,15 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
int capacity_as_constant;
Node *array = nullptr, *elements = nullptr;
if (IsIntPtrOrSmiConstantZero(capacity)) {
if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
// Array is empty. Use the shared empty fixed array instead of allocating a
// new one.
array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
allocation_site);
StoreObjectFieldRoot(array, JSArray::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
} else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant) &&
} else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
capacity_mode) &&
capacity_as_constant > 0) {
// Allocate both array and elements object, and initialize the JSArray.
std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
......@@ -2606,10 +2619,47 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
return array;
}
Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
Node* begin, Node* count,
ParameterMode mode, Node* capacity,
Node* allocation_site) {
Node* original_array_map = LoadMap(array);
Node* elements_kind = LoadMapElementsKind(original_array_map);
// Use the canonical map for the Array's ElementsKind
Node* native_context = LoadNativeContext(context);
Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
Node* new_elements =
ExtractFixedArray(LoadElements(array), begin, count, capacity,
ExtractFixedArrayFlag::kAllFixedArrays, mode);
Node* result = AllocateUninitializedJSArrayWithoutElements(
array_map, ParameterToTagged(count, mode), allocation_site);
StoreObjectField(result, JSObject::kElementsOffset, new_elements);
return result;
}
Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
ParameterMode mode, Node* capacity,
Node* allocation_site) {
// Use the canonical map for the Array's ElementsKind
Node* tagged_length = LoadJSArrayLength(array);
Node* length = TaggedToParameter(tagged_length, mode);
if (capacity == nullptr) {
capacity = length;
}
return ExtractFastJSArray(context, array, IntPtrOrSmiConstant(0, mode),
length, mode, capacity, allocation_site);
}
Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
Node* capacity_node,
ParameterMode mode,
AllocationFlags flags) {
AllocationFlags flags,
Node* fixed_array_map) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
IntPtrOrSmiConstant(0, mode), mode));
......@@ -2618,16 +2668,173 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
// Allocate both array and elements object, and initialize the JSArray.
Node* array = Allocate(total_size, flags);
Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(array, map_index);
if (fixed_array_map != nullptr) {
// Conservatively only skip the write barrier if there are no allocation
// flags; this ensures that the object hasn't ended up in LOS. Note that the
// fixed array map is currently always immortal and technically wouldn't
// need the write barrier even in LOS, but it's better to not take chances
// in case this invariant changes later, since it's difficult to enforce
// locally here.
if (flags == CodeStubAssembler::kNone) {
StoreMapNoWriteBarrier(array, fixed_array_map);
} else {
StoreMap(array, fixed_array_map);
}
} else {
Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
? Heap::kFixedDoubleArrayMapRootIndex
: Heap::kFixedArrayMapRootIndex;
DCHECK(Heap::RootIsImmortalImmovable(map_index));
StoreMapNoWriteBarrier(array, map_index);
}
StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
ParameterToTagged(capacity_node, mode));
return array;
}
Node* CodeStubAssembler::ExtractFixedArray(Node* fixed_array, Node* first,
Node* count, Node* capacity,
ExtractFixedArrayFlags extract_flags,
ParameterMode parameter_mode) {
VARIABLE(result, MachineRepresentation::kTagged);
const AllocationFlags flags =
(extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
? CodeStubAssembler::kNone
: CodeStubAssembler::kAllowLargeObjectAllocation;
if (first == nullptr) {
first = IntPtrOrSmiConstant(0, parameter_mode);
}
if (count == nullptr) {
count =
IntPtrOrSmiSub(TaggedToParameter(LoadFixedArrayBaseLength(fixed_array),
parameter_mode),
first, parameter_mode);
CSA_ASSERT(
this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
count, parameter_mode));
}
if (capacity == nullptr) {
capacity = count;
} else {
CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
parameter_mode)));
}
Label if_fixed_double_array(this), empty(this), cow(this),
done(this, {&result});
GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), count), &empty);
Node* fixed_array_map = LoadMap(fixed_array);
if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
GotoIf(IsFixedDoubleArrayMap(fixed_array_map), &if_fixed_double_array);
} else {
CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));
}
} else {
DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(fixed_array_map)));
}
if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
Label new_space_check(this);
if (!(extract_flags & ExtractFixedArrayFlag::kForceCOWCopy)) {
Branch(WordEqual(fixed_array_map,
LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
&cow, &new_space_check);
} else {
Goto(&new_space_check);
}
BIND(&new_space_check);
bool handle_old_space = true;
if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
handle_old_space = false;
CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
count, FixedArray::kHeaderSize, parameter_mode)));
} else {
int constant_count;
handle_old_space =
!TryGetIntPtrOrSmiConstantValue(count, &constant_count,
parameter_mode) ||
(constant_count >
FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
}
Label old_space(this, Label::kDeferred);
if (handle_old_space) {
GotoIfFixedArraySizeDoesntFitInNewSpace(
capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
}
Comment("Copy PACKED_ELEMENTS new space");
ElementsKind kind = PACKED_ELEMENTS;
Node* to_elements = AllocateFixedArray(
kind, capacity, parameter_mode, AllocationFlag::kNone, fixed_array_map);
result.Bind(to_elements);
CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
capacity, SKIP_WRITE_BARRIER, parameter_mode);
Goto(&done);
if (handle_old_space) {
BIND(&old_space);
{
Comment("Copy PACKED_ELEMENTS old space");
to_elements = AllocateFixedArray(kind, capacity, parameter_mode, flags,
fixed_array_map);
result.Bind(to_elements);
CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first,
count, capacity, UPDATE_WRITE_BARRIER,
parameter_mode);
Goto(&done);
}
}
if (!(extract_flags & ExtractFixedArrayFlag::kForceCOWCopy)) {
BIND(&cow);
{
GotoIf(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
&new_space_check);
result.Bind(fixed_array);
Goto(&done);
}
}
} else {
Goto(&if_fixed_double_array);
}
if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
BIND(&if_fixed_double_array);
Comment("Copy PACKED_DOUBLE_ELEMENTS");
ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
Node* to_elements = AllocateFixedArray(kind, capacity, parameter_mode,
flags, fixed_array_map);
result.Bind(to_elements);
CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
capacity, SKIP_WRITE_BARRIER, parameter_mode);
Goto(&done);
}
BIND(&empty);
{
Comment("Copy empty array");
result.Bind(EmptyFixedArrayConstant());
Goto(&done);
}
BIND(&done);
return result.value();
}
void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
Node* length,
ParameterMode mode) {
......@@ -2727,7 +2934,7 @@ void CodeStubAssembler::FillFixedArrayWithValue(
void CodeStubAssembler::CopyFixedArrayElements(
ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
Node* to_array, Node* element_count, Node* capacity,
Node* to_array, Node* first_element, Node* element_count, Node* capacity,
WriteBarrierMode barrier_mode, ParameterMode mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
......@@ -2744,13 +2951,14 @@ void CodeStubAssembler::CopyFixedArrayElements(
Label done(this);
bool from_double_elements = IsDoubleElementsKind(from_kind);
bool to_double_elements = IsDoubleElementsKind(to_kind);
bool element_size_matches = Is64() || IsDoubleElementsKind(from_kind) ==
IsDoubleElementsKind(to_kind);
bool doubles_to_objects_conversion =
IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
bool needs_write_barrier =
doubles_to_objects_conversion ||
(barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
bool element_offset_matches =
!needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
IsDoubleElementsKind(to_kind));
Node* double_hole =
Is64() ? UncheckedCast<UintPtrT>(Int64Constant(kHoleNanInt64))
: UncheckedCast<UintPtrT>(Int32Constant(kHoleNanLower32));
......@@ -2766,15 +2974,18 @@ void CodeStubAssembler::CopyFixedArrayElements(
Heap::kTheHoleValueRootIndex, mode);
}
Node* limit_offset = ElementOffsetFromIndex(
IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
VARIABLE(var_from_offset, MachineType::PointerRepresentation(),
ElementOffsetFromIndex(element_count, from_kind, mode,
first_element_offset));
Node* first_from_element_offset =
ElementOffsetFromIndex(first_element, from_kind, mode, 0);
Node* limit_offset = IntPtrAdd(first_from_element_offset,
IntPtrConstant(first_element_offset));
VARIABLE(
var_from_offset, MachineType::PointerRepresentation(),
ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
from_kind, mode, first_element_offset));
// This second variable is used only when the element sizes of source and
// destination arrays do not match.
VARIABLE(var_to_offset, MachineType::PointerRepresentation());
if (element_size_matches) {
if (element_offset_matches) {
var_to_offset.Bind(var_from_offset.value());
} else {
var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
......@@ -2784,6 +2995,11 @@ void CodeStubAssembler::CopyFixedArrayElements(
Variable* vars[] = {&var_from_offset, &var_to_offset};
Label decrement(this, 2, vars);
Node* to_array_adjusted =
element_offset_matches
? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
: to_array;
Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
BIND(&decrement);
......@@ -2794,7 +3010,7 @@ void CodeStubAssembler::CopyFixedArrayElements(
var_from_offset.Bind(from_offset);
Node* to_offset;
if (element_size_matches) {
if (element_offset_matches) {
to_offset = from_offset;
} else {
to_offset = IntPtrSub(
......@@ -2820,13 +3036,14 @@ void CodeStubAssembler::CopyFixedArrayElements(
from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
if (needs_write_barrier) {
Store(to_array, to_offset, value);
CHECK_EQ(to_array, to_array_adjusted);
Store(to_array_adjusted, to_offset, value);
} else if (to_double_elements) {
StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
value);
StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
to_offset, value);
} else {
StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, to_offset,
value);
StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
to_offset, value);
}
Goto(&next_iter);
......@@ -2841,12 +3058,12 @@ void CodeStubAssembler::CopyFixedArrayElements(
// preserves double bits during manipulation, remove this code/change
// this to an indexed Float64 store.
if (Is64()) {
StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array, to_offset,
double_hole);
StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
to_offset, double_hole);
} else {
StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array, to_offset,
double_hole);
StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array,
StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
to_offset, double_hole);
StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
double_hole);
}
......@@ -2935,14 +3152,17 @@ void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
int to_index_constant = 0, from_index_constant = 0;
Smi* to_index_smi = nullptr;
Smi* from_index_smi = nullptr;
bool index_same = (from_encoding == to_encoding) &&
(from_index == to_index ||
(ToInt32Constant(from_index, from_index_constant) &&
ToInt32Constant(to_index, to_index_constant) &&
from_index_constant == to_index_constant) ||
(ToSmiConstant(from_index, from_index_smi) &&
ToSmiConstant(to_index, to_index_smi) &&
to_index_smi == from_index_smi));
bool index_same =
(from_encoding == to_encoding) &&
(from_index == to_index ||
((mode == INTPTR_PARAMETERS) &&
ToInt32Constant(from_index, from_index_constant) &&
ToInt32Constant(to_index, to_index_constant) &&
from_index_constant == to_index_constant) ||
((mode == SMI_PARAMETERS) && ToSmiConstant(from_index, from_index_smi) &&
ToSmiConstant(to_index, to_index_smi) &&
to_index_smi == from_index_smi));
BuildFastLoop(vars, from_offset, limit_offset,
[this, from_string, to_string, &current_to_offset, to_increment,
type, rep, index_same](Node* offset) {
......@@ -3992,6 +4212,15 @@ Node* CodeStubAssembler::IsNumber(Node* object) {
MachineRepresentation::kWord32);
}
Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
int base_size,
ParameterMode mode) {
int max_newspace_elements =
(kMaxRegularHeapObjectSize - base_size) / kPointerSize;
return IntPtrOrSmiGreaterThan(
element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
}
Node* CodeStubAssembler::IsNumberNormalized(Node* number) {
CSA_ASSERT(this, IsNumber(number));
......@@ -7654,11 +7883,7 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
int max_newspace_parameters =
(kMaxRegularHeapObjectSize - base_size) / kPointerSize;
GotoIf(IntPtrOrSmiGreaterThan(
element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
mode),
GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
doesnt_fit);
}
......
......@@ -177,8 +177,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* IntPtrOrSmiConstant(int value, ParameterMode mode);
bool IsIntPtrOrSmiConstantZero(Node* test);
bool TryGetIntPtrOrSmiConstantValue(Node* maybe_constant, int* value);
bool IsIntPtrOrSmiConstantZero(Node* test, ParameterMode mode);
bool TryGetIntPtrOrSmiConstantValue(Node* maybe_constant, int* value,
ParameterMode mode);
// Round the 32bits payload of the provided word up to the next power of two.
Node* IntPtrRoundUpToPowerOfTwo32(Node* value);
......@@ -727,9 +728,20 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* length, Node* allocation_site = nullptr,
ParameterMode capacity_mode = INTPTR_PARAMETERS);
Node* CloneFastJSArray(Node* context, Node* array,
ParameterMode mode = INTPTR_PARAMETERS,
Node* capacity = nullptr,
Node* allocation_site = nullptr);
Node* ExtractFastJSArray(Node* context, Node* array, Node* begin, Node* count,
ParameterMode mode = INTPTR_PARAMETERS,
Node* capacity = nullptr,
Node* allocation_site = nullptr);
Node* AllocateFixedArray(ElementsKind kind, Node* capacity,
ParameterMode mode = INTPTR_PARAMETERS,
AllocationFlags flags = kNone);
AllocationFlags flags = kNone,
Node* fixed_array_map = nullptr);
Node* AllocatePropertyArray(Node* capacity,
ParameterMode mode = INTPTR_PARAMETERS,
......@@ -765,18 +777,92 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
ElementsKind kind, Node* from_array, Node* to_array, Node* length,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
ParameterMode mode = INTPTR_PARAMETERS) {
CopyFixedArrayElements(kind, from_array, kind, to_array, length, length,
CopyFixedArrayElements(kind, from_array, kind, to_array,
IntPtrOrSmiConstant(0, mode), length, length,
barrier_mode, mode);
}
// Copies |element_count| elements from |from_array| to |to_array| of
// |capacity| size respecting both array's elements kinds.
// Copies |element_count| elements from |from_array| starting from element
// zero to |to_array| of |capacity| size respecting both array's elements
// kinds.
void CopyFixedArrayElements(
ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
Node* to_array, Node* element_count, Node* capacity,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
ParameterMode mode = INTPTR_PARAMETERS) {
CopyFixedArrayElements(from_kind, from_array, to_kind, to_array,
IntPtrOrSmiConstant(0, mode), element_count,
capacity, barrier_mode, mode);
}
// Copies |element_count| elements from |from_array| starting from element
// |first_element| to |to_array| of |capacity| size respecting both array's
// elements kinds.
void CopyFixedArrayElements(
ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
Node* to_array, Node* first_element, Node* element_count, Node* capacity,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
ParameterMode mode = INTPTR_PARAMETERS);
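// Illustrative call (mirroring the ExtractFixedArray implementation in this
// change), copying |count| elements starting at |first| without write
// barriers:
//   CopyFixedArrayElements(PACKED_ELEMENTS, source, PACKED_ELEMENTS, copy,
//                          first, count, capacity, SKIP_WRITE_BARRIER, mode);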
enum class ExtractFixedArrayFlag {
kFixedArrays = 1,
kFixedDoubleArrays = 2,
// Forcing COW copying removes special COW handling, resulting in better
// code if the source array has already been validated to not be COW.
kForceCOWCopy = 4,
kNewSpaceAllocationOnly = 8,
kAllFixedArrays = kFixedArrays | kFixedDoubleArrays
};
typedef base::Flags<ExtractFixedArrayFlag> ExtractFixedArrayFlags;
// Copy a portion of an existing FixedArray or FixedDoubleArray into a new
// FixedArray, including appropriate handling for empty arrays and COW
// arrays.
//
// * |source| is either a FixedArray or FixedDoubleArray from which to copy
// elements.
// * |first| is the starting element index to copy from, if nullptr is passed
// then index zero is used by default.
// * |count| is the number of elements to copy out of the source array
// starting from and including the element indexed by |first|. If |count| is
// nullptr, then all of the elements from |first| to the end of |source| are
// copied.
// * |capacity| determines the size of the allocated result array, with
// |capacity| >= |count|. If |capacity| is nullptr, then |count| is used as
// the destination array's capacity.
// * |extract_flags| determines whether FixedArrays, FixedDoubleArrays or both
// are detected and copied. Although it's always correct to pass
// kAllFixedArrays, the generated code is more compact and efficient if the
// caller can specify whether only FixedArrays or FixedDoubleArrays will be
// passed as the |source| parameter.
// * |parameter_mode| determines the parameter mode of |first|, |count| and
// |capacity|.
Node* ExtractFixedArray(Node* source, Node* first, Node* count = nullptr,
Node* capacity = nullptr,
ExtractFixedArrayFlags extract_flags =
ExtractFixedArrayFlag::kAllFixedArrays,
ParameterMode parameter_mode = INTPTR_PARAMETERS);
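// Example (an illustrative sketch mirroring the cctest cases added in this
// change): copy two elements starting at index one out of |source|, letting
// the result's capacity default to the copied count:
//   Node* copy = ExtractFixedArray(
//       source, IntPtrConstant(1), IntPtrConstant(2), nullptr,
//       ExtractFixedArrayFlag::kAllFixedArrays, INTPTR_PARAMETERS);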
// Copy the entire contents of a FixedArray or FixedDoubleArray to a new
// array, including appropriate handling for empty arrays and COW
// arrays.
//
// * |source| is either a FixedArray or FixedDoubleArray from which to copy
// elements.
// * |extract_flags| determines whether FixedArrays, FixedDoubleArrays or both
// are detected and copied. Although it's always correct to pass
// kAllFixedArrays, the generated code is more compact and efficient if the
// caller can specify whether only FixedArrays or FixedDoubleArrays will be
// passed as the |source| parameter.
Node* CloneFixedArray(
Node* source,
ExtractFixedArrayFlags flags = ExtractFixedArrayFlag::kAllFixedArrays) {
ParameterMode mode = OptimalParameterMode();
return ExtractFixedArray(source, IntPtrOrSmiConstant(0, mode), nullptr,
nullptr, flags, mode);
}
// Copies |character_count| elements from |from_string| to |to_string|
// starting at the |from_index|'th character. |from_string| and |to_string|
// can either be one-byte strings or two-byte strings, although if
......@@ -978,6 +1064,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* IsElementsKindGreaterThan(Node* target_kind,
ElementsKind reference_kind);
Node* FixedArraySizeDoesntFitInNewSpace(
Node* element_count, int base_size = FixedArray::kHeaderSize,
ParameterMode mode = INTPTR_PARAMETERS);
// String helpers.
// Load a character from a String (might flatten a ConsString).
TNode<Uint32T> StringCharCodeAt(
......
......@@ -281,12 +281,13 @@ bool CodeAssembler::ToInt64Constant(Node* node, int64_t& out_value) {
bool CodeAssembler::ToSmiConstant(Node* node, Smi*& out_value) {
if (node->opcode() == IrOpcode::kBitcastWordToTaggedSigned) {
node = node->InputAt(0);
} else {
return false;
}
IntPtrMatcher m(node);
if (m.HasValue()) {
out_value = Smi::cast(bit_cast<Object*>(m.Value()));
intptr_t value = m.Value();
// Make sure that the value is actually a smi
CHECK_EQ(0, value & ((static_cast<intptr_t>(1) << kSmiShiftSize) - 1));
out_value = Smi::cast(bit_cast<Object*>(value));
return true;
}
return false;
......
......@@ -2854,6 +2854,221 @@ TEST(NumberAddSub) {
CHECK_EQ(ft_sub.CallChecked<HeapNumber>(double_a, smi_1)->value(), 1.5);
}
TEST(CloneEmptyFixedArray) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.CloneFixedArray(m.Parameter(0)));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->empty_fixed_array());
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(0, result->length());
CHECK_EQ(*(isolate->factory()->empty_fixed_array()), result);
}
TEST(CloneFixedArray) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.CloneFixedArray(m.Parameter(0)));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(5, result->length());
CHECK(result->get(0)->IsTheHole(isolate));
CHECK_EQ(Smi::cast(result->get(1))->value(), 1234);
CHECK(result->get(2)->IsTheHole(isolate));
CHECK(result->get(3)->IsTheHole(isolate));
CHECK(result->get(4)->IsTheHole(isolate));
}
TEST(CloneFixedArrayCOW) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.CloneFixedArray(m.Parameter(0)));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
source->set_map(isolate->heap()->fixed_cow_array_map());
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(*source, result);
}
TEST(CloneFixedArrayCOWForceCopy) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
CodeStubAssembler::ExtractFixedArrayFlags flags;
flags |= CodeStubAssembler::ExtractFixedArrayFlag::kAllFixedArrays;
flags |= CodeStubAssembler::ExtractFixedArrayFlag::kForceCOWCopy;
m.Return(m.CloneFixedArray(m.Parameter(0), flags));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
source->set_map(isolate->heap()->fixed_cow_array_map());
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_NE(*source, result);
CHECK_EQ(5, result->length());
CHECK(result->get(0)->IsTheHole(isolate));
CHECK_EQ(Smi::cast(result->get(1))->value(), 1234);
CHECK(result->get(2)->IsTheHole(isolate));
CHECK(result->get(3)->IsTheHole(isolate));
CHECK(result->get(4)->IsTheHole(isolate));
}
TEST(ExtractFixedArraySimple) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 3;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.ExtractFixedArray(
m.Parameter(0), m.Parameter(1), m.Parameter(2), nullptr,
CodeStubAssembler::ExtractFixedArrayFlag::kAllFixedArrays,
CodeStubAssembler::SMI_PARAMETERS));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw =
ft.Call(source, Handle<Smi>(Smi::FromInt(1), isolate),
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(2, result->length());
CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
CHECK(result->get(1)->IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleSmiConstant) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.ExtractFixedArray(
m.Parameter(0), m.SmiConstant(1), m.SmiConstant(2), nullptr,
CodeStubAssembler::ExtractFixedArrayFlag::kAllFixedArrays,
CodeStubAssembler::SMI_PARAMETERS));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(2, result->length());
CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
CHECK(result->get(1)->IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrConstant) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.ExtractFixedArray(
m.Parameter(0), m.IntPtrConstant(1), m.IntPtrConstant(2), nullptr,
CodeStubAssembler::ExtractFixedArrayFlag::kAllFixedArrays,
CodeStubAssembler::INTPTR_PARAMETERS));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(2, result->length());
CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
CHECK(result->get(1)->IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrConstantNoDoubles) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 1;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
m.Return(m.ExtractFixedArray(
m.Parameter(0), m.IntPtrConstant(1), m.IntPtrConstant(2), nullptr,
CodeStubAssembler::ExtractFixedArrayFlag::kFixedArrays,
CodeStubAssembler::INTPTR_PARAMETERS));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw = ft.Call(source).ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(2, result->length());
CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
CHECK(result->get(1)->IsTheHole(isolate));
}
TEST(ExtractFixedArraySimpleIntPtrParameters) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 3;
CodeAssemblerTester asm_tester(isolate, kNumParams);
{
CodeStubAssembler m(asm_tester.state());
Node* p1_untagged = m.SmiUntag(m.Parameter(1));
Node* p2_untagged = m.SmiUntag(m.Parameter(2));
m.Return(m.ExtractFixedArray(m.Parameter(0), p1_untagged, p2_untagged));
}
FunctionTester ft(asm_tester.GenerateCode(), kNumParams);
Handle<FixedArray> source(isolate->factory()->NewFixedArrayWithHoles(5));
source->set(1, Smi::FromInt(1234));
Handle<Object> result_raw =
ft.Call(source, Handle<Smi>(Smi::FromInt(1), isolate),
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedArray* result(FixedArray::cast(*result_raw));
CHECK_EQ(2, result->length());
CHECK_EQ(Smi::cast(result->get(0))->value(), 1234);
CHECK(result->get(1)->IsTheHole(isolate));
Handle<FixedDoubleArray> source_double(Handle<FixedDoubleArray>::cast(
isolate->factory()->NewFixedDoubleArray(5)));
source_double->set(0, 10);
source_double->set(1, 11);
source_double->set(2, 12);
source_double->set(3, 13);
source_double->set(4, 14);
Handle<Object> double_result_raw =
ft.Call(source_double, Handle<Smi>(Smi::FromInt(1), isolate),
Handle<Smi>(Smi::FromInt(2), isolate))
.ToHandleChecked();
FixedDoubleArray* double_result(FixedDoubleArray::cast(*double_result_raw));
CHECK_EQ(2, double_result->length());
CHECK_EQ(double_result->get_scalar(0), 11);
CHECK_EQ(double_result->get_scalar(1), 12);
}
} // namespace compiler
} // namespace internal
} // namespace v8