Commit 8a1a2867 authored by Igor Sheludko, committed by Commit Bot

[csa] Remove ParameterMode from CSA::BuildFastLoop

Bug: v8:9708
Change-Id: I305cc007a4e7302c8587b999cbb11f23ced4cfd3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1800579
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63735}
parent 6cf125a9
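
For context, a standalone sketch of the shape of this refactoring (illustrative only, not V8 code): the loop helper becomes a template on the index type, so call sites state the type once and the body lambda receives a typed index, instead of taking an untyped Node* and threading a runtime ParameterMode argument. The names BuildFastLoop and IndexAdvanceMode mirror the CSA API shown in the diff below; the simplified signature and plain C++ index types are assumptions for illustration.

// Standalone analogue of the refactoring (illustrative only, not V8 code).
#include <cstdint>
#include <functional>
#include <iostream>

enum class IndexAdvanceMode { kPre, kPost };

template <typename TIndex>
TIndex BuildFastLoop(TIndex start_index, TIndex end_index,
                     const std::function<void(TIndex)>& body, int increment,
                     IndexAdvanceMode advance_mode = IndexAdvanceMode::kPre) {
  TIndex index = start_index;
  // Loop until the index reaches the end; the advance mode decides whether
  // the increment is applied before or after the body sees the index.
  while (index != end_index) {
    if (advance_mode == IndexAdvanceMode::kPre) index += increment;
    body(index);
    if (advance_mode == IndexAdvanceMode::kPost) index += increment;
  }
  return index;
}

int main() {
  // Call site in the new style: the index type is stated once, the lambda
  // parameter is typed, and no ParameterMode is passed.
  BuildFastLoop<intptr_t>(
      0, 4, [](intptr_t i) { std::cout << i << '\n'; }, 1,
      IndexAdvanceMode::kPost);  // prints 0 1 2 3
  return 0;
}
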
@@ -262,16 +262,17 @@ Node* ArgumentsBuiltinsAssembler::EmitFastNewSloppyArguments(Node* context,
   TVARIABLE(IntPtrT, current_argument,
             Signed(arguments.AtIndexPtr(info.argument_count, mode)));
   VariableList var_list1({&current_argument}, zone());
-  mapped_offset = UncheckedCast<IntPtrT>(BuildFastLoop(
+  mapped_offset = BuildFastLoop<IntPtrT>(
       var_list1, argument_offset, mapped_offset,
-      [this, elements, &current_argument](Node* offset) {
+      [&](TNode<IntPtrT> offset) {
         Increment(&current_argument, kSystemPointerSize);
         TNode<Object> arg = LoadBufferObject(
             ReinterpretCast<RawPtrT>(current_argument.value()), 0);
         StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                             arg);
-        return;
       },
-      -kTaggedSize, INTPTR_PARAMETERS));
+      -kTaggedSize);
   // Copy the parameter slots and the holes in the arguments.
   // We need to fill in mapped_count slots. They index the context,
@@ -295,9 +296,9 @@ Node* ArgumentsBuiltinsAssembler::EmitFastNewSloppyArguments(Node* context,
       IntPtrConstant(kParameterMapHeaderSize - FixedArray::kHeaderSize));
   TNode<IntPtrT> zero_offset = ElementOffsetFromIndex(
       zero, PACKED_ELEMENTS, mode, FixedArray::kHeaderSize - kHeapObjectTag);
-  BuildFastLoop(
+  BuildFastLoop<IntPtrT>(
       var_list2, mapped_offset, zero_offset,
-      [=, &context_index](Node* offset) {
+      [&](TNode<IntPtrT> offset) {
         StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                             the_hole);
         StoreNoWriteBarrier(MachineRepresentation::kTagged,
@@ -305,7 +306,7 @@ Node* ArgumentsBuiltinsAssembler::EmitFastNewSloppyArguments(Node* context,
                             BIntToSmi(context_index.value()));
         Increment(&context_index);
       },
-      -kTaggedSize, INTPTR_PARAMETERS);
+      -kTaggedSize);
   result.Bind(argument_object);
   Goto(&done);
@@ -224,16 +224,8 @@ void ArrayBuiltinsAssembler::VisitAllTypedArrayElements(
     ForEachDirection direction, TNode<JSTypedArray> typed_array) {
   VariableList list({&a_, &k_, &to_}, zone());
-  FastLoopBody body = [&](Node* index) {
-    GotoIf(IsDetachedBuffer(CAST(array_buffer)), detached);
-    TNode<RawPtrT> data_ptr = LoadJSTypedArrayBackingStore(typed_array);
-    auto value = LoadFixedTypedArrayElementAsTagged(
-        data_ptr, index, source_elements_kind_, SMI_PARAMETERS);
-    k_.Bind(index);
-    a_.Bind(processor(this, value, index));
-  };
-  Node* start = SmiConstant(0);
-  Node* end = len_;
+  TNode<Smi> start = SmiConstant(0);
+  TNode<Smi> end = CAST(len_);
   IndexAdvanceMode advance_mode = IndexAdvanceMode::kPost;
   int incr = 1;
   if (direction == ForEachDirection::kReverse) {
@@ -241,8 +233,17 @@ void ArrayBuiltinsAssembler::VisitAllTypedArrayElements(
     advance_mode = IndexAdvanceMode::kPre;
     incr = -1;
   }
-  BuildFastLoop(list, start, end, body, incr, ParameterMode::SMI_PARAMETERS,
-                advance_mode);
+  BuildFastLoop<Smi>(
+      list, start, end,
+      [&](TNode<Smi> index) {
+        GotoIf(IsDetachedBuffer(CAST(array_buffer)), detached);
+        TNode<RawPtrT> data_ptr = LoadJSTypedArrayBackingStore(typed_array);
+        TNode<Object> value = LoadFixedTypedArrayElementAsTagged(
+            data_ptr, index, source_elements_kind_, SMI_PARAMETERS);
+        k_.Bind(index);
+        a_.Bind(processor(this, value, index));
+      },
+      incr, advance_mode);
 }
 // Perform ArraySpeciesCreate (ES6 #sec-arrayspeciescreate).
@@ -259,7 +259,7 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
                          &if_doubles);
   BIND(&if_smiorobjects);
   {
-    auto set_entry = [&](Node* index) {
+    auto set_entry = [&](TNode<IntPtrT> index) {
       TNode<Object> element = LoadAndNormalizeFixedArrayElement(
           CAST(elements), UncheckedCast<IntPtrT>(index));
       AddConstructorEntry(variant, context, collection, add_func, element,
@@ -270,8 +270,8 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
     // elements, a fast loop is used. This assumes that adding an element
     // to the collection does not call user code that could mutate the elements
     // or collection.
-    BuildFastLoop(IntPtrConstant(0), length, set_entry, 1,
-                  ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+    BuildFastLoop<IntPtrT>(IntPtrConstant(0), length, set_entry, 1,
+                           IndexAdvanceMode::kPost);
     Goto(&exit);
   }
   BIND(&if_doubles);
@@ -286,13 +286,13 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
                           element);
     } else {
       DCHECK(variant == kSet || variant == kWeakSet);
-      auto set_entry = [&](Node* index) {
+      auto set_entry = [&](TNode<IntPtrT> index) {
         TNode<Object> entry = LoadAndNormalizeFixedDoubleArrayElement(
             elements, UncheckedCast<IntPtrT>(index));
         AddConstructorEntry(variant, context, collection, add_func, entry);
       };
-      BuildFastLoop(IntPtrConstant(0), length, set_entry, 1,
-                    ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+      BuildFastLoop<IntPtrT>(IntPtrConstant(0), length, set_entry, 1,
+                             IndexAdvanceMode::kPost);
       Goto(&exit);
     }
   }
@@ -263,13 +263,12 @@ TNode<Context> ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
   TNode<Oddball> undefined = UndefinedConstant();
   TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
   CodeStubAssembler::VariableList vars(0, zone());
-  BuildFastLoop(
+  BuildFastLoop<IntPtrT>(
       vars, start_offset, size,
-      [=](SloppyTNode<IntPtrT> offset) {
-        StoreObjectFieldNoWriteBarrier(
-            function_context, UncheckedCast<IntPtrT>(offset), undefined);
+      [=](TNode<IntPtrT> offset) {
+        StoreObjectFieldNoWriteBarrier(function_context, offset, undefined);
       },
-      kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+      kTaggedSize, IndexAdvanceMode::kPost);
   return function_context;
 }
@@ -571,18 +570,18 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
   BIND(&continue_with_write_barrier);
   {
     Comment("Copy in-object properties slow");
-    BuildFastLoop(
+    BuildFastLoop<IntPtrT>(
         offset.value(), instance_size,
-        [=](SloppyTNode<IntPtrT> offset) {
+        [=](TNode<IntPtrT> offset) {
          // TODO(ishell): value decompression is not necessary here.
          TNode<Object> field = LoadObjectField(boilerplate, offset);
          StoreObjectFieldNoWriteBarrier(copy, offset, field);
        },
-        kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+        kTaggedSize, IndexAdvanceMode::kPost);
     Comment("Copy mutable HeapNumber values");
-    BuildFastLoop(
+    BuildFastLoop<IntPtrT>(
         offset.value(), instance_size,
-        [=](SloppyTNode<IntPtrT> offset) {
+        [=](TNode<IntPtrT> offset) {
          TNode<Object> field = LoadObjectField(copy, offset);
          Label copy_heap_number(this, Label::kDeferred), continue_loop(this);
          // We only have to clone complex field values.
@@ -601,7 +600,7 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
          }
          BIND(&continue_loop);
        },
-        kTaggedSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+        kTaggedSize, IndexAdvanceMode::kPost);
     Goto(&done_init);
   }
   BIND(&done_init);
@@ -61,7 +61,8 @@ TF_BUILTIN(StringToLowerCaseIntl, IntlBuiltinsAssembler) {
   Node* const dst_ptr = PointerToSeqStringData(dst);
   TVARIABLE(IntPtrT, var_cursor, IntPtrConstant(0));
-  TNode<RawPtrT> const start_address = to_direct.PointerToData(&call_c);
+  TNode<IntPtrT> const start_address =
+      ReinterpretCast<IntPtrT>(to_direct.PointerToData(&call_c));
   TNode<IntPtrT> const end_address =
       Signed(IntPtrAdd(start_address, ChangeUint32ToWord(length)));
@@ -71,9 +72,9 @@ TF_BUILTIN(StringToLowerCaseIntl, IntlBuiltinsAssembler) {
   VARIABLE(var_did_change, MachineRepresentation::kWord32, Int32Constant(0));
   VariableList push_vars({&var_cursor, &var_did_change}, zone());
-  BuildFastLoop(
+  BuildFastLoop<IntPtrT>(
       push_vars, start_address, end_address,
-      [=, &var_cursor, &var_did_change](Node* current) {
+      [&](TNode<IntPtrT> current) {
        TNode<Uint8T> c = Load<Uint8T>(current);
        TNode<Uint8T> lower =
            Load<Uint8T>(to_lower_table_addr, ChangeInt32ToIntPtr(c));
@@ -85,7 +86,7 @@ TF_BUILTIN(StringToLowerCaseIntl, IntlBuiltinsAssembler) {
        Increment(&var_cursor);
      },
-      kCharSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+      kCharSize, IndexAdvanceMode::kPost);
   // Return the original string if it remained unchanged in order to preserve
   // e.g. internalization and private symbols (such as the preserved object
@@ -679,9 +679,9 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
     TVARIABLE(IntPtrT, var_to_offset, to_offset);
     VariableList vars({&var_to_offset}, zone());
-    BuildFastLoop(
+    BuildFastLoop<IntPtrT>(
         vars, IntPtrZero(), limit_offset,
-        [=, &var_to_offset](Node* offset) {
+        [&](TNode<IntPtrT> offset) {
          TNode<Int32T> value = UncheckedCast<Int32T>(Load(
              MachineType::Int32(), static_offsets_vector_address, offset));
          TNode<Smi> smi_value = SmiFromInt32(value);
@@ -689,7 +689,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
                              var_to_offset.value(), smi_value);
          Increment(&var_to_offset, kTaggedSize);
        },
-        kInt32Size, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+        kInt32Size, IndexAdvanceMode::kPost);
   }
   var_result = match_info;
@@ -1366,9 +1366,9 @@ TNode<JSArray> StringBuiltinsAssembler::StringToArray(
     TNode<IntPtrT> string_data_offset = to_direct.offset();
     TNode<FixedArray> cache = SingleCharacterStringCacheConstant();
-    BuildFastLoop(
+    BuildFastLoop<IntPtrT>(
         IntPtrConstant(0), length,
-        [&](Node* index) {
+        [&](TNode<IntPtrT> index) {
          // TODO(jkummerow): Implement a CSA version of DisallowHeapAllocation
          // and use that to guard ToDirectStringAssembler.PointerToData().
          CSA_ASSERT(this, WordEqual(to_direct.PointerToData(&call_runtime),
@@ -1385,7 +1385,7 @@ TNode<JSArray> StringBuiltinsAssembler::StringToArray(
          StoreFixedArrayElement(elements, index, entry);
        },
-        1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+        1, IndexAdvanceMode::kPost);
     TNode<Map> array_map = LoadJSArrayElementsMap(PACKED_ELEMENTS, context);
     result_array = AllocateJSArray(array_map, elements, length_smi);
@@ -735,9 +735,9 @@ TF_BUILTIN(TypedArrayOf, TypedArrayBuiltinsAssembler) {
   DispatchTypedArrayByElementsKind(
       elements_kind,
       [&](ElementsKind kind, int size, int typed_array_fun_index) {
-        BuildFastLoop(
+        BuildFastLoop<IntPtrT>(
            IntPtrConstant(0), length,
-            [&](Node* index) {
+            [&](TNode<IntPtrT> index) {
              TNode<Object> item = args.AtIndex(index, INTPTR_PARAMETERS);
              Node* value =
                  PrepareValueForWriteToTypedArray(item, kind, context);
@@ -755,7 +755,7 @@ TF_BUILTIN(TypedArrayOf, TypedArrayBuiltinsAssembler) {
              StoreElement(backing_store, kind, index, value,
                           INTPTR_PARAMETERS);
            },
-            1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+            1, IndexAdvanceMode::kPost);
       });
   // 8. Return newObj.
@@ -948,9 +948,9 @@ TF_BUILTIN(TypedArrayFrom, TypedArrayBuiltinsAssembler) {
   TNode<Int32T> elements_kind = LoadElementsKind(target_obj.value());
   // 7e/13 : Copy the elements
-  BuildFastLoop(
+  BuildFastLoop<Smi>(
      SmiConstant(0), final_length.value(),
-      [&](Node* index) {
+      [&](TNode<Smi> index) {
        TNode<Object> const k_value =
            GetProperty(context, final_source.value(), index);
@@ -978,7 +978,7 @@ TF_BUILTIN(TypedArrayFrom, TypedArrayBuiltinsAssembler) {
                                    SMI_PARAMETERS);
        });
      },
-      1, ParameterMode::SMI_PARAMETERS, IndexAdvanceMode::kPost);
+      1, IndexAdvanceMode::kPost);
   args.PopAndReturn(target_obj.value());
This diff is collapsed.
@@ -1663,8 +1663,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   void BuildAppendJSArray(ElementsKind kind, Node* array, Node* value,
                           Label* bailout);
-  void StoreFieldsNoWriteBarrier(Node* start_address, Node* end_address,
-                                 Node* value);
+  void StoreFieldsNoWriteBarrier(TNode<IntPtrT> start_address,
+                                 TNode<IntPtrT> end_address,
+                                 TNode<Object> value);
   Node* AllocateCellWithValue(Node* value,
                               WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
@@ -1763,7 +1764,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   TNode<CollectionType> AllocateSmallOrderedHashTable(TNode<IntPtrT> capacity);
   Node* AllocateStruct(Node* map, AllocationFlags flags = kNone);
-  void InitializeStructBody(Node* object, Node* map, Node* size,
+  void InitializeStructBody(TNode<HeapObject> object, TNode<IntPtrT> size,
                             int start_offset = Struct::kHeaderSize);
   TNode<JSObject> AllocateJSObjectFromMap(
@@ -1772,14 +1773,17 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
       SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
   void InitializeJSObjectFromMap(
-      Node* object, Node* map, Node* instance_size, Node* properties = nullptr,
+      SloppyTNode<HeapObject> object, SloppyTNode<Map> map,
+      SloppyTNode<IntPtrT> instance_size, Node* properties = nullptr,
       Node* elements = nullptr,
       SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
-  void InitializeJSObjectBodyWithSlackTracking(Node* object, Node* map,
-                                               Node* instance_size);
+  void InitializeJSObjectBodyWithSlackTracking(
+      SloppyTNode<HeapObject> object, SloppyTNode<Map> map,
+      SloppyTNode<IntPtrT> instance_size);
   void InitializeJSObjectBodyNoSlackTracking(
-      Node* object, Node* map, Node* instance_size,
+      SloppyTNode<HeapObject> object, SloppyTNode<Map> map,
+      SloppyTNode<IntPtrT> instance_size,
       int start_offset = JSObject::kHeaderSize);
   TNode<BoolT> IsValidFastJSArrayCapacity(Node* capacity,
@@ -3310,39 +3314,24 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   enum class IndexAdvanceMode { kPre, kPost };
-  // TODO(v8:9708): typify index parameter.
-  using FastLoopBody = std::function<void(Node* index)>;
+  template <typename TIndex>
+  using FastLoopBody = std::function<void(TNode<TIndex> index)>;
   template <typename TIndex>
   TNode<TIndex> BuildFastLoop(
       const VariableList& var_list, TNode<TIndex> start_index,
-      TNode<TIndex> end_index, const FastLoopBody& body, int increment,
+      TNode<TIndex> end_index, const FastLoopBody<TIndex>& body, int increment,
       IndexAdvanceMode advance_mode = IndexAdvanceMode::kPre);
   template <typename TIndex>
   TNode<TIndex> BuildFastLoop(
       TNode<TIndex> start_index, TNode<TIndex> end_index,
-      const FastLoopBody& body, int increment,
+      const FastLoopBody<TIndex>& body, int increment,
       IndexAdvanceMode advance_mode = IndexAdvanceMode::kPre) {
     return BuildFastLoop(VariableList(0, zone()), start_index, end_index, body,
                          increment, advance_mode);
   }
-  // TODO(v8:9708): remove once all uses are ported.
-  Node* BuildFastLoop(const VariableList& var_list, Node* start_index,
-                      Node* end_index, const FastLoopBody& body, int increment,
-                      ParameterMode parameter_mode,
-                      IndexAdvanceMode advance_mode = IndexAdvanceMode::kPre);
-  // TODO(v8:9708): remove once all uses are ported.
-  Node* BuildFastLoop(Node* start_index, Node* end_index,
-                      const FastLoopBody& body, int increment,
-                      ParameterMode parameter_mode,
-                      IndexAdvanceMode advance_mode = IndexAdvanceMode::kPre) {
-    return BuildFastLoop(VariableList(0, zone()), start_index, end_index, body,
-                         increment, parameter_mode, advance_mode);
-  }
   enum class ForEachDirection { kForward, kReverse };
   using FastFixedArrayForEachBody =
@@ -3387,8 +3376,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                                  Label* doesnt_fit, int base_size,
                                  ParameterMode mode);
-  void InitializeFieldsWithRoot(Node* object, Node* start_offset,
-                                Node* end_offset, RootIndex root);
+  void InitializeFieldsWithRoot(TNode<HeapObject> object,
+                                TNode<IntPtrT> start_offset,
+                                TNode<IntPtrT> end_offset, RootIndex root);
   Node* RelationalComparison(Operation op, SloppyTNode<Object> left,
                              SloppyTNode<Object> right,
@@ -3882,30 +3882,28 @@ void AccessorAssembler::GenerateCloneObjectIC() {
     // Just copy the fields as raw data (pretending that there are no mutable
     // HeapNumbers). This doesn't need write barriers.
-    BuildFastLoop(
+    BuildFastLoop<IntPtrT>(
        source_start, source_size,
-        [=](Node* field_index) {
-          TNode<IntPtrT> field_offset =
-              TimesTaggedSize(UncheckedCast<IntPtrT>(field_index));
+        [=](TNode<IntPtrT> field_index) {
+          TNode<IntPtrT> field_offset = TimesTaggedSize(field_index);
          TNode<TaggedT> field =
              LoadObjectField<TaggedT>(CAST(source), field_offset);
          TNode<IntPtrT> result_offset =
              IntPtrAdd(field_offset, field_offset_difference);
          StoreObjectFieldNoWriteBarrier(object, result_offset, field);
        },
-        1, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+        1, IndexAdvanceMode::kPost);
     // If mutable HeapNumbers can occur, we need to go through the {object}
     // again here and properly clone them. We use a second loop here to
     // ensure that the GC (and heap verifier) always sees properly initialized
     // objects, i.e. never hits undefined values in double fields.
     if (!FLAG_unbox_double_fields) {
-      BuildFastLoop(
+      BuildFastLoop<IntPtrT>(
          source_start, source_size,
-          [=](Node* field_index) {
-            TNode<IntPtrT> result_offset =
-                IntPtrAdd(TimesTaggedSize(UncheckedCast<IntPtrT>(field_index)),
-                          field_offset_difference);
+          [=](TNode<IntPtrT> field_index) {
+            TNode<IntPtrT> result_offset = IntPtrAdd(
+                TimesTaggedSize(field_index), field_offset_difference);
            TNode<Object> field = LoadObjectField(object, result_offset);
            Label if_done(this), if_mutableheapnumber(this, Label::kDeferred);
            GotoIf(TaggedIsSmi(field), &if_done);
@@ -3919,7 +3917,7 @@ void AccessorAssembler::GenerateCloneObjectIC() {
            }
            BIND(&if_done);
          },
-          1, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
+          1, IndexAdvanceMode::kPost);
     }
     Return(object);