Commit 3d618949 authored by ishell's avatar ishell Committed by Commit bot

[stubs] Cleanup usages of lambdas in CodeStubAssembler and friends.

The changes are:
1) Pass lambdas as const references to avoid unnecessary copying.
2) Remove CodeStubAssembler* parameter from loop bodies and let the lambdas
   capture the right assembler instead. It makes the loop body code look
   uniform with the surrounding code and unblocks splitting of a CSA
   into different classes.

BUG=

Review-Url: https://codereview.chromium.org/2535753012
Cr-Commit-Position: refs/heads/master@{#41482}
parent 8590e8d4
......@@ -314,11 +314,10 @@ void Builtins::Generate_FastArrayPush(compiler::CodeAssemblerState* state) {
assembler.Bind(&default_label);
{
args.ForEach(
[receiver, context, &arg_index](CodeStubAssembler* assembler,
Node* arg) {
Node* length = assembler->LoadJSArrayLength(receiver);
assembler->CallRuntime(Runtime::kSetProperty, context, receiver,
length, arg, assembler->SmiConstant(STRICT));
[&assembler, receiver, context, &arg_index](Node* arg) {
Node* length = assembler.LoadJSArrayLength(receiver);
assembler.CallRuntime(Runtime::kSetProperty, context, receiver,
length, arg, assembler.SmiConstant(STRICT));
},
arg_index.value());
args.PopAndReturn(assembler.LoadJSArrayLength(receiver));
......
......@@ -376,15 +376,14 @@ void Builtins::Generate_FastFunctionPrototypeBind(
Variable index(&assembler, MachineType::PointerRepresentation());
index.Bind(assembler.IntPtrConstant(0));
CodeStubAssembler::VariableList foreach_vars({&index}, assembler.zone());
args.ForEach(
foreach_vars,
[elements, &index](CodeStubAssembler* assembler, compiler::Node* arg) {
assembler->StoreFixedArrayElement(elements, index.value(), arg,
UPDATE_WRITE_BARRIER, 0,
CodeStubAssembler::INTPTR_PARAMETERS);
assembler->Increment(index);
},
assembler.IntPtrConstant(1));
args.ForEach(foreach_vars,
[&assembler, elements, &index](compiler::Node* arg) {
assembler.StoreFixedArrayElement(
elements, index.value(), arg, UPDATE_WRITE_BARRIER, 0,
CodeStubAssembler::INTPTR_PARAMETERS);
assembler.Increment(index);
},
assembler.IntPtrConstant(1));
argument_array.Bind(elements);
assembler.Goto(&arguments_done);
......
......@@ -2159,8 +2159,8 @@ Node* ReplaceGlobalCallableFastPath(CodeStubAssembler* a, Node* context,
a->BuildFastLoop(
MachineType::PointerRepresentation(), from, to,
[res_elems, isolate, native_context, context, undefined,
replace_callable, mode](CodeStubAssembler* a, Node* index) {
[a, res_elems, isolate, native_context, context, undefined,
replace_callable, mode](Node* index) {
Node* const elem =
a->LoadFixedArrayElement(res_elems, index, 0, mode);
......
......@@ -473,27 +473,26 @@ void Builtins::Generate_StringFromCharCode(
// codes. Stop if any of the conversions generates a code that doesn't fit
// in 8 bits.
CodeStubAssembler::VariableList vars({&max_index}, assembler.zone());
arguments.ForEach(vars, [context, &two_byte, &max_index, &code16,
one_byte_result](CodeStubAssembler* assembler,
Node* arg) {
Node* code32 = assembler->TruncateTaggedToWord32(context, arg);
code16 = assembler->Word32And(
code32, assembler->Int32Constant(String::kMaxUtf16CodeUnit));
assembler->GotoIf(
assembler->Int32GreaterThan(
code16, assembler->Int32Constant(String::kMaxOneByteCharCode)),
arguments.ForEach(vars, [&assembler, context, &two_byte, &max_index,
&code16, one_byte_result](Node* arg) {
Node* code32 = assembler.TruncateTaggedToWord32(context, arg);
code16 = assembler.Word32And(
code32, assembler.Int32Constant(String::kMaxUtf16CodeUnit));
assembler.GotoIf(
assembler.Int32GreaterThan(
code16, assembler.Int32Constant(String::kMaxOneByteCharCode)),
&two_byte);
// The {code16} fits into the SeqOneByteString {one_byte_result}.
Node* offset = assembler->ElementOffsetFromIndex(
Node* offset = assembler.ElementOffsetFromIndex(
max_index.value(), UINT8_ELEMENTS,
CodeStubAssembler::INTPTR_PARAMETERS,
SeqOneByteString::kHeaderSize - kHeapObjectTag);
assembler->StoreNoWriteBarrier(MachineRepresentation::kWord8,
one_byte_result, offset, code16);
max_index.Bind(assembler->IntPtrAdd(max_index.value(),
assembler->IntPtrConstant(1)));
assembler.StoreNoWriteBarrier(MachineRepresentation::kWord8,
one_byte_result, offset, code16);
max_index.Bind(
assembler.IntPtrAdd(max_index.value(), assembler.IntPtrConstant(1)));
});
arguments.PopAndReturn(one_byte_result);
......@@ -527,20 +526,19 @@ void Builtins::Generate_StringFromCharCode(
// using a 16-bit representation.
arguments.ForEach(
vars,
[context, two_byte_result, &max_index](CodeStubAssembler* assembler,
Node* arg) {
Node* code32 = assembler->TruncateTaggedToWord32(context, arg);
Node* code16 = assembler->Word32And(
code32, assembler->Int32Constant(String::kMaxUtf16CodeUnit));
[&assembler, context, two_byte_result, &max_index](Node* arg) {
Node* code32 = assembler.TruncateTaggedToWord32(context, arg);
Node* code16 = assembler.Word32And(
code32, assembler.Int32Constant(String::kMaxUtf16CodeUnit));
Node* offset = assembler->ElementOffsetFromIndex(
Node* offset = assembler.ElementOffsetFromIndex(
max_index.value(), UINT16_ELEMENTS,
CodeStubAssembler::INTPTR_PARAMETERS,
SeqTwoByteString::kHeaderSize - kHeapObjectTag);
assembler->StoreNoWriteBarrier(MachineRepresentation::kWord16,
two_byte_result, offset, code16);
max_index.Bind(assembler->IntPtrAdd(max_index.value(),
assembler->IntPtrConstant(1)));
assembler.StoreNoWriteBarrier(MachineRepresentation::kWord16,
two_byte_result, offset, code16);
max_index.Bind(assembler.IntPtrAdd(max_index.value(),
assembler.IntPtrConstant(1)));
},
max_index.value());
......
This diff is collapsed.
......@@ -164,8 +164,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* InnerAllocate(Node* previous, Node* offset);
Node* IsRegularHeapObjectSize(Node* size);
typedef std::function<Node*()> ConditionBody;
void Assert(ConditionBody condition_body, const char* string = nullptr,
typedef std::function<Node*()> NodeGenerator;
void Assert(const NodeGenerator& condition_body, const char* string = nullptr,
const char* file = nullptr, int line = 0);
// Check a value for smi-ness
......@@ -830,8 +831,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// If it can't handle the case {receiver}/{key} case then the control goes
// to {if_bailout}.
void TryPrototypeChainLookup(Node* receiver, Node* key,
LookupInHolder& lookup_property_in_holder,
LookupInHolder& lookup_element_in_holder,
const LookupInHolder& lookup_property_in_holder,
const LookupInHolder& lookup_element_in_holder,
Label* if_end, Label* if_bailout);
// Instanceof helpers.
......@@ -923,28 +924,28 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
enum class IndexAdvanceMode { kPre, kPost };
void BuildFastLoop(
const VariableList& var_list, MachineRepresentation index_rep,
Node* start_index, Node* end_index,
std::function<void(CodeStubAssembler* assembler, Node* index)> body,
int increment, IndexAdvanceMode mode = IndexAdvanceMode::kPre);
typedef std::function<void(Node* index)> FastLoopBody;
void BuildFastLoop(const VariableList& var_list,
MachineRepresentation index_rep, Node* start_index,
Node* end_index, const FastLoopBody& body, int increment,
IndexAdvanceMode mode = IndexAdvanceMode::kPre);
void BuildFastLoop(
MachineRepresentation index_rep, Node* start_index, Node* end_index,
std::function<void(CodeStubAssembler* assembler, Node* index)> body,
int increment, IndexAdvanceMode mode = IndexAdvanceMode::kPre) {
void BuildFastLoop(MachineRepresentation index_rep, Node* start_index,
Node* end_index, const FastLoopBody& body, int increment,
IndexAdvanceMode mode = IndexAdvanceMode::kPre) {
BuildFastLoop(VariableList(0, zone()), index_rep, start_index, end_index,
body, increment, mode);
}
enum class ForEachDirection { kForward, kReverse };
typedef std::function<void(Node* fixed_array, Node* offset)>
FastFixedArrayForEachBody;
void BuildFastFixedArrayForEach(
Node* fixed_array, ElementsKind kind, Node* first_element_inclusive,
Node* last_element_exclusive,
std::function<void(CodeStubAssembler* assembler, Node* fixed_array,
Node* offset)>
body,
Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
ParameterMode mode = INTPTR_PARAMETERS,
ForEachDirection direction = ForEachDirection::kReverse);
......@@ -1069,11 +1070,10 @@ class CodeStubArguments {
Node* GetLength() const { return argc_; }
typedef std::function<void(CodeStubAssembler* assembler, Node* arg)>
ForEachBodyFunction;
typedef std::function<void(Node* arg)> ForEachBodyFunction;
// Iteration doesn't include the receiver. |first| and |last| are zero-based.
void ForEach(ForEachBodyFunction body, Node* first = nullptr,
void ForEach(const ForEachBodyFunction& body, Node* first = nullptr,
Node* last = nullptr, CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS) {
CodeStubAssembler::VariableList list(0, assembler_->zone());
......@@ -1082,7 +1082,7 @@ class CodeStubArguments {
// Iteration doesn't include the receiver. |first| and |last| are zero-based.
void ForEach(const CodeStubAssembler::VariableList& vars,
ForEachBodyFunction body, Node* first = nullptr,
const ForEachBodyFunction& body, Node* first = nullptr,
Node* last = nullptr, CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS);
......
......@@ -2626,7 +2626,7 @@ compiler::Node* FastNewFunctionContextStub::Generate(
Node* undefined = assembler->UndefinedConstant();
assembler->BuildFastFixedArrayForEach(
function_context, FAST_ELEMENTS, min_context_slots, length,
[undefined](CodeStubAssembler* assembler, Node* context, Node* offset) {
[assembler, undefined](Node* context, Node* offset) {
assembler->StoreNoWriteBarrier(MachineType::PointerRepresentation(),
context, offset, undefined);
});
......
......@@ -79,21 +79,20 @@ void AccessorAssemblerImpl::HandlePolymorphicCase(
Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
BuildFastLoop(
MachineType::PointerRepresentation(), init, length,
[receiver_map, feedback, if_handler, var_handler](CodeStubAssembler* csa,
Node* index) {
Node* cached_map = csa->LoadWeakCellValue(
csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
[this, receiver_map, feedback, if_handler, var_handler](Node* index) {
Node* cached_map = LoadWeakCellValue(
LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
Label next_entry(csa);
csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
Label next_entry(this);
GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
// Found, now call handler.
Node* handler = csa->LoadFixedArrayElement(
feedback, index, kPointerSize, INTPTR_PARAMETERS);
Node* handler = LoadFixedArrayElement(feedback, index, kPointerSize,
INTPTR_PARAMETERS);
var_handler->Bind(handler);
csa->Goto(if_handler);
Goto(if_handler);
csa->Bind(&next_entry);
Bind(&next_entry);
},
kEntrySize, IndexAdvanceMode::kPost);
// The loop falls through if no handler was found.
......@@ -111,30 +110,28 @@ void AccessorAssemblerImpl::HandleKeyedStorePolymorphicCase(
Node* init = IntPtrConstant(0);
Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
BuildFastLoop(
MachineType::PointerRepresentation(), init, length,
[receiver_map, feedback, if_handler, var_handler, if_transition_handler,
var_transition_map_cell](CodeStubAssembler* csa, Node* index) {
Node* cached_map = csa->LoadWeakCellValue(
csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
Label next_entry(csa);
csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
Node* maybe_transition_map_cell = csa->LoadFixedArrayElement(
feedback, index, kPointerSize, INTPTR_PARAMETERS);
var_handler->Bind(csa->LoadFixedArrayElement(
feedback, index, 2 * kPointerSize, INTPTR_PARAMETERS));
csa->GotoIf(
csa->WordEqual(maybe_transition_map_cell,
csa->LoadRoot(Heap::kUndefinedValueRootIndex)),
if_handler);
var_transition_map_cell->Bind(maybe_transition_map_cell);
csa->Goto(if_transition_handler);
csa->Bind(&next_entry);
},
kEntrySize, IndexAdvanceMode::kPost);
BuildFastLoop(MachineType::PointerRepresentation(), init, length,
[this, receiver_map, feedback, if_handler, var_handler,
if_transition_handler, var_transition_map_cell](Node* index) {
Node* cached_map = LoadWeakCellValue(LoadFixedArrayElement(
feedback, index, 0, INTPTR_PARAMETERS));
Label next_entry(this);
GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
Node* maybe_transition_map_cell = LoadFixedArrayElement(
feedback, index, kPointerSize, INTPTR_PARAMETERS);
var_handler->Bind(LoadFixedArrayElement(
feedback, index, 2 * kPointerSize, INTPTR_PARAMETERS));
GotoIf(WordEqual(maybe_transition_map_cell,
LoadRoot(Heap::kUndefinedValueRootIndex)),
if_handler);
var_transition_map_cell->Bind(maybe_transition_map_cell);
Goto(if_transition_handler);
Bind(&next_entry);
},
kEntrySize, IndexAdvanceMode::kPost);
// The loop falls through if no handler was found.
Goto(if_miss);
}
......@@ -424,7 +421,7 @@ Node* AccessorAssemblerImpl::EmitLoadICProtoArrayCheck(
BuildFastLoop(
MachineType::PointerRepresentation(), start_index.value(), handler_length,
[this, p, handler, miss](CodeStubAssembler*, Node* current) {
[this, p, handler, miss](Node* current) {
Node* prototype_cell =
LoadFixedArrayElement(handler, current, 0, INTPTR_PARAMETERS);
CheckPrototype(prototype_cell, p->name, miss);
......@@ -579,7 +576,7 @@ void AccessorAssemblerImpl::HandleStoreICProtoHandler(
Node* length = SmiUntag(maybe_transition_cell);
BuildFastLoop(MachineType::PointerRepresentation(),
IntPtrConstant(StoreHandler::kFirstPrototypeIndex), length,
[this, p, handler, miss](CodeStubAssembler*, Node* current) {
[this, p, handler, miss](Node* current) {
Node* prototype_cell = LoadFixedArrayElement(
handler, current, 0, INTPTR_PARAMETERS);
CheckPrototype(prototype_cell, p->name, miss);
......
......@@ -1742,9 +1742,8 @@ TEST(ArgumentsForEach) {
sum.Bind(m.IntPtrConstant(0));
arguments.ForEach(list, [&m, &sum](CodeStubAssembler* assembler, Node* arg) {
sum.Bind(assembler->IntPtrAdd(sum.value(), arg));
});
arguments.ForEach(
list, [&m, &sum](Node* arg) { sum.Bind(m.IntPtrAdd(sum.value(), arg)); });
m.Return(sum.value());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment