Commit 017faaf4 authored by bmeurer, committed by Commit bot

[turbofan] First steps towards optimizing for-in loops.

This is basically a port of the majority of optimizations that are
applied to for-in in full codegen. However, it is not done during graph
building, but instead during typed lowering, which is far less ad hoc
than what the other compilers do.
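For illustration only (not part of this commit): a minimal TypeScript sketch of the kind of loop these reductions target. The function and object names are made up; the comments summarize the fast/slow paths built by the new JSForInPrepare/JSForInNext/JSForInDone/JSForInStep lowerings.

// A plain for-in loop; TurboFan models it with JSForInPrepare (set up the
// enumeration cache), JSForInDone (loop test), JSForInNext (load the next key
// and filter it if the receiver's map changed) and JSForInStep (increment the
// index). The enum cache fast path applies while the receiver's map still
// matches the cached map; otherwise each key goes through the slow
// ForInFilter check.
function sumEnumerable(obj: { [key: string]: number }): number {
  let total = 0;
  for (const key in obj) {
    total += obj[key];
  }
  return total;
}

sumEnumerable({ a: 1, b: 2, c: 3 });  // 6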

Review URL: https://codereview.chromium.org/1155313008

Cr-Commit-Position: refs/heads/master@{#28726}
parent 7a709875
@@ -58,6 +58,34 @@ FieldAccess AccessBuilder::ForExternalArrayPointer() {
}
// static
FieldAccess AccessBuilder::ForDescriptorArrayEnumCache() {
return {kTaggedBase, DescriptorArray::kEnumCacheOffset, Handle<Name>(),
Type::TaggedPointer(), kMachAnyTagged};
}
// static
FieldAccess AccessBuilder::ForDescriptorArrayEnumCacheBridgeCache() {
return {kTaggedBase, DescriptorArray::kEnumCacheBridgeCacheOffset,
Handle<Name>(), Type::TaggedPointer(), kMachAnyTagged};
}
// static
FieldAccess AccessBuilder::ForMapBitField3() {
return {kTaggedBase, Map::kBitField3Offset, Handle<Name>(),
Type::UntaggedUnsigned32(), kMachUint32};
}
// static
FieldAccess AccessBuilder::ForMapDescriptors() {
return {kTaggedBase, Map::kDescriptorsOffset, Handle<Name>(),
Type::TaggedPointer(), kMachAnyTagged};
}
// static
FieldAccess AccessBuilder::ForMapInstanceType() {
return {kTaggedBase, Map::kInstanceTypeOffset, Handle<Name>(),
...
@@ -37,6 +37,18 @@ class AccessBuilder final : public AllStatic {
// Provides access to ExternalArray::external_pointer() field.
static FieldAccess ForExternalArrayPointer();
// Provides access to DescriptorArray::enum_cache() field.
static FieldAccess ForDescriptorArrayEnumCache();
// Provides access to DescriptorArray::enum_cache_bridge_cache() field.
static FieldAccess ForDescriptorArrayEnumCacheBridgeCache();
// Provides access to Map::bit_field3() field.
static FieldAccess ForMapBitField3();
// Provides access to Map::descriptors() field.
static FieldAccess ForMapDescriptors();
// Provides access to Map::instance_type() field.
static FieldAccess ForMapInstanceType();
...
@@ -74,6 +74,11 @@ class Graph : public ZoneObject {
Node* nodes[] = {n1, n2, n3, n4, n5, n6, n7, n8};
return NewNode(op, arraysize(nodes), nodes);
}
Node* NewNode(const Operator* op, Node* n1, Node* n2, Node* n3, Node* n4,
Node* n5, Node* n6, Node* n7, Node* n8, Node* n9) {
Node* nodes[] = {n1, n2, n3, n4, n5, n6, n7, n8, n9};
return NewNode(op, arraysize(nodes), nodes);
}
template <class Visitor>
inline void VisitNodeInputsFromEnd(Visitor* visitor);
...
@@ -606,8 +606,7 @@ void JSGenericLowering::LowerJSForInPrepare(Node* node) {
Node* cache_type = effect = graph()->NewNode(
common()->Call(descriptor),
jsgraph()->CEntryStubConstant(function->result_size), object,
jsgraph()->ExternalConstant(
ExternalReference(function->function_id, isolate())),
jsgraph()->ExternalConstant(function->function_id),
jsgraph()->Int32Constant(1), context, frame_state, effect, control);
control = graph()->NewNode(common()->IfSuccess(), cache_type);
...
@@ -183,6 +183,11 @@ Node* JSGraph::ExternalConstant(ExternalReference reference) {
}
Node* JSGraph::ExternalConstant(Runtime::FunctionId function_id) {
return ExternalConstant(ExternalReference(function_id, isolate()));
}
Node* JSGraph::EmptyFrameState() {
Node* empty_frame_state = cached_nodes_[kEmptyFrameState];
if (!empty_frame_state || empty_frame_state->IsDead()) {
...
@@ -102,6 +102,7 @@ class JSGraph : public ZoneObject {
// Creates an ExternalConstant node, usually canonicalized.
Node* ExternalConstant(ExternalReference ref);
Node* ExternalConstant(Runtime::FunctionId function_id);
Node* SmiConstant(int32_t immediate) {
DCHECK(Smi::IsValid(immediate));
...
@@ -1081,6 +1081,268 @@ Reduction JSTypedLowering::ReduceJSCreateBlockContext(Node* node) {
}
Reduction JSTypedLowering::ReduceJSForInDone(Node* node) {
DCHECK_EQ(IrOpcode::kJSForInDone, node->opcode());
node->set_op(machine()->Word32Equal());
node->TrimInputCount(2);
return Changed(node);
}
Reduction JSTypedLowering::ReduceJSForInPrepare(Node* node) {
DCHECK_EQ(IrOpcode::kJSForInPrepare, node->opcode());
Node* receiver = NodeProperties::GetValueInput(node, 0);
Node* context = NodeProperties::GetContextInput(node);
Node* frame_state = NodeProperties::GetFrameStateInput(node, 0);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
// Get the set of properties to enumerate.
Node* cache_type = effect = graph()->NewNode(
javascript()->CallRuntime(Runtime::kGetPropertyNamesFast, 1), receiver,
context, frame_state, effect, control);
control = graph()->NewNode(common()->IfSuccess(), cache_type);
Node* receiver_map = effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
receiver, effect, control);
Node* cache_type_map = effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
cache_type, effect, control);
Node* meta_map = jsgraph()->HeapConstant(factory()->meta_map());
// If we got a map from the GetPropertyNamesFast runtime call, we can do a
// fast modification check. Otherwise, we got a fixed array, and we have to
// perform a slow check on every iteration.
Node* check0 = graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
cache_type_map, meta_map);
Node* branch0 =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check0, control);
Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
Node* cache_array_true0;
Node* cache_length_true0;
Node* cache_type_true0;
Node* etrue0;
{
// Enum cache case.
Node* cache_type_enum_length = etrue0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForMapBitField3()), cache_type,
effect, if_true0);
cache_length_true0 =
graph()->NewNode(machine()->Word32And(), cache_type_enum_length,
jsgraph()->Uint32Constant(Map::EnumLengthBits::kMask));
Node* check1 =
graph()->NewNode(machine()->Word32Equal(), cache_length_true0,
jsgraph()->Int32Constant(0));
Node* branch1 =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check1, if_true0);
Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
Node* cache_array_true1;
Node* etrue1;
{
// No properties to enumerate.
cache_array_true1 =
jsgraph()->HeapConstant(factory()->empty_fixed_array());
etrue1 = etrue0;
}
Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
Node* cache_array_false1;
Node* efalse1;
{
// Load the enumeration cache from the instance descriptors of {receiver}.
Node* receiver_map_descriptors = efalse1 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForMapDescriptors()),
receiver_map, etrue0, if_false1);
Node* object_map_enum_cache = efalse1 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForDescriptorArrayEnumCache()),
receiver_map_descriptors, efalse1, if_false1);
cache_array_false1 = efalse1 = graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForDescriptorArrayEnumCacheBridgeCache()),
object_map_enum_cache, efalse1, if_false1);
}
if_true0 = graph()->NewNode(common()->Merge(2), if_true1, if_false1);
etrue0 =
graph()->NewNode(common()->EffectPhi(2), etrue1, efalse1, if_true0);
cache_array_true0 =
graph()->NewNode(common()->Phi(kMachAnyTagged, 2), cache_array_true1,
cache_array_false1, if_true0);
cache_type_true0 = cache_type;
}
Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
Node* cache_array_false0;
Node* cache_length_false0;
Node* cache_type_false0;
Node* efalse0;
{
// FixedArray case.
Node* receiver_instance_type = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForMapInstanceType()),
receiver_map, effect, if_false0);
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
cache_type_false0 = graph()->NewNode(
common()->Select(kMachAnyTagged, BranchHint::kFalse),
graph()->NewNode(machine()->Uint32LessThanOrEqual(),
receiver_instance_type,
jsgraph()->Uint32Constant(LAST_JS_PROXY_TYPE)),
        jsgraph()->ZeroConstant(),  // Zero indicates proxy.
jsgraph()->OneConstant()); // One means slow check.
cache_array_false0 = cache_type;
cache_length_false0 = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
cache_array_false0, efalse0, if_false0);
}
control = graph()->NewNode(common()->Merge(2), if_true0, if_false0);
effect = graph()->NewNode(common()->EffectPhi(2), etrue0, efalse0, control);
Node* cache_array =
graph()->NewNode(common()->Phi(kMachAnyTagged, 2), cache_array_true0,
cache_array_false0, control);
Node* cache_length =
graph()->NewNode(common()->Phi(kMachAnyTagged, 2), cache_length_true0,
cache_length_false0, control);
cache_type = graph()->NewNode(common()->Phi(kMachAnyTagged, 2),
cache_type_true0, cache_type_false0, control);
for (auto edge : node->use_edges()) {
Node* const use = edge.from();
if (NodeProperties::IsEffectEdge(edge)) {
edge.UpdateTo(effect);
Revisit(use);
} else {
if (NodeProperties::IsControlEdge(edge)) {
DCHECK_EQ(IrOpcode::kIfSuccess, use->opcode());
Replace(use, control);
} else {
DCHECK(NodeProperties::IsValueEdge(edge));
DCHECK_EQ(IrOpcode::kProjection, use->opcode());
switch (ProjectionIndexOf(use->op())) {
case 0:
Replace(use, cache_type);
break;
case 1:
Replace(use, cache_array);
break;
case 2:
Replace(use, cache_length);
break;
default:
UNREACHABLE();
break;
}
}
use->Kill();
}
}
return NoChange(); // All uses were replaced already above.
}
Reduction JSTypedLowering::ReduceJSForInNext(Node* node) {
DCHECK_EQ(IrOpcode::kJSForInNext, node->opcode());
Node* receiver = NodeProperties::GetValueInput(node, 0);
Node* cache_array = NodeProperties::GetValueInput(node, 1);
Node* cache_type = NodeProperties::GetValueInput(node, 2);
Node* index = NodeProperties::GetValueInput(node, 3);
Node* context = NodeProperties::GetContextInput(node);
Node* frame_state = NodeProperties::GetFrameStateInput(node, 0);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
// Load the next {key} from the {cache_array}.
Node* key = effect = graph()->NewNode(
simplified()->LoadElement(AccessBuilder::ForFixedArrayElement()),
cache_array, index, effect, control);
// Load the map of the {receiver}.
Node* receiver_map = effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
receiver, effect, control);
// Check if the expected map still matches that of the {receiver}.
Node* check0 = graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
receiver_map, cache_type);
Node* branch0 =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check0, control);
Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
Node* etrue0;
Node* vtrue0;
{
// Don't need filtering since expected map still matches that of the
// {receiver}.
etrue0 = effect;
vtrue0 = key;
}
Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
Node* efalse0;
Node* vfalse0;
{
// Check if the {cache_type} is zero, which indicates proxy.
Node* check1 = graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
cache_type, jsgraph()->ZeroConstant());
Node* branch1 = graph()->NewNode(common()->Branch(BranchHint::kFalse),
check1, if_false0);
Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
Node* etrue1;
Node* vtrue1;
{
// Don't do filtering for proxies.
etrue1 = effect;
vtrue1 = key;
}
Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
Node* efalse1;
Node* vfalse1;
{
// Filter the {key} to check if it's still a valid property of the
// {receiver} (does the ToName conversion implicitly).
vfalse1 = efalse1 = graph()->NewNode(
javascript()->CallRuntime(Runtime::kForInFilter, 2), receiver, key,
context, frame_state, effect, if_false1);
if_false1 = graph()->NewNode(common()->IfSuccess(), vfalse1);
}
if_false0 = graph()->NewNode(common()->Merge(2), if_true1, if_false1);
efalse0 =
graph()->NewNode(common()->EffectPhi(2), etrue1, efalse1, if_false0);
vfalse0 = graph()->NewNode(common()->Phi(kMachAnyTagged, 2), vtrue1,
vfalse1, if_false0);
}
control = graph()->NewNode(common()->Merge(2), if_true0, if_false0);
effect = graph()->NewNode(common()->EffectPhi(2), etrue0, efalse0, control);
ReplaceWithValue(node, node, effect, control);
node->set_op(common()->Phi(kMachAnyTagged, 2));
node->ReplaceInput(0, vtrue0);
node->ReplaceInput(1, vfalse0);
node->ReplaceInput(2, control);
node->TrimInputCount(3);
return Changed(node);
}
Reduction JSTypedLowering::ReduceJSForInStep(Node* node) {
DCHECK_EQ(IrOpcode::kJSForInStep, node->opcode());
node->set_op(machine()->Int32Add());
node->ReplaceInput(1, jsgraph()->Int32Constant(1));
DCHECK_EQ(2, node->InputCount());
return Changed(node);
}
Reduction JSTypedLowering::Reduce(Node* node) {
// Check if the output type is a singleton. In that case we already know the
// result value and can simply replace the node if it's eliminable.
@@ -1177,6 +1439,14 @@ Reduction JSTypedLowering::Reduce(Node* node) {
return ReduceJSCreateWithContext(node);
case IrOpcode::kJSCreateBlockContext:
return ReduceJSCreateBlockContext(node);
case IrOpcode::kJSForInDone:
return ReduceJSForInDone(node);
case IrOpcode::kJSForInNext:
return ReduceJSForInNext(node);
case IrOpcode::kJSForInPrepare:
return ReduceJSForInPrepare(node);
case IrOpcode::kJSForInStep:
return ReduceJSForInStep(node);
default:
break;
}
...
@@ -58,6 +58,10 @@ class JSTypedLowering final : public AdvancedReducer {
Reduction ReduceJSCreateLiteralObject(Node* node);
Reduction ReduceJSCreateWithContext(Node* node);
Reduction ReduceJSCreateBlockContext(Node* node);
Reduction ReduceJSForInDone(Node* node);
Reduction ReduceJSForInNext(Node* node);
Reduction ReduceJSForInPrepare(Node* node);
Reduction ReduceJSForInStep(Node* node);
Reduction ReduceNumberBinop(Node* node, const Operator* numberOp);
Reduction ReduceInt32Binop(Node* node, const Operator* intOp);
Reduction ReduceUI32Shift(Node* node, Signedness left_signedness,
...