Commit aba708a1 authored by Jaroslav Sevcik's avatar Jaroslav Sevcik Committed by Commit Bot

Initial optimization of Map.prototype.(get|has) in Turbofan.

This introduces a new builtin (MapLookupHashIndex) and uses it
in Turbofan to compute Map.p.get and Map.p.has.

I have also refactored the existing CSA builtins for Map.p.get and 
Map.p.has to use the new builtin under the hood.

The code for the lookup has also been improved.
- Specialized lookups for smis, strings, heap numbers and everything else.
  - the advantage is that we can use fast equalities for the lookup.
  - strings can likely be optimized further if we care about the 
    internalized string fast case.
- Instead of a call to runtime to get the hash code, we now call C directly.

In the Turbofan implementation itself, there are no special optimizations yet.
The next step is to teach load elimination to reuse the indexes from
previous calls of MapLookupHashIndex. 

BUG=v8:6410

Change-Id: I0b1a70493eb031d444e51002f6b2cc1f30ea2b68
Reviewed-on: https://chromium-review.googlesource.com/560169
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Commit-Queue: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46510}
parent 0d007e68
......@@ -1510,8 +1510,9 @@ ExternalReference ExternalReference::search_string_raw(Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f)));
}
ExternalReference ExternalReference::orderedhashmap_get_raw(Isolate* isolate) {
auto f = OrderedHashMap::Get;
ExternalReference ExternalReference::orderedhashmap_gethash_raw(
Isolate* isolate) {
auto f = OrderedHashMap::GetHash;
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f)));
}
......
......@@ -997,7 +997,7 @@ class ExternalReference BASE_EMBEDDED {
template <typename SubjectChar, typename PatternChar>
static ExternalReference search_string_raw(Isolate* isolate);
static ExternalReference orderedhashmap_get_raw(Isolate* isolate);
static ExternalReference orderedhashmap_gethash_raw(Isolate* isolate);
template <typename CollectionType, int entrysize>
static ExternalReference orderedhashtable_has_raw(Isolate* isolate);
......
This diff is collapsed.
......@@ -576,6 +576,7 @@ namespace internal {
LoadGlobal) \
\
/* Map */ \
TFS(MapLookupHashIndex, kTable, kKey) \
TFJ(MapConstructor, SharedFunctionInfo::kDontAdaptArgumentsSentinel) \
TFJ(MapGet, 1, kKey) \
TFJ(MapHas, 1, kKey) \
......
......@@ -823,6 +823,12 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
case IrOpcode::kStoreTypedElement:
LowerStoreTypedElement(node);
break;
case IrOpcode::kLookupHashStorageIndex:
result = LowerLookupHashStorageIndex(node);
break;
case IrOpcode::kLoadHashMapValue:
result = LowerLoadHashMapValue(node);
break;
case IrOpcode::kFloat64RoundUp:
if (!LowerFloat64RoundUp(node).To(&result)) {
return false;
......@@ -3134,6 +3140,29 @@ Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTruncate(Node* node) {
return Just(done.PhiAt(0));
}
// Lowers a LookupHashStorageIndex node to a call of the MapLookupHashIndex
// builtin, which computes the index of {key} in the collection's backing
// hash table (the reducer treats -1 as "not found"; see ReduceMapGet/Has).
Node* EffectControlLinearizer::LowerLookupHashStorageIndex(Node* node) {
  Node* const table = NodeProperties::GetValueInput(node, 0);
  Node* const key = NodeProperties::GetValueInput(node, 1);
  Callable const callable =
      Builtins::CallableFor(isolate(), Builtins::kMapLookupHashIndex);
  // No stack parameters; the call inherits the node's operator properties.
  CallDescriptor* desc = Linkage::GetStubCallDescriptor(
      isolate(), graph()->zone(), callable.descriptor(), 0,
      CallDescriptor::kNoFlags, node->op()->properties());
  return __ Call(desc, __ HeapConstant(callable.code()), table, key,
                 __ NoContextConstant());
}
// Lowers a LoadHashMapValue node to a plain FixedArray element load from
// the collection's backing store at the previously computed index.
Node* EffectControlLinearizer::LowerLoadHashMapValue(Node* node) {
  return __ LoadElement(AccessBuilder::ForFixedArrayElement(),
                        NodeProperties::GetValueInput(node, 0),   // table
                        NodeProperties::GetValueInput(node, 1));  // index
}
#undef __
Factory* EffectControlLinearizer::factory() const {
......
......@@ -119,6 +119,8 @@ class V8_EXPORT_PRIVATE EffectControlLinearizer {
void LowerTransitionElementsKind(Node* node);
Node* LowerLoadTypedElement(Node* node);
void LowerStoreTypedElement(Node* node);
Node* LowerLookupHashStorageIndex(Node* node);
Node* LowerLoadHashMapValue(Node* node);
// Lowering of optional operators.
Maybe<Node*> LowerFloat64RoundUp(Node* node);
......
......@@ -1388,6 +1388,109 @@ Reduction JSBuiltinReducer::ReduceGlobalIsNaN(Node* node) {
return NoChange();
}
namespace {
// Returns true iff every receiver map inferable for {receiver} along the
// {effect} chain has instance type JS_MAP_TYPE. Unreliable maps are
// acceptable here, since the instance type stays reliable even when the
// map might have changed (see InferReceiverMapsResult in node-properties.h).
bool ExistsJSMapWitness(Node* receiver, Node* effect) {
  ZoneHandleSet<Map> receiver_maps;
  switch (NodeProperties::InferReceiverMaps(receiver, effect,
                                            &receiver_maps)) {
    case NodeProperties::kNoReceiverMaps:
      return false;
    case NodeProperties::kReliableReceiverMaps:
    case NodeProperties::kUnreliableReceiverMaps: {
      DCHECK_NE(0, receiver_maps.size());
      for (size_t i = 0; i < receiver_maps.size(); ++i) {
        if (receiver_maps[i]->instance_type() != JS_MAP_TYPE) return false;
      }
      return true;
    }
  }
  UNREACHABLE();
}
} // namespace
// ES6 section 23.1.3.6 Map.prototype.get ( key )
// Replaces the builtin call with an inlined lookup: load the backing hash
// table, compute the key's index via LookupHashStorageIndex, then branch on
// the "not found" sentinel -1 — returning undefined on the not-found path
// and a direct element load from the table on the found path.
Reduction JSBuiltinReducer::ReduceMapGet(Node* node) {
// We only optimize if we have target, receiver and key parameters.
if (node->op()->ValueInputCount() != 3) return NoChange();
Node* receiver = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* key = NodeProperties::GetValueInput(node, 2);
// Bail out unless all inferred receiver maps are JSMap instances.
if (!ExistsJSMapWitness(receiver, effect)) return NoChange();
// Load the collection's backing hash table from the receiver.
Node* storage = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSCollectionTable()), receiver,
effect, control);
// Index of {key} in the table; -1 means the key is absent.
Node* index = effect = graph()->NewNode(
simplified()->LookupHashStorageIndex(), storage, key, effect, control);
Node* check = graph()->NewNode(simplified()->NumberEqual(), index,
jsgraph()->MinusOneConstant());
Node* branch = graph()->NewNode(common()->Branch(), check, control);
// Key not found.
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* vtrue = jsgraph()->UndefinedConstant();
// Key found: load the stored value directly from the backing store.
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse = efalse = graph()->NewNode(
simplified()->LoadElement(AccessBuilder::ForFixedArrayElement()), storage,
index, efalse, if_false);
// Rejoin the two paths and merge value and effect.
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
Node* value = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, 2), vtrue, vfalse, control);
effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
ReplaceWithValue(node, value, effect, control);
return Replace(value);
}
// ES6 section 23.1.3.7 Map.prototype.has ( key )
// Same shape as ReduceMapGet, but the branch only selects between the
// boolean constants, so no per-branch effects (and hence no EffectPhi)
// are needed — the effect after the index lookup is reused directly.
Reduction JSBuiltinReducer::ReduceMapHas(Node* node) {
// We only optimize if we have target, receiver and key parameters.
if (node->op()->ValueInputCount() != 3) return NoChange();
Node* receiver = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* key = NodeProperties::GetValueInput(node, 2);
// Bail out unless all inferred receiver maps are JSMap instances.
if (!ExistsJSMapWitness(receiver, effect)) return NoChange();
// Load the collection's backing hash table from the receiver.
Node* storage = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSCollectionTable()), receiver,
effect, control);
// Index of {key} in the table; -1 means the key is absent.
Node* index = effect = graph()->NewNode(
simplified()->LookupHashStorageIndex(), storage, key, effect, control);
Node* check = graph()->NewNode(simplified()->NumberEqual(), index,
jsgraph()->MinusOneConstant());
Node* branch = graph()->NewNode(common()->Branch(), check, control);
// Key not found.
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* vtrue = jsgraph()->FalseConstant();
// Key found.
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* vfalse = jsgraph()->TrueConstant();
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
Node* value = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, 2), vtrue, vfalse, control);
ReplaceWithValue(node, value, effect, control);
return Replace(value);
}
// ES6 section 20.2.2.1 Math.abs ( x )
Reduction JSBuiltinReducer::ReduceMathAbs(Node* node) {
JSCallReduction r(node);
......@@ -2464,6 +2567,12 @@ Reduction JSBuiltinReducer::Reduce(Node* node) {
case kGlobalIsNaN:
reduction = ReduceGlobalIsNaN(node);
break;
case kMapGet:
reduction = ReduceMapGet(node);
break;
case kMapHas:
reduction = ReduceMapHas(node);
break;
case kMathAbs:
reduction = ReduceMathAbs(node);
break;
......
......@@ -64,6 +64,8 @@ class V8_EXPORT_PRIVATE JSBuiltinReducer final
Reduction ReduceFunctionBind(Node* node);
Reduction ReduceGlobalIsFinite(Node* node);
Reduction ReduceGlobalIsNaN(Node* node);
Reduction ReduceMapHas(Node* node);
Reduction ReduceMapGet(Node* node);
Reduction ReduceMathAbs(Node* node);
Reduction ReduceMathAcos(Node* node);
Reduction ReduceMathAcosh(Node* node);
......
......@@ -130,6 +130,9 @@ Node* JSGraph::OneConstant() {
return CACHED(kOneConstant, NumberConstant(1.0));
}
// Returns the canonical, per-graph-cached NumberConstant(-1) node (used
// e.g. as the "not found" sentinel for hash-table index lookups).
Node* JSGraph::MinusOneConstant() {
return CACHED(kMinusOneConstant, NumberConstant(-1.0));
}
Node* JSGraph::NaNConstant() {
return CACHED(kNaNConstant,
......
......@@ -63,6 +63,7 @@ class V8_EXPORT_PRIVATE JSGraph : public NON_EXPORTED_BASE(ZoneObject) {
Node* ZeroConstant();
Node* OneConstant();
Node* NaNConstant();
Node* MinusOneConstant();
// Creates a HeapConstant node, possibly canonicalized, and may access the
// heap to inspect the object.
......@@ -183,6 +184,7 @@ class V8_EXPORT_PRIVATE JSGraph : public NON_EXPORTED_BASE(ZoneObject) {
kNullConstant,
kZeroConstant,
kOneConstant,
kMinusOneConstant,
kNaNConstant,
kEmptyStateValues,
kSingleDeadTypedStateValues,
......
......@@ -138,7 +138,8 @@ class V8_EXPORT_PRIVATE NodeProperties final {
enum InferReceiverMapsResult {
kNoReceiverMaps, // No receiver maps inferred.
kReliableReceiverMaps, // Receiver maps can be trusted.
kUnreliableReceiverMaps // Receiver maps might have changed (side-effect).
kUnreliableReceiverMaps // Receiver maps might have changed (side-effect),
// but instance type is reliable.
};
static InferReceiverMapsResult InferReceiverMaps(
Node* receiver, Node* effect, ZoneHandleSet<Map>* maps_return);
......
......@@ -361,7 +361,9 @@
V(ArrayBufferWasNeutered) \
V(EnsureWritableFastElements) \
V(MaybeGrowFastElements) \
V(TransitionElementsKind)
V(TransitionElementsKind) \
V(LookupHashStorageIndex) \
V(LoadHashMapValue)
#define SIMPLIFIED_OP_LIST(V) \
SIMPLIFIED_CHANGE_OP_LIST(V) \
......
......@@ -2867,6 +2867,16 @@ class RepresentationSelector {
// Assume the output is tagged.
return SetOutput(node, MachineRepresentation::kTagged);
case IrOpcode::kLookupHashStorageIndex:
VisitInputs(node);
return SetOutput(node, MachineRepresentation::kTaggedSigned);
case IrOpcode::kLoadHashMapValue:
ProcessInput(node, 0, UseInfo::AnyTagged()); // table
ProcessInput(node, 1, UseInfo::TruncatingWord32()); // index
ProcessRemainingInputs(node, 2);
return SetOutput(node, MachineRepresentation::kTagged);
// Operators with all inputs tagged and no or tagged output have uniform
// handling.
case IrOpcode::kEnd:
......
......@@ -585,6 +585,20 @@ struct SimplifiedOperatorGlobalCache final {
};
ArrayBufferWasNeuteredOperator kArrayBufferWasNeutered;
// LookupHashStorageIndex(table, key): eliminable (non-writing) operator.
// Counts appear positionally in the Operator ctor — presumably
// 2 value / 1 effect / 1 control inputs and 1 value / 1 effect / 0 control
// outputs; confirm against the Operator constructor signature.
struct LookupHashStorageIndexOperator final : public Operator {
LookupHashStorageIndexOperator()
: Operator(IrOpcode::kLookupHashStorageIndex, Operator::kEliminatable,
"LookupHashStorageIndex", 2, 1, 1, 1, 1, 0) {}
};
LookupHashStorageIndexOperator kLookupHashStorageIndex;
// LoadHashMapValue(table, index): eliminable (non-writing) operator with
// the same input/output shape as LookupHashStorageIndex above — presumably
// 2 value / 1 effect / 1 control inputs and 1 value / 1 effect outputs;
// confirm against the Operator constructor signature.
struct LoadHashMapValueOperator final : public Operator {
LoadHashMapValueOperator()
: Operator(IrOpcode::kLoadHashMapValue, Operator::kEliminatable,
"LoadHashMapValue", 2, 1, 1, 1, 1, 0) {}
};
LoadHashMapValueOperator kLoadHashMapValue;
struct ArgumentsFrameOperator final : public Operator {
ArgumentsFrameOperator()
: Operator(IrOpcode::kArgumentsFrame, Operator::kPure, "ArgumentsFrame",
......@@ -781,6 +795,8 @@ PURE_OP_LIST(GET_FROM_CACHE)
CHECKED_OP_LIST(GET_FROM_CACHE)
GET_FROM_CACHE(ArrayBufferWasNeutered)
GET_FROM_CACHE(ArgumentsFrame)
GET_FROM_CACHE(LookupHashStorageIndex)
GET_FROM_CACHE(LoadHashMapValue)
GET_FROM_CACHE(NewUnmappedArgumentsElements)
#undef GET_FROM_CACHE
......
......@@ -385,6 +385,9 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
const Operator* StringToLowerCaseIntl();
const Operator* StringToUpperCaseIntl();
const Operator* LookupHashStorageIndex();
const Operator* LoadHashMapValue();
const Operator* SpeculativeToNumber(NumberOperationHint hint);
const Operator* PlainPrimitiveToNumber();
......
......@@ -2019,6 +2019,14 @@ Type* Typer::Visitor::TypeArrayBufferWasNeutered(Node* node) {
return Type::Boolean();
}
// The lookup yields either a table index or the -1 sentinel, so the result
// is always a small signed integer (Smi range).
Type* Typer::Visitor::TypeLookupHashStorageIndex(Node* node) {
return Type::SignedSmall();
}
// The loaded slot holds a user-stored Map value, which can be any
// non-internal (JavaScript-visible) value.
Type* Typer::Visitor::TypeLoadHashMapValue(Node* node) {
return Type::NonInternal();
}
// Heap constants.
Type* Typer::Visitor::TypeConstant(Handle<Object> value) {
......
......@@ -1014,6 +1014,13 @@ void Verifier::Visitor::Check(Node* node) {
CheckValueInputIs(node, 0, Type::Any());
CheckTypeIs(node, Type::Boolean());
break;
case IrOpcode::kLookupHashStorageIndex:
CheckTypeIs(node, Type::SignedSmall());
break;
case IrOpcode::kLoadHashMapValue:
CheckValueInputIs(node, 2, Type::SignedSmall());
CheckTypeIs(node, Type::SignedSmall());
break;
case IrOpcode::kArgumentsLength:
CheckValueInputIs(node, 0, Type::ExternalPointer());
CheckTypeIs(node, TypeCache::Get().kArgumentsLengthType);
......
......@@ -271,8 +271,8 @@ void ExternalReferenceTable::AddReferences(Isolate* isolate) {
Add(ExternalReference::search_string_raw<const uc16, const uc16>(isolate)
.address(),
"search_string_raw<1-byte, 2-byte>");
Add(ExternalReference::orderedhashmap_get_raw(isolate).address(),
"orderedhashmap_get_raw");
Add(ExternalReference::orderedhashmap_gethash_raw(isolate).address(),
"orderedhashmap_gethash_raw");
Add(ExternalReference::orderedhashtable_has_raw<OrderedHashMap, 2>(isolate)
.address(),
"orderedhashtable_has_raw<OrderedHashMap, 2>");
......
......@@ -18278,13 +18278,21 @@ bool OrderedHashTable<Derived, entrysize>::Delete(Isolate* isolate,
return true;
}
Object* OrderedHashMap::Get(Isolate* isolate, OrderedHashMap* table,
Object* key) {
DCHECK(table->IsOrderedHashMap());
Object* OrderedHashMap::GetHash(Isolate* isolate, Object* key) {
DisallowHeapAllocation no_gc;
int entry = table->FindEntry(isolate, key);
if (entry == kNotFound) return isolate->heap()->undefined_value();
return table->ValueAt(entry);
// This special cases for Smi, so that we avoid the HandleScope
// creation below.
if (key->IsSmi()) {
return Smi::FromInt(ComputeIntegerHash(Smi::cast(key)->value()));
}
HandleScope scope(isolate);
Object* hash = key->GetHash();
// If the object does not have an identity hash, it was never used as a key
if (hash->IsUndefined(isolate)) return Smi::FromInt(-1);
DCHECK(hash->IsSmi());
DCHECK(Smi::cast(hash)->value() >= 0);
return hash;
}
Handle<OrderedHashMap> OrderedHashMap::Add(Handle<OrderedHashMap> table,
......
......@@ -4642,6 +4642,7 @@ class ContextExtension : public Struct {
V(Map.prototype, forEach, MapForEach) \
V(Map.prototype, has, MapHas) \
V(Map.prototype, keys, MapKeys) \
V(Map.prototype, get, MapGet) \
V(Map.prototype, set, MapSet) \
V(Map.prototype, values, MapValues) \
V(Set.prototype, add, SetAdd) \
......
......@@ -555,11 +555,12 @@ class OrderedHashMap : public OrderedHashTable<OrderedHashMap, 2> {
// Returns a value if the OrderedHashMap contains the key, otherwise
// returns undefined.
static Object* Get(Isolate* isolate, OrderedHashMap* table, Object* key);
static Handle<OrderedHashMap> Add(Handle<OrderedHashMap> table,
Handle<Object> key, Handle<Object> value);
Object* ValueAt(int entry);
static Object* GetHash(Isolate* isolate, Object* key);
static const int kValueOffset = 1;
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.