Commit 59ba2a8f authored by bmeurer's avatar bmeurer Committed by Commit bot

[turbofan] Initial support for keyed access to holey elements.

This adds support for load/store access to FAST_HOLEY_ELEMENTS and
FAST_HOLEY_SMI_ELEMENTS backing stores. We don't currently fully support
the double holes, because we make some difficult assumptions in
Crankshaft there.

R=jarin@chromium.org
BUG=v8:4470
LOG=n

Review URL: https://codereview.chromium.org/1448903002

Cr-Commit-Position: refs/heads/master@{#32012}
parent 3cf6e040
......@@ -19,10 +19,13 @@ namespace compiler {
namespace {
// Returns true if TurboFan can generate inlined element access code for
// objects with the given {map}: the map must be a JSObject map that needs
// no access checks, has no indexed interceptor, and uses one of the fast
// elements kinds (packed or holey; Smi, object, or double).
bool CanInlineElementAccess(Handle<Map> map) {
  if (!map->IsJSObjectMap()) return false;
  if (map->is_access_check_needed()) return false;
  if (map->has_indexed_interceptor()) return false;
  ElementsKind const elements_kind = map->elements_kind();
  if (IsFastElementsKind(elements_kind)) return true;
  // TODO(bmeurer): Add support for other elements kind.
  return false;
}
......@@ -137,7 +140,12 @@ bool AccessInfoFactory::ComputeElementAccessInfo(
// Check if it is safe to inline element access for the {map}.
if (!CanInlineElementAccess(map)) return false;
ElementsKind elements_kind = map->elements_kind();
ElementsKind const elements_kind = map->elements_kind();
if (access_mode == AccessMode::kLoad &&
elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
// TODO(bmeurer): Add support for holey loads.
return false;
}
// Certain (monomorphic) stores need a prototype chain check because shape
// changes could allow callbacks on elements in the chain that are not
......
......@@ -13,6 +13,7 @@
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/field-index-inl.h"
#include "src/isolate-inl.h"
#include "src/objects-inl.h" // TODO(mstarzinger): Temporary cycle breaker!
#include "src/type-cache.h"
#include "src/type-feedback-vector.h"
......@@ -496,7 +497,8 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
Reduction JSNativeContextSpecialization::ReduceElementAccess(
Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
AccessMode access_mode, LanguageMode language_mode) {
AccessMode access_mode, LanguageMode language_mode,
KeyedAccessStoreMode store_mode) {
DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
node->opcode() == IrOpcode::kJSStoreProperty);
Node* receiver = NodeProperties::GetValueInput(node, 0);
......@@ -508,6 +510,9 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
// Not much we can do if deoptimization support is disabled.
if (!(flags() & kDeoptimizationEnabled)) return NoChange();
// TODO(bmeurer): Add support for non-standard stores.
if (store_mode != STANDARD_STORE) return NoChange();
// Compute element access infos for the receiver maps.
ZoneVector<ElementAccessInfo> access_infos(zone());
if (!access_info_factory().ComputeElementAccessInfos(
......@@ -728,10 +733,62 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
element_type, element_machine_type};
// Access the actual element.
// TODO(bmeurer): Refactor this into separate methods or even a separate
// class that deals with the elements access.
if (access_mode == AccessMode::kLoad) {
// Compute the real element access type, which includes the hole in case
// of holey backing stores.
if (elements_kind == FAST_HOLEY_ELEMENTS ||
elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
element_access.type = Type::Union(
element_type,
Type::Constant(factory()->the_hole_value(), graph()->zone()),
graph()->zone());
}
// Perform the actual backing store access.
this_value = this_effect = graph()->NewNode(
simplified()->LoadElement(element_access), this_elements, this_index,
this_effect, this_control);
// Handle loading from holey backing stores correctly, by either mapping
// the hole to undefined if possible, or deoptimizing otherwise.
if (elements_kind == FAST_HOLEY_ELEMENTS ||
elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
// Perform the hole check on the result.
Node* check =
graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
this_value, jsgraph()->TheHoleConstant());
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
check, this_control);
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
// Check if we are allowed to turn the hole into undefined.
Type* initial_holey_array_type = Type::Class(
handle(isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS)),
graph()->zone());
if (receiver_type->NowIs(initial_holey_array_type) &&
isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
// Add a code dependency on the array protector cell.
AssumePrototypesStable(receiver_type,
isolate()->initial_object_prototype());
dependencies()->AssumePropertyCell(factory()->array_protector());
// Turn the hole into undefined.
this_control =
graph()->NewNode(common()->Merge(2), if_true, if_false);
this_value = graph()->NewNode(common()->Phi(kMachAnyTagged, 2),
jsgraph()->UndefinedConstant(),
this_value, this_control);
element_type =
Type::Union(element_type, Type::Undefined(), graph()->zone());
} else {
// Deoptimize in case of the hole.
exit_controls.push_back(if_true);
this_control = if_false;
}
// Rename the result to represent the actual type (not polluted by the
// hole).
this_value = graph()->NewNode(common()->Guard(element_type), this_value,
this_control);
}
} else {
DCHECK_EQ(AccessMode::kStore, access_mode);
if (IsFastSmiElementsKind(elements_kind)) {
......@@ -809,7 +866,8 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
AccessMode access_mode, LanguageMode language_mode) {
AccessMode access_mode, LanguageMode language_mode,
KeyedAccessStoreMode store_mode) {
DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
node->opcode() == IrOpcode::kJSStoreProperty);
......@@ -848,7 +906,7 @@ Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
// Try to lower the element access based on the {receiver_maps}.
return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
language_mode);
language_mode, store_mode);
}
......@@ -864,7 +922,7 @@ Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
// Try to lower the keyed access based on the {nexus}.
return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
p.language_mode());
p.language_mode(), STANDARD_STORE);
}
......@@ -878,9 +936,12 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
if (!p.feedback().IsValid()) return NoChange();
KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
// Extract the keyed access store mode from the KEYED_STORE_IC.
KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
// Try to lower the keyed access based on the {nexus}.
return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
p.language_mode());
p.language_mode(), store_mode);
}
......
......@@ -59,11 +59,13 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
Reduction ReduceElementAccess(Node* node, Node* index, Node* value,
MapHandleList const& receiver_maps,
AccessMode access_mode,
LanguageMode language_mode);
LanguageMode language_mode,
KeyedAccessStoreMode store_mode);
Reduction ReduceKeyedAccess(Node* node, Node* index, Node* value,
FeedbackNexus const& nexus,
AccessMode access_mode,
LanguageMode language_mode);
LanguageMode language_mode,
KeyedAccessStoreMode store_mode);
Reduction ReduceNamedAccess(Node* node, Node* value,
MapHandleList const& receiver_maps,
Handle<Name> name, AccessMode access_mode,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment