Commit 76f740b2 authored by bmeurer, committed by Commit bot

[turbofan] Introduce a dedicated ArrayBufferWasNeutered operator.

Using the dedicated simplified operator we are able to eliminate
redundant neutering checks as long as there is no call in the
effect chain. This yields a nice speed-up on the Octane Mandreel
benchmark (and on TypedArray-heavy workloads in general).

R=jarin@chromium.org
BUG=v8:5267

Review-Url: https://codereview.chromium.org/2279213002
Cr-Commit-Position: refs/heads/master@{#38932}
parent 04d81120
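
For orientation before the diff: the point of the new operator is that a neutering check, once performed on a given buffer, stays valid along the effect chain until a call (which could neuter any buffer) is encountered. A toy model of that caching discipline, in plain C++ with all names invented, not V8 code:

    // Toy sketch of call-invalidated check caching (illustrative only).
    #include <unordered_set>

    struct NeuteringCheckCache {
      std::unordered_set<const void*> checked_buffers;

      // A call may neuter any buffer, so it invalidates every cached check.
      void OnCall() { checked_buffers.clear(); }

      // Returns true iff a fresh neutering check must be emitted.
      bool NeedsCheck(const void* buffer) {
        return checked_buffers.insert(buffer).second;
      }
    };

LoadElimination below implements the same idea over the TurboFan effect chain, with a fixed-size table instead of a hash set.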
src/compiler/effect-control-linearizer.cc
@@ -702,6 +702,9 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
     case IrOpcode::kObjectIsUndetectable:
       state = LowerObjectIsUndetectable(node, *effect, *control);
       break;
+    case IrOpcode::kArrayBufferWasNeutered:
+      state = LowerArrayBufferWasNeutered(node, *effect, *control);
+      break;
     case IrOpcode::kStringFromCharCode:
       state = LowerStringFromCharCode(node, *effect, *control);
       break;
@@ -1995,6 +1998,26 @@ EffectControlLinearizer::LowerObjectIsUndetectable(Node* node, Node* effect,
   return ValueEffectControl(value, effect, control);
 }
 
+EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerArrayBufferWasNeutered(Node* node, Node* effect,
+                                                     Node* control) {
+  Node* value = node->InputAt(0);
+
+  Node* value_bit_field = effect = graph()->NewNode(
+      simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()), value,
+      effect, control);
+  value = graph()->NewNode(
+      machine()->Word32Equal(),
+      graph()->NewNode(machine()->Word32Equal(),
+                       graph()->NewNode(machine()->Word32And(), value_bit_field,
+                                        jsgraph()->Int32Constant(
+                                            JSArrayBuffer::WasNeutered::kMask)),
+                       jsgraph()->Int32Constant(0)),
+      jsgraph()->Int32Constant(0));
+
+  return ValueEffectControl(value, effect, control);
+}
+
 EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerStringCharCodeAt(Node* node, Node* effect,
                                                Node* control) {
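
The nested Word32Equal-against-zero pattern in LowerArrayBufferWasNeutered is machine-level logical negation: it computes (bit_field & WasNeutered::kMask) != 0 using only 0/1-valued compares. As a standalone C++ sketch (my restatement, not V8 code):

    #include <cstdint>

    // Word32Equal(Word32Equal(bits & mask, 0), 0)  ==  ((bits & mask) != 0)
    uint32_t WasNeuteredBit(uint32_t bit_field, uint32_t mask) {
      uint32_t masked_is_zero = ((bit_field & mask) == 0) ? 1 : 0;  // inner Word32Equal
      return (masked_is_zero == 0) ? 1 : 0;                         // outer Word32Equal
    }

So the operator yields 1 exactly when the WasNeutered bit is set in the buffer's bit field.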
src/compiler/effect-control-linearizer.h
@@ -122,6 +122,8 @@ class EffectControlLinearizer {
                                           Node* control);
   ValueEffectControl LowerObjectIsUndetectable(Node* node, Node* effect,
                                                Node* control);
+  ValueEffectControl LowerArrayBufferWasNeutered(Node* node, Node* effect,
+                                                 Node* control);
   ValueEffectControl LowerStringCharCodeAt(Node* node, Node* effect,
                                            Node* control);
   ValueEffectControl LowerStringFromCharCode(Node* node, Node* effect,
src/compiler/js-builtin-reducer.cc
@@ -930,27 +930,21 @@ Reduction JSBuiltinReducer::ReduceArrayBufferViewAccessor(
   Node* control = NodeProperties::GetControlInput(node);
   if (HasInstanceTypeWitness(receiver, effect, instance_type)) {
     // Load the {receiver}s field.
-    Node* receiver_length = effect = graph()->NewNode(
+    Node* receiver_value = effect = graph()->NewNode(
         simplified()->LoadField(access), receiver, effect, control);
 
     // Check if the {receiver}s buffer was neutered.
     Node* receiver_buffer = effect = graph()->NewNode(
         simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
         receiver, effect, control);
-    Node* receiver_buffer_bitfield = effect = graph()->NewNode(
-        simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()),
-        receiver_buffer, effect, control);
-    Node* check = graph()->NewNode(
-        simplified()->NumberEqual(),
-        graph()->NewNode(
-            simplified()->NumberBitwiseAnd(), receiver_buffer_bitfield,
-            jsgraph()->Constant(JSArrayBuffer::WasNeutered::kMask)),
-        jsgraph()->ZeroConstant());
+    Node* check = effect =
+        graph()->NewNode(simplified()->ArrayBufferWasNeutered(),
+                         receiver_buffer, effect, control);
 
     // Default to zero if the {receiver}s buffer was neutered.
     Node* value = graph()->NewNode(
-        common()->Select(MachineRepresentation::kTagged, BranchHint::kTrue),
-        check, receiver_length, jsgraph()->ZeroConstant());
+        common()->Select(MachineRepresentation::kTagged, BranchHint::kFalse),
+        check, jsgraph()->ZeroConstant(), receiver_value);
 
     ReplaceWithValue(node, value, effect, control);
     return Replace(value);
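
Note the inverted Select above: the old check meant "buffer was not neutered" (its masked bit field compared equal to zero), while ArrayBufferWasNeutered means the opposite, so the Select operands and the branch hint flip while the selected result is unchanged. In scalar terms (sketch):

    // Before: condition is "not neutered"; expected true.
    int SelectBefore(bool not_neutered, int receiver_length) {
      return not_neutered ? receiver_length : 0;  // BranchHint::kTrue
    }

    // After: condition is "was neutered"; expected false.
    int SelectAfter(bool was_neutered, int receiver_value) {
      return was_neutered ? 0 : receiver_value;   // BranchHint::kFalse
    }

The same inversion appears in BuildElementAccess in the next file.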
src/compiler/js-native-context-specialization.cc
@@ -1048,20 +1048,13 @@ JSNativeContextSpecialization::BuildElementAccess(
     Node* buffer = effect = graph()->NewNode(
         simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
         receiver, effect, control);
-    Node* buffer_bitfield = effect = graph()->NewNode(
-        simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()),
-        buffer, effect, control);
-    Node* check = graph()->NewNode(
-        simplified()->NumberEqual(),
-        graph()->NewNode(
-            simplified()->NumberBitwiseAnd(), buffer_bitfield,
-            jsgraph()->Constant(JSArrayBuffer::WasNeutered::kMask)),
-        jsgraph()->ZeroConstant());
+    Node* check = effect = graph()->NewNode(
+        simplified()->ArrayBufferWasNeutered(), buffer, effect, control);
 
     // Default to zero if the {receiver}s buffer was neutered.
     length = graph()->NewNode(
-        common()->Select(MachineRepresentation::kTagged, BranchHint::kTrue),
-        check, length, jsgraph()->ZeroConstant());
+        common()->Select(MachineRepresentation::kTagged, BranchHint::kFalse),
+        check, jsgraph()->ZeroConstant(), length);
 
     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
       // Check that the {index} is a valid array index, we do the actual
src/compiler/load-elimination.cc
@@ -55,6 +55,8 @@ bool MustAlias(Node* a, Node* b) { return QueryAlias(a, b) == kMustAlias; }
 Reduction LoadElimination::Reduce(Node* node) {
   switch (node->opcode()) {
+    case IrOpcode::kArrayBufferWasNeutered:
+      return ReduceArrayBufferWasNeutered(node);
     case IrOpcode::kCheckMaps:
       return ReduceCheckMaps(node);
     case IrOpcode::kEnsureWritableFastElements:
@@ -85,6 +87,65 @@ Reduction LoadElimination::Reduce(Node* node) {
   return NoChange();
 }
 
+namespace {
+
+bool IsCompatibleCheck(Node const* a, Node const* b) {
+  if (a->op() != b->op()) return false;
+  for (int i = a->op()->ValueInputCount(); --i >= 0;) {
+    if (!MustAlias(a->InputAt(i), b->InputAt(i))) return false;
+  }
+  return true;
+}
+
+}  // namespace
+
+Node* LoadElimination::AbstractChecks::Lookup(Node* node) const {
+  for (Node* const check : nodes_) {
+    if (check && IsCompatibleCheck(check, node)) {
+      return check;
+    }
+  }
+  return nullptr;
+}
+
+bool LoadElimination::AbstractChecks::Equals(AbstractChecks const* that) const {
+  if (this == that) return true;
+  for (size_t i = 0; i < arraysize(nodes_); ++i) {
+    if (Node* this_node = this->nodes_[i]) {
+      for (size_t j = 0;; ++j) {
+        if (j == arraysize(nodes_)) return false;
+        if (that->nodes_[j] == this_node) break;
+      }
+    }
+  }
+  for (size_t i = 0; i < arraysize(nodes_); ++i) {
+    if (Node* that_node = that->nodes_[i]) {
+      for (size_t j = 0;; ++j) {
+        if (j == arraysize(nodes_)) return false;
+        if (this->nodes_[j] == that_node) break;
+      }
+    }
+  }
+  return true;
+}
+
+LoadElimination::AbstractChecks const* LoadElimination::AbstractChecks::Merge(
+    AbstractChecks const* that, Zone* zone) const {
+  if (this->Equals(that)) return this;
+  AbstractChecks* copy = new (zone) AbstractChecks(zone);
+  for (Node* const this_node : this->nodes_) {
+    if (this_node == nullptr) continue;
+    for (Node* const that_node : that->nodes_) {
+      if (this_node == that_node) {
+        copy->nodes_[copy->next_index_++] = this_node;
+        break;
+      }
+    }
+  }
+  copy->next_index_ %= arraysize(nodes_);
+  return copy;
+}
+
 Node* LoadElimination::AbstractElements::Lookup(Node* object,
                                                 Node* index) const {
   for (Element const element : elements_) {
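
Semantically, AbstractChecks::Equals is order-insensitive set equality over the tracked check nodes, and AbstractChecks::Merge keeps only the checks present in both incoming states: at a control-flow merge a check remains valid only if it holds on every path. The same semantics expressed with std::set, as an expository sketch:

    #include <algorithm>
    #include <iterator>
    #include <set>

    using CheckSet = std::set<const void*>;

    // Equals(): same members, regardless of slot order.
    bool EqualsSketch(const CheckSet& a, const CheckSet& b) { return a == b; }

    // Merge(): set intersection; only checks valid on both paths survive.
    CheckSet MergeSketch(const CheckSet& a, const CheckSet& b) {
      CheckSet out;
      std::set_intersection(a.begin(), a.end(), b.begin(), b.end(),
                            std::inserter(out, out.begin()));
      return out;
    }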
@@ -165,6 +226,7 @@ LoadElimination::AbstractElements::Merge(AbstractElements const* that,
           this_element.index == that_element.index &&
           this_element.value == that_element.value) {
         copy->elements_[copy->next_index_++] = this_element;
+        break;
       }
     }
   }
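
(The added break above is a small bug fix in its own right: without it, a this_element matching more than one entry of that->elements_ would be copied once per match, bumping copy->next_index_ more often than a single element should.)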
@@ -194,6 +256,13 @@ LoadElimination::AbstractField const* LoadElimination::AbstractField::Kill(
 }
 
 bool LoadElimination::AbstractState::Equals(AbstractState const* that) const {
+  if (this->checks_) {
+    if (!that->checks_ || !that->checks_->Equals(this->checks_)) {
+      return false;
+    }
+  } else if (that->checks_) {
+    return false;
+  }
   if (this->elements_) {
     if (!that->elements_ || !that->elements_->Equals(this->elements_)) {
       return false;
@@ -215,6 +284,12 @@ bool LoadElimination::AbstractState::Equals(AbstractState const* that) const {
 
 void LoadElimination::AbstractState::Merge(AbstractState const* that,
                                            Zone* zone) {
+  // Merge the information we have about the checks.
+  if (this->checks_) {
+    this->checks_ =
+        that->checks_ ? that->checks_->Merge(this->checks_, zone) : nullptr;
+  }
+
   // Merge the information we have about the elements.
   if (this->elements_) {
     this->elements_ = that->elements_
@@ -234,6 +309,21 @@ void LoadElimination::AbstractState::Merge(AbstractState const* that,
   }
 }
 
+Node* LoadElimination::AbstractState::LookupCheck(Node* node) const {
+  return this->checks_ ? this->checks_->Lookup(node) : nullptr;
+}
+
+LoadElimination::AbstractState const* LoadElimination::AbstractState::AddCheck(
+    Node* node, Zone* zone) const {
+  AbstractState* that = new (zone) AbstractState(*this);
+  if (that->checks_) {
+    that->checks_ = that->checks_->Extend(node, zone);
+  } else {
+    that->checks_ = new (zone) AbstractChecks(node, zone);
+  }
+  return that;
+}
+
 Node* LoadElimination::AbstractState::LookupElement(Node* object,
                                                     Node* index) const {
   if (this->elements_) {
@@ -315,6 +405,18 @@ void LoadElimination::AbstractStateForEffectNodes::Set(
   info_for_node_[id] = state;
 }
 
+Reduction LoadElimination::ReduceArrayBufferWasNeutered(Node* node) {
+  Node* const effect = NodeProperties::GetEffectInput(node);
+  AbstractState const* state = node_states_.Get(effect);
+  if (state == nullptr) return NoChange();
+  if (Node* const check = state->LookupCheck(node)) {
+    ReplaceWithValue(node, check, effect);
+    return Replace(check);
+  }
+  state = state->AddCheck(node, zone());
+  return UpdateState(node, state);
+}
+
 Reduction LoadElimination::ReduceCheckMaps(Node* node) {
   Node* const object = NodeProperties::GetValueInput(node, 0);
   Node* const effect = NodeProperties::GetEffectInput(node);
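
ReduceArrayBufferWasNeutered above is the whole redundancy elimination: the first check on an effect path misses LookupCheck and gets recorded via AddCheck; a later compatible check (same operator and must-aliasing inputs, per IsCompatibleCheck) is rewired to the first one by ReplaceWithValue, which also takes it off the effect chain. Since LoadElimination already resets its abstract state at calls and other opaque effects (pre-existing behavior, not shown in this diff), this matches the "as long as there is no call in the effect chain" caveat from the commit message.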
src/compiler/load-elimination.h
@@ -24,6 +24,37 @@ class LoadElimination final : public AdvancedReducer {
   Reduction Reduce(Node* node) final;
 
  private:
+  static const size_t kMaxTrackedChecks = 8;
+
+  // Abstract state to approximate the current state of checks that are
+  // only invalidated by calls, i.e. array buffer neutering checks, along
+  // the effect paths through the graph.
+  class AbstractChecks final : public ZoneObject {
+   public:
+    explicit AbstractChecks(Zone* zone) {
+      for (size_t i = 0; i < arraysize(nodes_); ++i) {
+        nodes_[i] = nullptr;
+      }
+    }
+    AbstractChecks(Node* node, Zone* zone) : AbstractChecks(zone) {
+      nodes_[next_index_++] = node;
+    }
+
+    AbstractChecks const* Extend(Node* node, Zone* zone) const {
+      AbstractChecks* that = new (zone) AbstractChecks(*this);
+      that->nodes_[that->next_index_] = node;
+      that->next_index_ = (that->next_index_ + 1) % arraysize(nodes_);
+      return that;
+    }
+    Node* Lookup(Node* node) const;
+    bool Equals(AbstractChecks const* that) const;
+    AbstractChecks const* Merge(AbstractChecks const* that, Zone* zone) const;
+
+   private:
+    Node* nodes_[kMaxTrackedChecks];
+    size_t next_index_ = 0;
+  };
+
   static const size_t kMaxTrackedElements = 8;
 
   // Abstract state to approximate the current state of an element along the
@@ -133,7 +164,11 @@ class LoadElimination final : public AdvancedReducer {
                                  Zone* zone) const;
     Node* LookupElement(Node* object, Node* index) const;
+    AbstractState const* AddCheck(Node* node, Zone* zone) const;
+    Node* LookupCheck(Node* node) const;
 
    private:
+    AbstractChecks const* checks_ = nullptr;
     AbstractElements const* elements_ = nullptr;
     AbstractField const* fields_[kMaxTrackedFields];
   };
@@ -150,6 +185,7 @@ class LoadElimination final : public AdvancedReducer {
     ZoneVector<AbstractState const*> info_for_node_;
   };
 
+  Reduction ReduceArrayBufferWasNeutered(Node* node);
   Reduction ReduceCheckMaps(Node* node);
   Reduction ReduceEnsureWritableFastElements(Node* node);
   Reduction ReduceMaybeGrowFastElements(Node* node);
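
Extend() in AbstractChecks writes into a fixed ring of kMaxTrackedChecks (8) slots, so a ninth live check silently overwrites the oldest tracked one instead of growing the state. The eviction policy in isolation (sketch, not V8 code):

    #include <cstddef>

    template <typename T, size_t N>
    struct RingSlots {
      T slots[N] = {};
      size_t next_index = 0;

      void Extend(T value) {
        slots[next_index] = value;          // overwrites the oldest entry once full
        next_index = (next_index + 1) % N;  // same modulo step as AbstractChecks::Extend
      }
    };

Losing an evicted check is safe here: the worst case is a redundant check that could have been eliminated but is not.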
src/compiler/opcodes.h
@@ -306,6 +306,7 @@
   V(ObjectIsSmi)                \
   V(ObjectIsString)             \
   V(ObjectIsUndetectable)       \
+  V(ArrayBufferWasNeutered)     \
   V(EnsureWritableFastElements) \
   V(MaybeGrowFastElements)      \
   V(TransitionElementsKind)
src/compiler/simplified-lowering.cc
@@ -2347,9 +2347,9 @@ class RepresentationSelector {
       case IrOpcode::kObjectIsReceiver:
       case IrOpcode::kObjectIsSmi:
      case IrOpcode::kObjectIsString:
-      case IrOpcode::kObjectIsUndetectable: {
-        ProcessInput(node, 0, UseInfo::AnyTagged());
-        SetOutput(node, MachineRepresentation::kBit);
+      case IrOpcode::kObjectIsUndetectable:
+      case IrOpcode::kArrayBufferWasNeutered: {
+        VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kBit);
         return;
       }
       case IrOpcode::kCheckFloat64Hole: {
src/compiler/simplified-operator.cc
@@ -458,6 +458,13 @@ struct SimplifiedOperatorGlobalCache final {
   CHECKED_OP_LIST(CHECKED)
 #undef CHECKED
 
+  struct ArrayBufferWasNeuteredOperator final : public Operator {
+    ArrayBufferWasNeuteredOperator()
+        : Operator(IrOpcode::kArrayBufferWasNeutered, Operator::kEliminatable,
+                   "ArrayBufferWasNeutered", 1, 1, 1, 1, 1, 0) {}
+  };
+  ArrayBufferWasNeuteredOperator kArrayBufferWasNeutered;
+
   template <CheckForMinusZeroMode kMode>
   struct ChangeFloat64ToTaggedOperator final
       : public Operator1<CheckForMinusZeroMode> {
@@ -614,6 +621,7 @@ SimplifiedOperatorBuilder::SimplifiedOperatorBuilder(Zone* zone)
   const Operator* SimplifiedOperatorBuilder::Name() { return &cache_.k##Name; }
 PURE_OP_LIST(GET_FROM_CACHE)
 CHECKED_OP_LIST(GET_FROM_CACHE)
+GET_FROM_CACHE(ArrayBufferWasNeutered)
 #undef GET_FROM_CACHE
 
 const Operator* SimplifiedOperatorBuilder::ChangeFloat64ToTagged(
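
For readers new to TurboFan's Operator constructor: the six counts after the mnemonic are inputs then outputs, each in value/effect/control order (annotation mine; cross-check against src/compiler/operator.h):

    // Operator(IrOpcode::kArrayBufferWasNeutered,  // opcode
    //          Operator::kEliminatable,            // removable when unused, but not pure
    //          "ArrayBufferWasNeutered",           // mnemonic
    //          1, 1, 1,    // inputs:  1 value (the buffer), 1 effect, 1 control
    //          1, 1, 0);   // outputs: 1 value (the bit), 1 effect, no control

Keeping an effect output, rather than making the operator pure, is what places each check on the effect chain where LoadElimination can observe and deduplicate it.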
src/compiler/simplified-operator.h
@@ -338,6 +338,9 @@ class SimplifiedOperatorBuilder final : public ZoneObject {
   const Operator* ObjectIsString();
   const Operator* ObjectIsUndetectable();
 
+  // array-buffer-was-neutered buffer
+  const Operator* ArrayBufferWasNeutered();
+
   // ensure-writable-fast-elements object, elements
   const Operator* EnsureWritableFastElements();
src/compiler/typer.cc
@@ -1695,6 +1695,9 @@ Type* Typer::Visitor::TypeObjectIsUndetectable(Node* node) {
   return TypeUnaryOp(node, ObjectIsUndetectable);
 }
 
+Type* Typer::Visitor::TypeArrayBufferWasNeutered(Node* node) {
+  return Type::Boolean();
+}
 
 // Machine operators.
src/compiler/verifier.cc
@@ -868,6 +868,7 @@ void Verifier::Visitor::Check(Node* node) {
     case IrOpcode::kObjectIsSmi:
     case IrOpcode::kObjectIsString:
     case IrOpcode::kObjectIsUndetectable:
+    case IrOpcode::kArrayBufferWasNeutered:
       CheckValueInputIs(node, 0, Type::Any());
       CheckUpperIs(node, Type::Boolean());
       break;