Commit 35f6c0fd authored by bmeurer, committed by Commit bot

[turbofan] Optimize silent hole checks on legacy const context slots.

Currently we always generate a diamond in the graph builder for every
legacy const context slot, which we cannot get rid of until late control
reduction, even if we know after context specialization that the slot is
already initialized.

Now we generate a select instead, which the CommonOperatorReducer
happily removes during typed lowering. This greatly speeds up asm.js
code generated by Emscripten with the new POINTER_MASKING mode.

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/1072353002

Cr-Commit-Position: refs/heads/master@{#27739}
parent c0593a1f
...@@ -2728,16 +2728,10 @@ Node* AstGraphBuilder::BuildRestArgumentsArray(Variable* rest, int index) { ...@@ -2728,16 +2728,10 @@ Node* AstGraphBuilder::BuildRestArgumentsArray(Variable* rest, int index) {
// Builds a silent hole check: yields |for_hole| if |value| is the hole,
// otherwise |not_hole|. Used for legacy `const` context slots, where reading
// an uninitialized binding silently produces undefined instead of throwing.
//
// Emits a Select node rather than a branch diamond so that, once context
// specialization proves the slot is initialized (the condition folds to a
// constant), CommonOperatorReducer can remove the check entirely during
// typed lowering. The kFalse hint records that the hole case is unlikely.
Node* AstGraphBuilder::BuildHoleCheckSilent(Node* value, Node* for_hole,
                                            Node* not_hole) {
  Node* the_hole = jsgraph()->TheHoleConstant();
  Node* check = NewNode(javascript()->StrictEqual(), value, the_hole);
  return NewNode(common()->Select(kMachAnyTagged, BranchHint::kFalse), check,
                 for_hole, not_hole);
}
......
...@@ -109,7 +109,12 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) { ...@@ -109,7 +109,12 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) {
Node* vtrue = NodeProperties::GetValueInput(node, 1); Node* vtrue = NodeProperties::GetValueInput(node, 1);
Node* vfalse = NodeProperties::GetValueInput(node, 2); Node* vfalse = NodeProperties::GetValueInput(node, 2);
if (vtrue == vfalse) return Replace(vtrue); if (vtrue == vfalse) return Replace(vtrue);
if (cond->opcode() == IrOpcode::kFloat32LessThan) { switch (cond->opcode()) {
case IrOpcode::kHeapConstant: {
HeapObjectMatcher<HeapObject> mcond(cond);
return Replace(mcond.Value().handle()->BooleanValue() ? vtrue : vfalse);
}
case IrOpcode::kFloat32LessThan: {
Float32BinopMatcher mcond(cond); Float32BinopMatcher mcond(cond);
if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) && if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
vfalse->opcode() == IrOpcode::kFloat32Sub && vfalse->opcode() == IrOpcode::kFloat32Sub &&
...@@ -126,7 +131,9 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) { ...@@ -126,7 +131,9 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) {
machine()->HasFloat32Max()) { machine()->HasFloat32Max()) {
return Change(node, machine()->Float32Max(), vtrue, vfalse); return Change(node, machine()->Float32Max(), vtrue, vfalse);
} }
} else if (cond->opcode() == IrOpcode::kFloat64LessThan) { break;
}
case IrOpcode::kFloat64LessThan: {
Float64BinopMatcher mcond(cond); Float64BinopMatcher mcond(cond);
if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) && if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
vfalse->opcode() == IrOpcode::kFloat64Sub && vfalse->opcode() == IrOpcode::kFloat64Sub &&
...@@ -143,6 +150,10 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) { ...@@ -143,6 +150,10 @@ Reduction CommonOperatorReducer::ReduceSelect(Node* node) {
machine()->HasFloat64Max()) { machine()->HasFloat64Max()) {
return Change(node, machine()->Float64Max(), vtrue, vfalse); return Change(node, machine()->Float64Max(), vtrue, vfalse);
} }
break;
}
default:
break;
} }
return NoChange(); return NoChange();
} }
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Regression/feature test for asm.js pointer masking: heap accesses are
// masked with a compile-time constant so indices wrap within a power-of-two
// window instead of going out of bounds.
var stdlib = this;
var foreign = {};
// 64 KiB backing store; only the first MASK1+1 (1024) bytes are addressable
// through the masked accessors below.
var heap = new ArrayBuffer(64 * 1024);
var pm1 = (function(stdlib, foreign, heap) {
  "use asm";
  var HEAP8 = new stdlib.Int8Array(heap);
  // Legacy-const mask; every heap index is ANDed with this, so effective
  // addresses are i mod 1024. (This const slot is what the silent hole
  // check optimization targets.)
  const MASK1 = 1023;
  // Reads the int8 at masked index i.
  function load1(i) {
    i = i|0;
    var j = 0;
    j = HEAP8[(i & MASK1)]|0;
    return j|0;
  }
  // Writes j to the int8 at masked index i.
  function store1(i, j) {
    i = i|0;
    j = j|0;
    HEAP8[(i & MASK1)] = j;
  }
  return {load1: load1, store1: store1};
})(stdlib, foreign, heap);
// assertEquals is presumably provided by the V8 test harness (mjsunit) —
// not defined in this file.
// Fresh heap is zero-initialized; 1025 & 1023 == 1, still zero.
assertEquals(0, pm1.load1(0));
assertEquals(0, pm1.load1(1025));
pm1.store1(0, 1);
// 1025 masks to index 1.
pm1.store1(1025, 127);
assertEquals(1, pm1.load1(0));
// 1024 masks back to index 0.
assertEquals(1, pm1.load1(1024));
assertEquals(127, pm1.load1(1));
assertEquals(127, pm1.load1(1025));
...@@ -221,6 +221,28 @@ TEST_F(CommonOperatorReducerTest, RedundantSelect) { ...@@ -221,6 +221,28 @@ TEST_F(CommonOperatorReducerTest, RedundantSelect) {
} }
// A Select whose condition is the false constant must be replaced by its
// "false" value input.
TEST_F(CommonOperatorReducerTest, SelectWithFalseConstant) {
  Node* const value_if_true = Parameter(0);
  Node* const value_if_false = Parameter(1);
  Node* const select =
      graph()->NewNode(common()->Select(kMachAnyTagged), FalseConstant(),
                       value_if_true, value_if_false);
  Reduction const r = Reduce(select);
  ASSERT_TRUE(r.Changed());
  EXPECT_EQ(value_if_false, r.replacement());
}
// A Select whose condition is the true constant must be replaced by its
// "true" value input.
TEST_F(CommonOperatorReducerTest, SelectWithTrueConstant) {
  Node* const value_if_true = Parameter(0);
  Node* const value_if_false = Parameter(1);
  Node* const select =
      graph()->NewNode(common()->Select(kMachAnyTagged), TrueConstant(),
                       value_if_true, value_if_false);
  Reduction const r = Reduce(select);
  ASSERT_TRUE(r.Changed());
  EXPECT_EQ(value_if_true, r.replacement());
}
TEST_F(CommonOperatorReducerTest, SelectToFloat32Abs) { TEST_F(CommonOperatorReducerTest, SelectToFloat32Abs) {
Node* p0 = Parameter(0); Node* p0 = Parameter(0);
Node* c0 = Float32Constant(0.0); Node* c0 = Float32Constant(0.0);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment