Commit fd24deb0 authored by bmeurer, committed by Commit Bot

[turbofan] Replace uninitialized JSConstruct nodes with SOFT deopt.

Similar to JSCall, we can also replace uninitialized JSConstruct nodes
with SOFT deopts to ensure that we don't generate unnecessary dead code.
This shows up, for example, in the hot parts of the Node.js event emitter,
where the generic code path that handles events with four or more
parameters may never have run, yet we currently still generate most of its
code because the new Array call at its beginning is not immediately turned
into a SOFT deopt.
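
For illustration, a minimal sketch of the shape of code this helps with
(hypothetical, loosely modeled on the Node.js event emitter, not its actual
source): the new Array construct on the rarely taken generic arm has no
feedback yet, so with this change TurboFan replaces it with a SOFT deopt
and prunes the arm instead of compiling it.

  // Hypothetical sketch, loosely modeled on the Node.js EventEmitter's
  // emit(); not the actual Node.js source.
  function emitHandler(handler, a, b, c) {
    switch (arguments.length - 1) {
      case 1: return handler(a);
      case 2: return handler(a, b);
      case 3: return handler(a, b, c);
      default: {
        // Generic arm for 4+ arguments. If it has never run, the JSConstruct
        // for the "new Array" below has no type feedback; with this change
        // TurboFan emits a SOFT deopt here instead of compiling the rest of
        // this (dead) arm.
        const args = new Array(arguments.length - 1);
        for (let i = 1; i < arguments.length; i++) args[i - 1] = arguments[i];
        return handler.apply(null, args);
      }
    }
  }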

Drive-by-fix: Also refactor the BytecodeGraphBuilder's handling of
Construct bytecodes a bit to reduce the amount of code duplication.

BUG=v8:4551, v8:5267
R=jarin@chromium.org

Review-Url: https://codereview.chromium.org/2958253002
Cr-Commit-Position: refs/heads/master@{#46339}
parent 2b1eb978
@@ -1581,28 +1581,63 @@ void BytecodeGraphBuilder::VisitCallRuntimeForPair() {
                                             Environment::kAttachFrameState);
 }
 
-Node* BytecodeGraphBuilder::ProcessConstructWithSpreadArguments(
-    const Operator* op, Node* callee, Node* new_target,
-    interpreter::Register receiver, size_t reg_count) {
-  int arg_count = static_cast<int>(reg_count);
+Node* const* BytecodeGraphBuilder::GetConstructArgumentsFromRegister(
+    Node* target, Node* new_target, interpreter::Register first_arg,
+    int arg_count) {
   // arity is args + callee and new target.
   int arity = arg_count + 2;
   Node** all = local_zone()->NewArray<Node*>(static_cast<size_t>(arity));
-  all[0] = callee;
-  int first_arg_index = receiver.index();
+  all[0] = target;
+  int first_arg_index = first_arg.index();
   for (int i = 0; i < arg_count; ++i) {
     all[1 + i] = environment()->LookupRegister(
         interpreter::Register(first_arg_index + i));
   }
   all[arity - 1] = new_target;
-  Node* value = MakeNode(op, arity, all, false);
-  return value;
+  return all;
+}
+
+Node* BytecodeGraphBuilder::ProcessConstructArguments(const Operator* op,
+                                                      Node* const* args,
+                                                      int arg_count) {
+  return MakeNode(op, arg_count, args, false);
+}
+
+void BytecodeGraphBuilder::VisitConstruct() {
+  PrepareEagerCheckpoint();
+  interpreter::Register callee_reg = bytecode_iterator().GetRegisterOperand(0);
+  interpreter::Register first_reg = bytecode_iterator().GetRegisterOperand(1);
+  size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
+  // Slot index of 0 is used indicate no feedback slot is available. Assert
+  // the assumption that slot index 0 is never a valid feedback slot.
+  STATIC_ASSERT(FeedbackVector::kReservedIndexCount > 0);
+  int const slot_id = bytecode_iterator().GetIndexOperand(3);
+  VectorSlotPair feedback = CreateVectorSlotPair(slot_id);
+
+  Node* new_target = environment()->LookupAccumulator();
+  Node* callee = environment()->LookupRegister(callee_reg);
+
+  CallFrequency frequency = ComputeCallFrequency(slot_id);
+  const Operator* op = javascript()->Construct(
+      static_cast<uint32_t>(reg_count + 2), frequency, feedback);
+  int arg_count = static_cast<int>(reg_count);
+  Node* const* args = GetConstructArgumentsFromRegister(callee, new_target,
+                                                        first_reg, arg_count);
+  Node* node = nullptr;
+  if (Node* simplified = TryBuildSimplifiedConstruct(
+          op, args, static_cast<int>(arg_count), feedback.slot())) {
+    if (environment() == nullptr) return;
+    node = simplified;
+  } else {
+    node = ProcessConstructArguments(op, args, 2 + arg_count);
+  }
+  environment()->BindAccumulator(node, Environment::kAttachFrameState);
 }
 
 void BytecodeGraphBuilder::VisitConstructWithSpread() {
   PrepareEagerCheckpoint();
   interpreter::Register callee_reg = bytecode_iterator().GetRegisterOperand(0);
-  interpreter::Register receiver = bytecode_iterator().GetRegisterOperand(1);
+  interpreter::Register first_reg = bytecode_iterator().GetRegisterOperand(1);
   size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
   Node* new_target = environment()->LookupAccumulator();
@@ -1610,8 +1645,10 @@ void BytecodeGraphBuilder::VisitConstructWithSpread() {
   const Operator* op =
       javascript()->ConstructWithSpread(static_cast<uint32_t>(reg_count + 2));
-  Node* value = ProcessConstructWithSpreadArguments(op, callee, new_target,
-                                                    receiver, reg_count);
+  int arg_count = static_cast<int>(reg_count);
+  Node* const* args = GetConstructArgumentsFromRegister(callee, new_target,
+                                                        first_reg, arg_count);
+  Node* value = ProcessConstructArguments(op, args, 2 + arg_count);
   environment()->BindAccumulator(value, Environment::kAttachFrameState);
 }
@@ -1628,46 +1665,6 @@ void BytecodeGraphBuilder::VisitInvokeIntrinsic() {
   environment()->BindAccumulator(value, Environment::kAttachFrameState);
 }
 
-Node* BytecodeGraphBuilder::ProcessConstructArguments(
-    const Operator* call_new_op, Node* callee, Node* new_target,
-    interpreter::Register receiver, size_t reg_count) {
-  int arg_count = static_cast<int>(reg_count);
-  // arity is args + callee and new target.
-  int arity = arg_count + 2;
-  Node** all = local_zone()->NewArray<Node*>(static_cast<size_t>(arity));
-  all[0] = callee;
-  int first_arg_index = receiver.index();
-  for (int i = 0; i < arg_count; ++i) {
-    all[1 + i] = environment()->LookupRegister(
-        interpreter::Register(first_arg_index + i));
-  }
-  all[arity - 1] = new_target;
-  Node* value = MakeNode(call_new_op, arity, all, false);
-  return value;
-}
-
-void BytecodeGraphBuilder::VisitConstruct() {
-  PrepareEagerCheckpoint();
-  interpreter::Register callee_reg = bytecode_iterator().GetRegisterOperand(0);
-  interpreter::Register receiver = bytecode_iterator().GetRegisterOperand(1);
-  size_t reg_count = bytecode_iterator().GetRegisterCountOperand(2);
-  // Slot index of 0 is used indicate no feedback slot is available. Assert
-  // the assumption that slot index 0 is never a valid feedback slot.
-  STATIC_ASSERT(FeedbackVector::kReservedIndexCount > 0);
-  int const slot_id = bytecode_iterator().GetIndexOperand(3);
-  VectorSlotPair feedback = CreateVectorSlotPair(slot_id);
-
-  Node* new_target = environment()->LookupAccumulator();
-  Node* callee = environment()->LookupRegister(callee_reg);
-
-  CallFrequency frequency = ComputeCallFrequency(slot_id);
-  const Operator* call = javascript()->Construct(
-      static_cast<uint32_t>(reg_count + 2), frequency, feedback);
-  Node* value =
-      ProcessConstructArguments(call, callee, new_target, receiver, reg_count);
-  environment()->BindAccumulator(value, Environment::kAttachFrameState);
-}
-
 void BytecodeGraphBuilder::VisitThrow() {
   BuildLoopExitsForFunctionExit();
   Node* value = environment()->LookupAccumulator();
@@ -2691,6 +2688,25 @@ Node* BytecodeGraphBuilder::TryBuildSimplifiedCall(const Operator* op,
   return nullptr;
 }
 
+Node* BytecodeGraphBuilder::TryBuildSimplifiedConstruct(const Operator* op,
+                                                        Node* const* args,
+                                                        int arg_count,
+                                                        FeedbackSlot slot) {
+  // TODO(mstarzinger,6112): This is a workaround for OSR loop entries being
+  // pruned from the graph by a soft-deopt. It can happen that a CallIC that
+  // control-dominates the OSR entry is still in "uninitialized" state.
+  if (!osr_ast_id_.IsNone()) return nullptr;
+  Node* effect = environment()->GetEffectDependency();
+  Node* control = environment()->GetControlDependency();
+  Reduction early_reduction = type_hint_lowering().ReduceConstructOperation(
+      op, args, arg_count, effect, control, slot);
+  if (early_reduction.Changed()) {
+    ApplyEarlyReduction(early_reduction);
+    return early_reduction.replacement();
+  }
+  return nullptr;
+}
+
 Node* BytecodeGraphBuilder::TryBuildSimplifiedLoadNamed(const Operator* op,
                                                         Node* receiver,
                                                         FeedbackSlot slot) {
...
@@ -124,14 +124,11 @@ class BytecodeGraphBuilder {
                              int arg_count);
   Node* ProcessCallArguments(const Operator* call_op, Node* callee,
                              interpreter::Register receiver, size_t reg_count);
-  Node* ProcessConstructArguments(const Operator* call_new_op, Node* callee,
-                                  Node* new_target,
-                                  interpreter::Register receiver,
-                                  size_t reg_count);
-  Node* ProcessConstructWithSpreadArguments(const Operator* op, Node* callee,
-                                            Node* new_target,
-                                            interpreter::Register receiver,
-                                            size_t reg_count);
+  Node* const* GetConstructArgumentsFromRegister(
+      Node* target, Node* new_target, interpreter::Register first_arg,
+      int arg_count);
+  Node* ProcessConstructArguments(const Operator* op, Node* const* args,
+                                  int arg_count);
   Node* ProcessCallRuntimeArguments(const Operator* call_runtime_op,
                                     interpreter::Register receiver,
                                     size_t reg_count);
@@ -190,6 +187,8 @@ class BytecodeGraphBuilder {
   Node* TryBuildSimplifiedToPrimitiveToString(Node* input, FeedbackSlot slot);
   Node* TryBuildSimplifiedCall(const Operator* op, Node* const* args,
                                int arg_count, FeedbackSlot slot);
+  Node* TryBuildSimplifiedConstruct(const Operator* op, Node* const* args,
+                                    int arg_count, FeedbackSlot slot);
   Node* TryBuildSimplifiedLoadNamed(const Operator* op, Node* receiver,
                                     FeedbackSlot slot);
   Node* TryBuildSimplifiedLoadKeyed(const Operator* op, Node* receiver,
...
@@ -1029,18 +1029,8 @@ Reduction JSCallReducer::ReduceJSCall(Node* node) {
   if (nexus.IsUninitialized()) {
     if (flags() & kBailoutOnUninitialized) {
       // Introduce a SOFT deopt if the call {node} wasn't executed so far.
-      Node* frame_state = NodeProperties::FindFrameStateBefore(node);
-      Node* deoptimize = graph()->NewNode(
-          common()->Deoptimize(
-              DeoptimizeKind::kSoft,
-              DeoptimizeReason::kInsufficientTypeFeedbackForCall),
-          frame_state, effect, control);
-      // TODO(bmeurer): This should be on the AdvancedReducer somehow.
-      NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
-      Revisit(graph()->end());
-      node->TrimInputCount(0);
-      NodeProperties::ChangeOp(node, common()->Dead());
-      return Changed(node);
+      return ReduceSoftDeoptimize(
+          node, DeoptimizeReason::kInsufficientTypeFeedbackForCall);
     }
     return NoChange();
   }
@@ -1161,8 +1151,18 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
     return NoChange();
   }
 
+  // Extract feedback from the {node} using the CallICNexus.
   if (!p.feedback().IsValid()) return NoChange();
   CallICNexus nexus(p.feedback().vector(), p.feedback().slot());
+  if (nexus.IsUninitialized()) {
+    if (flags() & kBailoutOnUninitialized) {
+      // Introduce a SOFT deopt if the construct {node} wasn't executed so far.
+      return ReduceSoftDeoptimize(
+          node, DeoptimizeReason::kInsufficientTypeFeedbackForConstruct);
+    }
+    return NoChange();
+  }
+
   Handle<Object> feedback(nexus.GetFeedback(), isolate());
   if (feedback->IsAllocationSite()) {
     // The feedback is an AllocationSite, which means we have called the
@@ -1245,6 +1245,22 @@ Reduction JSCallReducer::ReduceReturnReceiver(Node* node) {
   return Replace(receiver);
 }
 
+Reduction JSCallReducer::ReduceSoftDeoptimize(Node* node,
+                                              DeoptimizeReason reason) {
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+  Node* frame_state = NodeProperties::FindFrameStateBefore(node);
+  Node* deoptimize =
+      graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kSoft, reason),
+                       frame_state, effect, control);
+  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
+  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
+  Revisit(graph()->end());
+  node->TrimInputCount(0);
+  NodeProperties::ChangeOp(node, common()->Dead());
+  return Changed(node);
+}
+
 Graph* JSCallReducer::graph() const { return jsgraph()->graph(); }
 
 Isolate* JSCallReducer::isolate() const { return jsgraph()->isolate(); }
...
@@ -7,6 +7,7 @@
 
 #include "src/base/flags.h"
 #include "src/compiler/graph-reducer.h"
+#include "src/deoptimize-reason.h"
 
 namespace v8 {
 namespace internal {
@@ -74,6 +75,8 @@ class JSCallReducer final : public AdvancedReducer {
   Reduction ReduceJSCallWithSpread(Node* node);
   Reduction ReduceReturnReceiver(Node* node);
 
+  Reduction ReduceSoftDeoptimize(Node* node, DeoptimizeReason reason);
+
   Graph* graph() const;
   JSGraph* jsgraph() const { return jsgraph_; }
   Isolate* isolate() const;
...
@@ -290,6 +290,20 @@ Reduction JSTypeHintLowering::ReduceCallOperation(const Operator* op,
   return Reduction();
 }
 
+Reduction JSTypeHintLowering::ReduceConstructOperation(
+    const Operator* op, Node* const* args, int arg_count, Node* effect,
+    Node* control, FeedbackSlot slot) const {
+  DCHECK_EQ(IrOpcode::kJSConstruct, op->opcode());
+  DCHECK(!slot.IsInvalid());
+  CallICNexus nexus(feedback_vector(), slot);
+  if (Node* node = TryBuildSoftDeopt(
+          nexus, effect, control,
+          DeoptimizeReason::kInsufficientTypeFeedbackForConstruct)) {
+    return Reduction(node);
+  }
+  return Reduction();
+}
+
 Reduction JSTypeHintLowering::ReduceLoadNamedOperation(
     const Operator* op, Node* obj, Node* effect, Node* control,
     FeedbackSlot slot) const {
...
@@ -69,6 +69,11 @@ class JSTypeHintLowering {
                                 int arg_count, Node* effect, Node* control,
                                 FeedbackSlot slot) const;
 
+  // Potential reduction of construct operations.
+  Reduction ReduceConstructOperation(const Operator* op, Node* const* args,
+                                     int arg_count, Node* effect, Node* control,
+                                     FeedbackSlot slot) const;
+
   // Potential reduction of property access operations.
   Reduction ReduceLoadNamedOperation(const Operator* op, Node* obj,
                                      Node* effect, Node* control,
...
@@ -24,6 +24,8 @@ namespace internal {
   V(InsufficientTypeFeedbackForCall, "Insufficient type feedback for call")   \
   V(InsufficientTypeFeedbackForCallWithArguments,                             \
     "Insufficient type feedback for call with arguments")                     \
+  V(InsufficientTypeFeedbackForConstruct,                                     \
+    "Insufficient type feedback for construct")                               \
   V(FastPathFailed, "Falling off the fast path")                              \
   V(InsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,                 \
     "Insufficient type feedback for combined type of binary operation")       \
...
@@ -18,6 +18,8 @@ class B extends A {
 
 test = new B(0);
 test = new B(0);
+assertThrowsEquals(() => {new B(1)}, ReferenceError());
+assertThrowsEquals(() => {new B(1)}, ReferenceError());
 %OptimizeFunctionOnNextCall(B);
 test = new B(0);
 assertOptimized(B);
...