Commit 47e5282b authored by Georg Neis, committed by Commit Bot

[turbofan] Move kBailoutOnUninitialized check into ReduceSoftDeoptimize

This automatically fixes another place in element access where the check
was missing.

Change-Id: I585079d76c03903f3486ec9e001a51a131259c0b
Reviewed-on: https://chromium-review.googlesource.com/c/1460950
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59490}
parent 1f5171dd
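The change is a plain hoisting of a guard into the shared helper: ReduceSoftDeoptimize() now checks kBailoutOnUninitialized itself and answers NoChange() when the flag is off, so every caller can invoke it unconditionally, including the element-access path that previously lacked the check. The following standalone sketch illustrates that shape only; it is not the V8 code, and the Reducer, Flags, Reduction, and ReduceCallWithoutFeedback names are invented stand-ins, while kBailoutOnUninitialized and ReduceSoftDeoptimize merely mirror the names used in the diff below.

// Minimal sketch of the pattern in this commit (invented types, not V8).
#include <cstdio>

enum Flags { kNoFlags = 0, kBailoutOnUninitialized = 1 << 0 };
enum class Reduction { kNoChange, kChanged };

class Reducer {
 public:
  explicit Reducer(Flags flags) : flags_(flags) {}

  // After the change: the flag test lives here, in one place.
  Reduction ReduceSoftDeoptimize(const char* reason) {
    if (flags_ & kBailoutOnUninitialized) {
      std::printf("soft deopt: %s\n", reason);
      return Reduction::kChanged;  // stands in for Changed(node)
    }
    return Reduction::kNoChange;   // stands in for NoChange()
  }

  // Callers no longer repeat the flag test; they just delegate.
  Reduction ReduceCallWithoutFeedback() {
    return ReduceSoftDeoptimize("InsufficientTypeFeedbackForCall");
  }

 private:
  Flags flags_;
};

int main() {
  Reducer with_bailout(kBailoutOnUninitialized);
  Reducer without_bailout(kNoFlags);
  with_bailout.ReduceCallWithoutFeedback();     // replaced by a soft deopt
  without_bailout.ReduceCallWithoutFeedback();  // reports "no change"
  return 0;
}

With this shape, a call site that forgets to test the flag can no longer introduce an unconditional deopt; forgetting simply means calling the helper, which does the right thing either way.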
@@ -3335,13 +3335,9 @@ Reduction JSCallReducer::ReduceJSCall(Node* node) {
   if (!p.feedback().IsValid()) return NoChange();
   FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
   if (nexus.IsUninitialized()) {
-    if (flags() & kBailoutOnUninitialized) {
-      // Introduce a SOFT deopt if the call {node} wasn't executed so far.
-      return ReduceSoftDeoptimize(
-          node, DeoptimizeReason::kInsufficientTypeFeedbackForCall);
-    }
-    return NoChange();
+    return ReduceSoftDeoptimize(
+        node, DeoptimizeReason::kInsufficientTypeFeedbackForCall);
   }
   base::Optional<HeapObjectRef> feedback =
       GetHeapObjectFeedback(broker(), nexus);
@@ -3755,14 +3751,9 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
   if (p.feedback().IsValid()) {
     FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
     if (nexus.IsUninitialized()) {
-      if (flags() & kBailoutOnUninitialized) {
-        // Introduce a SOFT deopt if the construct {node} wasn't executed so
-        // far.
-        return ReduceSoftDeoptimize(
-            node, DeoptimizeReason::kInsufficientTypeFeedbackForConstruct);
-      }
-      return NoChange();
+      return ReduceSoftDeoptimize(
+          node, DeoptimizeReason::kInsufficientTypeFeedbackForConstruct);
     }
     base::Optional<HeapObjectRef> feedback =
         GetHeapObjectFeedback(broker(), nexus);
@@ -4307,6 +4298,7 @@ Reduction JSCallReducer::ReduceReturnReceiver(Node* node) {
 Reduction JSCallReducer::ReduceSoftDeoptimize(Node* node,
                                               DeoptimizeReason reason) {
+  if (flags() & kBailoutOnUninitialized) {
   Node* effect = NodeProperties::GetEffectInput(node);
   Node* control = NodeProperties::GetControlInput(node);
   Node* frame_state = NodeProperties::FindFrameStateBefore(node);
@@ -4319,6 +4311,8 @@ Reduction JSCallReducer::ReduceSoftDeoptimize(Node* node,
   node->TrimInputCount(0);
   NodeProperties::ChangeOp(node, common()->Dead());
   return Changed(node);
+  }
+  return NoChange();
 }
 // ES6 section 22.1.3.18 Array.prototype.push ( )
...
@@ -1085,12 +1085,8 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
   // Nothing to do if we have no non-deprecated maps.
   if (access_infos.empty()) {
-    if (flags() & kBailoutOnUninitialized) {
-      return ReduceSoftDeoptimize(
-          node,
-          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
-    }
-    return NoChange();
+    return ReduceSoftDeoptimize(
+        node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
   }
   // Ensure that {index} matches the specified {name} (if {index} is given).
@@ -1336,12 +1332,8 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccessFromNexus(
   if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
     return NoChange();
   } else if (receiver_maps.empty()) {
-    if (flags() & kBailoutOnUninitialized) {
-      return ReduceSoftDeoptimize(
-          node,
-          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
-    }
-    return NoChange();
+    return ReduceSoftDeoptimize(
+        node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
   }
   DCHECK(!nexus.IsUninitialized());
@@ -1744,12 +1736,8 @@ Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
   if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
     return NoChange();
   } else if (receiver_maps.empty()) {
-    if (flags() & kBailoutOnUninitialized) {
-      return ReduceSoftDeoptimize(
-          node,
-          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
-    }
-    return NoChange();
+    return ReduceSoftDeoptimize(
+        node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
   }
   DCHECK(!nexus.IsUninitialized());
@@ -1797,6 +1785,7 @@ Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
 Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(
     Node* node, DeoptimizeReason reason) {
+  if (flags() & kBailoutOnUninitialized) {
   Node* effect = NodeProperties::GetEffectInput(node);
   Node* control = NodeProperties::GetControlInput(node);
   Node* frame_state = NodeProperties::FindFrameStateBefore(node);
@@ -1809,6 +1798,8 @@ Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(
   node->TrimInputCount(0);
   NodeProperties::ChangeOp(node, common()->Dead());
   return Changed(node);
+  }
+  return NoChange();
 }
 Reduction JSNativeContextSpecialization::ReduceJSLoadPropertyWithEnumeratedKey(
@@ -2435,12 +2426,8 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreInArrayLiteral(
   if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
     return NoChange();
   } else if (receiver_maps.empty()) {
-    if (flags() & kBailoutOnUninitialized) {
-      return ReduceSoftDeoptimize(
-          node,
-          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
-    }
-    return NoChange();
+    return ReduceSoftDeoptimize(
+        node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
   }
   DCHECK(!nexus.IsUninitialized());
   DCHECK_EQ(ELEMENT, nexus.GetKeyType());
...