Commit 0a47bc63 authored by Georg Neis, committed by Commit Bot

[turbofan] Eliminate more non-const reference arguments

... mostly by turning them into pointer arguments.

After this CL, all remaining non-const reference arguments in
the compiler directory are in the backend.

Bug: v8:9429
Change-Id: I6a546da0fe93179e1a0b12296632591cbf209808
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1719185
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62930}
parent 0cedd864
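Before the diff: the mechanical change throughout this CL is that output parameters previously passed by non-const reference (and silenced with NOLINT(runtime/references)) are now passed by pointer, following the Google C++ style guide rule on output parameters. The stand-alone sketch below is not code from this CL; the helper name ToInt32ConstantSketch and its body are illustrative stand-ins for the CodeAssembler::ToInt32Constant signature change, shown only to make the call-site effect visible.

// Sketch only: ToInt32ConstantSketch is a hypothetical stand-in; the real
// CodeAssembler helpers inspect IR nodes rather than plain integers.
#include <cstdint>
#include <iostream>

// Before (old style):  bool ToInt32Constant(Node* node, int32_t& out_value);
// After  (this CL):    bool ToInt32Constant(Node* node, int32_t* out_value);

static bool ToInt32ConstantSketch(int64_t node_value, int32_t* out_value) {
  // Succeed only if the value fits in 32 bits, mirroring a "try get constant"
  // helper; write the result through the pointer on success.
  if (node_value < INT32_MIN || node_value > INT32_MAX) return false;
  *out_value = static_cast<int32_t>(node_value);
  return true;
}

int main() {
  int32_t constant_value = 0;
  // The '&' at the call site makes the possible mutation of constant_value
  // explicit, which a non-const reference parameter would hide.
  if (ToInt32ConstantSketch(13, &constant_value)) {
    std::cout << "constant: " << constant_value << "\n";
  }
  return 0;
}

In the diff that follows, this shows up as the added '&' at each call site, e.g. ToInt32Constant(test, &constant_test).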
@@ -245,7 +245,7 @@ bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
   int32_t constant_test;
   Smi smi_test;
   if (mode == INTPTR_PARAMETERS) {
-    if (ToInt32Constant(test, constant_test) && constant_test == 0) {
+    if (ToInt32Constant(test, &constant_test) && constant_test == 0) {
       return true;
     }
   } else {
@@ -262,7 +262,7 @@ bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
                                                        ParameterMode mode) {
   int32_t int32_constant;
   if (mode == INTPTR_PARAMETERS) {
-    if (ToInt32Constant(maybe_constant, int32_constant)) {
+    if (ToInt32Constant(maybe_constant, &int32_constant)) {
       *value = int32_constant;
       return true;
     }
@@ -542,7 +542,7 @@ TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
 TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
   intptr_t constant_value;
-  if (ToIntPtrConstant(value, constant_value)) {
+  if (ToIntPtrConstant(value, &constant_value)) {
     return (static_cast<uintptr_t>(constant_value) <=
             static_cast<uintptr_t>(Smi::kMaxValue))
                ? Int32TrueConstant()
@@ -554,7 +554,7 @@ TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
 TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
   int32_t constant_value;
-  if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
+  if (ToInt32Constant(value, &constant_value) && Smi::IsValid(constant_value)) {
     return SmiConstant(constant_value);
   }
   TNode<Smi> smi =
@@ -564,7 +564,7 @@ TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
 TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
   intptr_t constant_value;
-  if (ToIntPtrConstant(value, constant_value)) {
+  if (ToIntPtrConstant(value, &constant_value)) {
     return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
   }
   return Signed(
@@ -1065,7 +1065,7 @@ TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
   intptr_t size_in_bytes_constant;
   bool size_in_bytes_is_constant = false;
-  if (ToIntPtrConstant(size_in_bytes, size_in_bytes_constant)) {
+  if (ToIntPtrConstant(size_in_bytes, &size_in_bytes_constant)) {
     size_in_bytes_is_constant = true;
     CHECK(Internals::IsValidSmi(size_in_bytes_constant));
     CHECK_GT(size_in_bytes_constant, 0);
@@ -1224,7 +1224,7 @@ TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
       !new_space || !allow_large_objects || FLAG_young_generation_large_objects;
   if (!allow_large_objects) {
     intptr_t size_constant;
-    if (ToIntPtrConstant(size_in_bytes, size_constant)) {
+    if (ToIntPtrConstant(size_in_bytes, &size_constant)) {
       CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
     } else {
       CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
@@ -2717,7 +2717,7 @@ void CodeStubAssembler::StoreObjectField(Node* object, int offset,
 void CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
                                          Node* value) {
   int const_offset;
-  if (ToInt32Constant(offset, const_offset)) {
+  if (ToInt32Constant(offset, &const_offset)) {
     StoreObjectField(object, const_offset, value);
   } else {
     Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
@@ -2745,7 +2745,7 @@ void CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
     Node* object, SloppyTNode<IntPtrT> offset, Node* value,
     MachineRepresentation rep) {
   int const_offset;
-  if (ToInt32Constant(offset, const_offset)) {
+  if (ToInt32Constant(offset, &const_offset)) {
     return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
   }
   StoreNoWriteBarrier(rep, object,
@@ -5237,8 +5237,8 @@ void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
   int to_index_constant = 0, from_index_constant = 0;
   bool index_same = (from_encoding == to_encoding) &&
                     (from_index == to_index ||
-                     (ToInt32Constant(from_index, from_index_constant) &&
-                      ToInt32Constant(to_index, to_index_constant) &&
+                     (ToInt32Constant(from_index, &from_index_constant) &&
+                      ToInt32Constant(to_index, &to_index_constant) &&
                       from_index_constant == to_index_constant));
   BuildFastLoop(
       vars, from_offset, limit_offset,
@@ -8464,8 +8464,8 @@ TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
                                             SloppyTNode<IntPtrT> right) {
   intptr_t left_constant;
   intptr_t right_constant;
-  if (ToIntPtrConstant(left, left_constant) &&
-      ToIntPtrConstant(right, right_constant)) {
+  if (ToIntPtrConstant(left, &left_constant) &&
+      ToIntPtrConstant(right, &right_constant)) {
     return IntPtrConstant(std::max(left_constant, right_constant));
   }
   return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
@@ -8476,8 +8476,8 @@ TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
                                             SloppyTNode<IntPtrT> right) {
   intptr_t left_constant;
   intptr_t right_constant;
-  if (ToIntPtrConstant(left, left_constant) &&
-      ToIntPtrConstant(right, right_constant)) {
+  if (ToIntPtrConstant(left, &left_constant) &&
+      ToIntPtrConstant(right, &right_constant)) {
     return IntPtrConstant(std::min(left_constant, right_constant));
   }
   return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
@@ -10080,7 +10080,7 @@ TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
     index_node = BitcastTaggedSignedToWord(index_node);
   } else {
     DCHECK(mode == INTPTR_PARAMETERS);
-    constant_index = ToIntPtrConstant(index_node, index);
+    constant_index = ToIntPtrConstant(index_node, &index);
   }
   if (constant_index) {
     return IntPtrConstant(base_size + element_size * index);
@@ -11102,7 +11102,7 @@ Node* CodeStubAssembler::BuildFastLoop(
   // loop actually iterates.
   Node* first_check = WordEqual(var.value(), end_index);
   int32_t first_check_val;
-  if (ToInt32Constant(first_check, first_check_val)) {
+  if (ToInt32Constant(first_check, &first_check_val)) {
     if (first_check_val) return var.value();
     Goto(&loop);
   } else {
@@ -11135,9 +11135,9 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
   CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
                                  IsPropertyArray(fixed_array)));
   int32_t first_val;
-  bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
+  bool constant_first = ToInt32Constant(first_element_inclusive, &first_val);
   int32_t last_val;
-  bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
+  bool constent_last = ToInt32Constant(last_element_exclusive, &last_val);
   if (constant_first && constent_last) {
     int delta = last_val - first_val;
     DCHECK_GE(delta, 0);
...
@@ -268,7 +268,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
     } else {
       DCHECK_EQ(mode, ParameterMode::INTPTR_PARAMETERS);
       intptr_t constant;
-      if (ToIntPtrConstant(node, constant)) {
+      if (ToIntPtrConstant(node, &constant)) {
         *out = constant;
         return true;
       }
...
@@ -853,15 +853,10 @@ class V8_EXPORT_PRIVATE CodeAssembler {
     return value ? Int32TrueConstant() : Int32FalseConstant();
   }
-  // TODO(jkummerow): The style guide wants pointers for output parameters.
-  // https://google.github.io/styleguide/cppguide.html#Output_Parameters
-  bool ToInt32Constant(Node* node,
-                       int32_t& out_value);  // NOLINT(runtime/references)
-  bool ToInt64Constant(Node* node,
-                       int64_t& out_value);  // NOLINT(runtime/references)
+  bool ToInt32Constant(Node* node, int32_t* out_value);
+  bool ToInt64Constant(Node* node, int64_t* out_value);
+  bool ToIntPtrConstant(Node* node, intptr_t* out_value);
   bool ToSmiConstant(Node* node, Smi* out_value);
-  bool ToIntPtrConstant(Node* node,
-                        intptr_t& out_value);  // NOLINT(runtime/references)
   bool IsUndefinedConstant(TNode<Object> node);
   bool IsNullConstant(TNode<Object> node);
...
@@ -1133,9 +1133,9 @@ bool CanInlineArrayIteratingBuiltin(JSHeapBroker* broker,
   return true;
 }
-bool CanInlineArrayResizingBuiltin(
-    JSHeapBroker* broker, MapHandles const& receiver_maps,
-    std::vector<ElementsKind>& kinds,  // NOLINT(runtime/references)
-    bool builtin_is_push = false) {
+bool CanInlineArrayResizingBuiltin(JSHeapBroker* broker,
+                                   MapHandles const& receiver_maps,
+                                   std::vector<ElementsKind>* kinds,
+                                   bool builtin_is_push = false) {
   DCHECK_NE(0, receiver_maps.size());
   for (auto receiver_map : receiver_maps) {
@@ -1147,14 +1147,14 @@ bool CanInlineArrayResizingBuiltin(
       return false;
     }
     ElementsKind current_kind = map.elements_kind();
-    auto kind_ptr = kinds.data();
+    auto kind_ptr = kinds->data();
     size_t i;
-    for (i = 0; i < kinds.size(); i++, kind_ptr++) {
+    for (i = 0; i < kinds->size(); i++, kind_ptr++) {
       if (UnionElementsKindUptoPackedness(kind_ptr, current_kind)) {
         break;
       }
     }
-    if (i == kinds.size()) kinds.push_back(current_kind);
+    if (i == kinds->size()) kinds->push_back(current_kind);
   }
   return true;
 }
@@ -4497,7 +4497,7 @@ Reduction JSCallReducer::ReduceArrayPrototypePush(Node* node) {
   MapHandles const& receiver_maps = inference.GetMaps();
   std::vector<ElementsKind> kinds;
-  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, kinds, true)) {
+  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, &kinds, true)) {
     return inference.NoChange();
   }
   if (!dependencies()->DependOnNoElementsProtector()) UNREACHABLE();
@@ -4632,7 +4632,7 @@ Reduction JSCallReducer::ReduceArrayPrototypePop(Node* node) {
   MapHandles const& receiver_maps = inference.GetMaps();
   std::vector<ElementsKind> kinds;
-  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, kinds)) {
+  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, &kinds)) {
     return inference.NoChange();
   }
   if (!dependencies()->DependOnNoElementsProtector()) UNREACHABLE();
@@ -4770,7 +4770,7 @@ Reduction JSCallReducer::ReduceArrayPrototypeShift(Node* node) {
   MapHandles const& receiver_maps = inference.GetMaps();
   std::vector<ElementsKind> kinds;
-  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, kinds)) {
+  if (!CanInlineArrayResizingBuiltin(broker(), receiver_maps, &kinds)) {
     return inference.NoChange();
   }
   if (!dependencies()->DependOnNoElementsProtector()) UNREACHABLE();
...
@@ -332,7 +332,7 @@ base::Optional<SharedFunctionInfoRef> JSInliner::DetermineCallTarget(
 // - context         : The context (as SSA value) bound by the call target.
 // - feedback_vector : The target is guaranteed to use this feedback vector.
 FeedbackVectorRef JSInliner::DetermineCallContext(Node* node,
-                                                  Node*& context_out) {
+                                                  Node** context_out) {
   DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
   HeapObjectMatcher match(node->InputAt(0));
@@ -342,7 +342,7 @@ FeedbackVectorRef JSInliner::DetermineCallContext(Node* node,
     CHECK(function.has_feedback_vector());
     // The inlinee specializes to the context from the JSFunction object.
-    context_out = jsgraph()->Constant(function.context());
+    *context_out = jsgraph()->Constant(function.context());
     return function.feedback_vector();
   }
@@ -354,7 +354,7 @@ FeedbackVectorRef JSInliner::DetermineCallContext(Node* node,
     FeedbackCellRef cell(FeedbackCellRef(broker(), p.feedback_cell()));
     // The inlinee uses the locally provided context at instantiation.
-    context_out = NodeProperties::GetContextInput(match.node());
+    *context_out = NodeProperties::GetContextInput(match.node());
     return cell.value().AsFeedbackVector();
   }
@@ -425,7 +425,7 @@ Reduction JSInliner::ReduceJSCall(Node* node) {
                  : ""));
   // Determine the targets feedback vector and its context.
   Node* context;
-  FeedbackVectorRef feedback_vector = DetermineCallContext(node, context);
+  FeedbackVectorRef feedback_vector = DetermineCallContext(node, &context);
   if (FLAG_concurrent_inlining &&
       !shared_info.value().IsSerializedForCompilation(feedback_vector)) {
...
@@ -59,8 +59,7 @@ class JSInliner final : public AdvancedReducer {
   SourcePositionTable* const source_positions_;
   base::Optional<SharedFunctionInfoRef> DetermineCallTarget(Node* node);
-  FeedbackVectorRef DetermineCallContext(
-      Node* node, Node*& context_out);  // NOLINT(runtime/references)
+  FeedbackVectorRef DetermineCallContext(Node* node, Node** context_out);
   Node* CreateArtificialFrameState(Node* node, Node* outer_frame_state,
                                    int parameter_count, BailoutId bailout_id,
...
@@ -549,8 +549,8 @@ JSTypeHintLowering::ReduceStoreKeyedOperation(const Operator* op, Node* obj,
   return LoweringResult::NoChange();
 }
-Node* JSTypeHintLowering::TryBuildSoftDeopt(FeedbackNexus& nexus, Node* effect,
-                                            Node* control,
+Node* JSTypeHintLowering::TryBuildSoftDeopt(FeedbackNexus const& nexus,
+                                            Node* effect, Node* control,
                                             DeoptimizeReason reason) const {
   if ((flags() & kBailoutOnUninitialized) && nexus.IsUninitialized()) {
     Node* deoptimize = jsgraph()->graph()->NewNode(
...
@@ -153,9 +153,8 @@ class JSTypeHintLowering {
  private:
   friend class JSSpeculativeBinopBuilder;
-  Node* TryBuildSoftDeopt(FeedbackNexus& nexus,  // NOLINT(runtime/references)
-                          Node* effect, Node* control,
-                          DeoptimizeReason reson) const;
+  Node* TryBuildSoftDeopt(FeedbackNexus const& nexus, Node* effect,
+                          Node* control, DeoptimizeReason reson) const;
   JSGraph* jsgraph() const { return jsgraph_; }
   Isolate* isolate() const;
...
@@ -820,7 +820,7 @@ class SpecialRPONumberer : public ZoneObject {
     if (num_loops > static_cast<int>(loops_.size())) {
       // Otherwise, compute the loop information from the backedges in order
       // to perform a traversal that groups loop bodies together.
-      ComputeLoopInfo(stack_, num_loops, &backedges_);
+      ComputeLoopInfo(&stack_, num_loops, &backedges_);
       // Initialize the "loop stack". Note the entry could be a loop header.
       LoopInfo* loop =
@@ -962,8 +962,7 @@ class SpecialRPONumberer : public ZoneObject {
   }
   // Computes loop membership from the backedges of the control flow graph.
-  void ComputeLoopInfo(
-      ZoneVector<SpecialRPOStackFrame>& queue,  // NOLINT(runtime/references)
-      size_t num_loops, ZoneVector<Backedge>* backedges) {
+  void ComputeLoopInfo(ZoneVector<SpecialRPOStackFrame>* queue,
+                       size_t num_loops, ZoneVector<Backedge>* backedges) {
     // Extend existing loop membership vectors.
     for (LoopInfo& loop : loops_) {
@@ -993,19 +992,19 @@ class SpecialRPONumberer : public ZoneObject {
         if (!loops_[loop_num].members->Contains(member->id().ToInt())) {
           loops_[loop_num].members->Add(member->id().ToInt());
         }
-        queue[queue_length++].block = member;
+        (*queue)[queue_length++].block = member;
       }
       // Propagate loop membership backwards. All predecessors of M up to the
       // loop header H are members of the loop too. O(|blocks between M and H|).
       while (queue_length > 0) {
-        BasicBlock* block = queue[--queue_length].block;
+        BasicBlock* block = (*queue)[--queue_length].block;
         for (size_t i = 0; i < block->PredecessorCount(); i++) {
           BasicBlock* pred = block->PredecessorAt(i);
           if (pred != header) {
             if (!loops_[loop_num].members->Contains(pred->id().ToInt())) {
               loops_[loop_num].members->Add(pred->id().ToInt());
-              queue[queue_length++].block = pred;
+              (*queue)[queue_length++].block = pred;
             }
           }
         }
...
@@ -357,8 +357,7 @@ class SerializerForBackgroundCompilation {
                              bool with_spread = false);
   void ProcessApiCall(Handle<SharedFunctionInfo> target,
                       const HintsVector& arguments);
-  void ProcessReceiverMapForApiCall(
-      FunctionTemplateInfoRef& target,  // NOLINT(runtime/references)
-      Handle<Map> receiver);
+  void ProcessReceiverMapForApiCall(FunctionTemplateInfoRef target,
+                                    Handle<Map> receiver);
   void ProcessBuiltinCall(Handle<SharedFunctionInfo> target,
                           const HintsVector& arguments,
@@ -399,8 +398,8 @@ class SerializerForBackgroundCompilation {
   void ProcessContextAccess(const Hints& context_hints, int slot, int depth,
                             ContextProcessingMode mode);
-  void ProcessImmutableLoad(ContextRef& context,  // NOLINT(runtime/references)
-                            int slot, ContextProcessingMode mode);
+  void ProcessImmutableLoad(ContextRef const& context, int slot,
+                            ContextProcessingMode mode);
   void ProcessLdaLookupGlobalSlot(interpreter::BytecodeArrayIterator* iterator);
   void ProcessLdaLookupContextSlot(
       interpreter::BytecodeArrayIterator* iterator);
@@ -631,7 +630,7 @@ class SerializerForBackgroundCompilation::Environment : public ZoneObject {
   // Appends the hints for the given register range to {dst} (in order).
   void ExportRegisterHints(interpreter::Register first, size_t count,
-                           HintsVector& dst);  // NOLINT(runtime/references)
+                           HintsVector* dst);
  private:
   friend std::ostream& operator<<(std::ostream& out, const Environment& env);
@@ -1066,7 +1065,7 @@ void SerializerForBackgroundCompilation::VisitInvokeIntrinsic(
   size_t reg_count = iterator->GetRegisterCountOperand(2);
   CHECK_EQ(reg_count, 3);
   HintsVector arguments(zone());
-  environment()->ExportRegisterHints(first_reg, reg_count, arguments);
+  environment()->ExportRegisterHints(first_reg, reg_count, &arguments);
   Hints const& resolution_hints = arguments[1];  // The resolution object.
   ProcessHintsForPromiseResolve(resolution_hints);
   environment()->accumulator_hints().Clear();
@@ -1107,7 +1106,7 @@ void SerializerForBackgroundCompilation::VisitPopContext(
 }
 void SerializerForBackgroundCompilation::ProcessImmutableLoad(
-    ContextRef& context_ref, int slot, ContextProcessingMode mode) {
+    ContextRef const& context_ref, int slot, ContextProcessingMode mode) {
   DCHECK(mode == kSerializeSlot || mode == kSerializeSlotAndAddToAccumulator);
   base::Optional<ObjectRef> slot_value = context_ref.get(slot, true);
@@ -1555,7 +1554,7 @@ void SerializerForBackgroundCompilation::ProcessCallVarArgs(
     receiver.AddConstant(broker()->isolate()->factory()->undefined_value());
     arguments.push_back(receiver);
   }
-  environment()->ExportRegisterHints(first_reg, reg_count, arguments);
+  environment()->ExportRegisterHints(first_reg, reg_count, &arguments);
   ProcessCallOrConstruct(callee, base::nullopt, arguments, slot);
 }
@@ -1601,7 +1600,7 @@ void SerializerForBackgroundCompilation::ProcessApiCall(
 }
 void SerializerForBackgroundCompilation::ProcessReceiverMapForApiCall(
-    FunctionTemplateInfoRef& target, Handle<Map> receiver) {
+    FunctionTemplateInfoRef target, Handle<Map> receiver) {
   if (receiver->is_access_check_needed()) {
     return;
   }
@@ -1871,10 +1870,10 @@ void SerializerForBackgroundCompilation::VisitSwitchOnGeneratorState(
 }
 void SerializerForBackgroundCompilation::Environment::ExportRegisterHints(
-    interpreter::Register first, size_t count, HintsVector& dst) {
+    interpreter::Register first, size_t count, HintsVector* dst) {
   const int reg_base = first.index();
   for (int i = 0; i < static_cast<int>(count); ++i) {
-    dst.push_back(register_hints(interpreter::Register(reg_base + i)));
+    dst->push_back(register_hints(interpreter::Register(reg_base + i)));
   }
 }
@@ -1888,7 +1887,7 @@ void SerializerForBackgroundCompilation::VisitConstruct(
   const Hints& new_target = environment()->accumulator_hints();
   HintsVector arguments(zone());
-  environment()->ExportRegisterHints(first_reg, reg_count, arguments);
+  environment()->ExportRegisterHints(first_reg, reg_count, &arguments);
   ProcessCallOrConstruct(callee, new_target, arguments, slot);
 }
@@ -1903,7 +1902,7 @@ void SerializerForBackgroundCompilation::VisitConstructWithSpread(
   const Hints& new_target = environment()->accumulator_hints();
   HintsVector arguments(zone());
-  environment()->ExportRegisterHints(first_reg, reg_count, arguments);
+  environment()->ExportRegisterHints(first_reg, reg_count, &arguments);
   ProcessCallOrConstruct(callee, new_target, arguments, slot, true);
 }
...
@@ -24,7 +24,7 @@ using Variable = CodeAssemblerVariable;
 Node* SmiTag(CodeAssembler& m,  // NOLINT(runtime/references)
              Node* value) {
   int32_t constant_value;
-  if (m.ToInt32Constant(value, constant_value) &&
+  if (m.ToInt32Constant(value, &constant_value) &&
       Smi::IsValid(constant_value)) {
     return m.SmiConstant(Smi::FromInt(constant_value));
   }
@@ -374,24 +374,24 @@ TEST(TestToConstant) {
   int32_t value32;
   int64_t value64;
   Node* a = m.Int32Constant(5);
-  CHECK(m.ToInt32Constant(a, value32));
-  CHECK(m.ToInt64Constant(a, value64));
+  CHECK(m.ToInt32Constant(a, &value32));
+  CHECK(m.ToInt64Constant(a, &value64));
   a = m.Int64Constant(static_cast<int64_t>(1) << 32);
-  CHECK(!m.ToInt32Constant(a, value32));
-  CHECK(m.ToInt64Constant(a, value64));
+  CHECK(!m.ToInt32Constant(a, &value32));
+  CHECK(m.ToInt64Constant(a, &value64));
   a = m.Int64Constant(13);
-  CHECK(m.ToInt32Constant(a, value32));
-  CHECK(m.ToInt64Constant(a, value64));
+  CHECK(m.ToInt32Constant(a, &value32));
+  CHECK(m.ToInt64Constant(a, &value64));
   a = UndefinedConstant(m);
-  CHECK(!m.ToInt32Constant(a, value32));
-  CHECK(!m.ToInt64Constant(a, value64));
+  CHECK(!m.ToInt32Constant(a, &value32));
+  CHECK(!m.ToInt64Constant(a, &value64));
   a = UndefinedConstant(m);
-  CHECK(!m.ToInt32Constant(a, value32));
-  CHECK(!m.ToInt64Constant(a, value64));
+  CHECK(!m.ToInt32Constant(a, &value32));
+  CHECK(!m.ToInt64Constant(a, &value64));
 }
 TEST(DeferredCodePhiHints) {
...