Commit b6c152e9 authored by epertoso, committed by Commit bot

[stubs] Removes the BranchIf.*() methods from CodeAssembler, changes their uses to Branch().

BranchIf and its helpers were introduced when exporting the schedule from the RawMachineAssembler did not guarantee a well-formed CFG. These methods, which were used to introduce extra blocks to keep the graph in edge-split form, are now unnecessary.
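
For illustration, a minimal sketch of the mechanical rewrite applied at every call site (the function name GenerateBranchExample and the operands lhs/rhs are hypothetical; Branch, Float64Equal, Bind, Return and BooleanConstant are the CodeStubAssembler entry points visible in the diff below):

// Sketch only; relies on V8's CodeStubAssembler machinery and is not
// buildable outside the v8 tree.
void GenerateBranchExample(CodeStubAssembler* assembler,
                           compiler::Node* lhs, compiler::Node* rhs) {
  CodeStubAssembler::Label if_true(assembler), if_false(assembler);

  // Before this CL, a generated helper fused the comparison and the branch,
  // routing each outcome through fresh blocks to keep edges split:
  //   assembler->BranchIfFloat64Equal(lhs, rhs, &if_true, &if_false);

  // After this CL, the comparison node is built explicitly and Branch() is
  // used directly; the scheduler now keeps the CFG in edge-split form itself.
  assembler->Branch(assembler->Float64Equal(lhs, rhs), &if_true, &if_false);

  assembler->Bind(&if_true);
  assembler->Return(assembler->BooleanConstant(true));

  assembler->Bind(&if_false);
  assembler->Return(assembler->BooleanConstant(false));
}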

BUG=

Review-Url: https://codereview.chromium.org/2426923002
Cr-Commit-Position: refs/heads/master@{#40402}
parent 6c85285b
......@@ -1475,8 +1475,9 @@ void Builtins::Generate_ArrayIncludes(CodeStubAssembler* assembler) {
assembler->GotoIf(assembler->WordNotEqual(assembler->LoadMap(element_k),
heap_number_map),
&continue_loop);
assembler->BranchIfFloat64Equal(
search_num.value(), assembler->LoadHeapNumberValue(element_k),
assembler->Branch(
assembler->Float64Equal(search_num.value(),
assembler->LoadHeapNumberValue(element_k)),
&return_true, &continue_loop);
assembler->Bind(&continue_loop);
......@@ -1590,8 +1591,8 @@ void Builtins::Generate_ArrayIncludes(CodeStubAssembler* assembler) {
Node* element_k = assembler->LoadFixedDoubleArrayElement(
elements, index_var.value(), MachineType::Float64(), 0,
CodeStubAssembler::INTPTR_PARAMETERS);
assembler->BranchIfFloat64Equal(element_k, search_num.value(),
&return_true, &continue_loop);
assembler->Branch(assembler->Float64Equal(element_k, search_num.value()),
&return_true, &continue_loop);
assembler->Bind(&continue_loop);
index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
assembler->Goto(&not_nan_loop);
......@@ -1650,8 +1651,8 @@ void Builtins::Generate_ArrayIncludes(CodeStubAssembler* assembler) {
elements, index_var.value(), MachineType::Float64(), 0,
CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
assembler->BranchIfFloat64Equal(element_k, search_num.value(),
&return_true, &continue_loop);
assembler->Branch(assembler->Float64Equal(element_k, search_num.value()),
&return_true, &continue_loop);
assembler->Bind(&continue_loop);
index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
assembler->Goto(&not_nan_loop);
......@@ -1916,8 +1917,9 @@ void Builtins::Generate_ArrayIndexOf(CodeStubAssembler* assembler) {
assembler->GotoIf(assembler->WordNotEqual(assembler->LoadMap(element_k),
heap_number_map),
&continue_loop);
assembler->BranchIfFloat64Equal(
search_num.value(), assembler->LoadHeapNumberValue(element_k),
assembler->Branch(
assembler->Float64Equal(search_num.value(),
assembler->LoadHeapNumberValue(element_k)),
&return_found, &continue_loop);
assembler->Bind(&continue_loop);
......@@ -2008,8 +2010,8 @@ void Builtins::Generate_ArrayIndexOf(CodeStubAssembler* assembler) {
Node* element_k = assembler->LoadFixedDoubleArrayElement(
elements, index_var.value(), MachineType::Float64(), 0,
CodeStubAssembler::INTPTR_PARAMETERS);
assembler->BranchIfFloat64Equal(element_k, search_num.value(),
&return_found, &continue_loop);
assembler->Branch(assembler->Float64Equal(element_k, search_num.value()),
&return_found, &continue_loop);
assembler->Bind(&continue_loop);
index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
assembler->Goto(&not_nan_loop);
......@@ -2049,8 +2051,8 @@ void Builtins::Generate_ArrayIndexOf(CodeStubAssembler* assembler) {
elements, index_var.value(), MachineType::Float64(), 0,
CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
assembler->BranchIfFloat64Equal(element_k, search_num.value(),
&return_found, &continue_loop);
assembler->Branch(assembler->Float64Equal(element_k, search_num.value()),
&return_found, &continue_loop);
assembler->Bind(&continue_loop);
index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
assembler->Goto(&not_nan_loop);
......
......@@ -68,9 +68,10 @@ void Builtins::Generate_NumberIsInteger(CodeStubAssembler* assembler) {
Node* integer = assembler->Float64Trunc(number_value);
// Check if {number}s value matches the integer (ruling out the infinities).
assembler->BranchIfFloat64Equal(assembler->Float64Sub(number_value, integer),
assembler->Float64Constant(0.0), &return_true,
&return_false);
assembler->Branch(
assembler->Float64Equal(assembler->Float64Sub(number_value, integer),
assembler->Float64Constant(0.0)),
&return_true, &return_false);
assembler->Bind(&return_true);
assembler->Return(assembler->BooleanConstant(true));
......@@ -139,9 +140,10 @@ void Builtins::Generate_NumberIsSafeInteger(CodeStubAssembler* assembler) {
&return_false);
// Check if the {integer} value is in safe integer range.
assembler->BranchIfFloat64LessThanOrEqual(
assembler->Float64Abs(integer),
assembler->Float64Constant(kMaxSafeInteger), &return_true, &return_false);
assembler->Branch(assembler->Float64LessThanOrEqual(
assembler->Float64Abs(integer),
assembler->Float64Constant(kMaxSafeInteger)),
&return_true, &return_false);
assembler->Bind(&return_true);
assembler->Return(assembler->BooleanConstant(true));
......
......@@ -292,8 +292,8 @@ void GenerateStringRelationalComparison(CodeStubAssembler* assembler,
assembler->Goto(&loop);
assembler->Bind(&if_valueisnotsame);
assembler->BranchIf(assembler->Uint32LessThan(lhs_value, rhs_value),
&if_less, &if_greater);
assembler->Branch(assembler->Uint32LessThan(lhs_value, rhs_value),
&if_less, &if_greater);
}
assembler->Bind(&if_done);
......
......@@ -836,7 +836,7 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
// Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
Node* zero = Float64Constant(0.0);
GotoIf(Float64LessThan(zero, value_value), if_true);
BranchIfFloat64LessThan(value_value, zero, if_true, if_false);
Branch(Float64LessThan(value_value, zero), if_true, if_false);
}
Bind(&if_valueisother);
......@@ -851,8 +851,8 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));
// Check if the {value} is undetectable.
BranchIfWord32Equal(value_map_undetectable, Int32Constant(0), if_true,
if_false);
Branch(Word32Equal(value_map_undetectable, Int32Constant(0)), if_true,
if_false);
}
}
}
......@@ -2180,8 +2180,8 @@ Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
Bind(&if_valueisequal);
{
GotoUnless(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
BranchIfInt32LessThan(Float64ExtractHighWord32(value), Int32Constant(0),
&if_valueisheapnumber, &if_valueisint32);
Branch(Int32LessThan(Float64ExtractHighWord32(value), Int32Constant(0)),
&if_valueisheapnumber, &if_valueisint32);
}
Bind(&if_valueisnotequal);
Goto(&if_valueisheapnumber);
......@@ -3670,7 +3670,7 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
Goto(if_keyisunique);
Bind(&if_hascachedindex);
var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash));
var_index->Bind(BitFieldDecodeWord<Name::ArrayIndexValueBits>(hash));
Goto(if_keyisindex);
}
......@@ -3994,8 +3994,8 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
Label if_inobject(this), if_backing_store(this);
Variable var_double_value(this, MachineRepresentation::kFloat64);
Label rebox_double(this, &var_double_value);
BranchIfUintPtrLessThan(field_index, inobject_properties, &if_inobject,
&if_backing_store);
Branch(UintPtrLessThan(field_index, inobject_properties), &if_inobject,
&if_backing_store);
Bind(&if_inobject);
{
Comment("if_inobject");
......@@ -4005,9 +4005,9 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
IntPtrConstant(kPointerSize));
Label if_double(this), if_tagged(this);
BranchIfWord32NotEqual(representation,
Int32Constant(Representation::kDouble), &if_tagged,
&if_double);
Branch(Word32NotEqual(representation,
Int32Constant(Representation::kDouble)),
&if_tagged, &if_double);
Bind(&if_tagged);
{
var_value->Bind(LoadObjectField(object, field_offset));
......@@ -4033,9 +4033,9 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
Node* value = LoadFixedArrayElement(properties, field_index);
Label if_double(this), if_tagged(this);
BranchIfWord32NotEqual(representation,
Int32Constant(Representation::kDouble), &if_tagged,
&if_double);
Branch(Word32NotEqual(representation,
Int32Constant(Representation::kDouble)),
&if_tagged, &if_double);
Bind(&if_tagged);
{
var_value->Bind(value);
......@@ -6199,8 +6199,8 @@ void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
Node* invalid_enum_cache_sentinel =
SmiConstant(Smi::FromInt(kInvalidEnumCacheSentinel));
Node* enum_length = EnumLength(current_map.value());
BranchIfWordEqual(enum_length, invalid_enum_cache_sentinel, use_runtime,
&loop);
Branch(WordEqual(enum_length, invalid_enum_cache_sentinel), use_runtime,
&loop);
}
// Check that there are no elements. |current_js_object| contains
......@@ -6211,24 +6211,24 @@ void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
Node* elements = LoadElements(current_js_object.value());
Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
// Check that there are no elements.
BranchIfWordEqual(elements, empty_fixed_array, &if_no_elements,
&if_elements);
Branch(WordEqual(elements, empty_fixed_array), &if_no_elements,
&if_elements);
Bind(&if_elements);
{
// Second chance, the object may be using the empty slow element
// dictionary.
Node* slow_empty_dictionary =
LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
BranchIfWordNotEqual(elements, slow_empty_dictionary, use_runtime,
&if_no_elements);
Branch(WordNotEqual(elements, slow_empty_dictionary), use_runtime,
&if_no_elements);
}
Bind(&if_no_elements);
{
// Update map prototype.
current_js_object.Bind(LoadMapPrototype(current_map.value()));
BranchIfWordEqual(current_js_object.value(), NullConstant(), use_cache,
&next);
Branch(WordEqual(current_js_object.value(), NullConstant()), use_cache,
&next);
}
}
......@@ -6238,7 +6238,7 @@ void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
current_map.Bind(LoadMap(current_js_object.value()));
Node* enum_length = EnumLength(current_map.value());
Node* zero_constant = SmiConstant(Smi::kZero);
BranchIf(WordEqual(enum_length, zero_constant), &loop, use_runtime);
Branch(WordEqual(enum_length, zero_constant), &loop, use_runtime);
}
}
......@@ -6321,7 +6321,7 @@ void CodeStubAssembler::BuildFastLoop(
// to force the loop header check at the end of the loop and branch forward to
// it from the pre-header). The extra branch is slower in the case that the
// loop actually iterates.
BranchIf(WordEqual(var.value(), end_index), &after_loop, &loop);
Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
Bind(&loop);
{
if (mode == IndexAdvanceMode::kPre) {
......@@ -6331,7 +6331,7 @@ void CodeStubAssembler::BuildFastLoop(
if (mode == IndexAdvanceMode::kPost) {
var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
}
BranchIf(WordNotEqual(var.value(), end_index), &loop, &after_loop);
Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
}
Bind(&after_loop);
}
......@@ -6484,16 +6484,16 @@ void CodeStubAssembler::BranchIfNumericRelationalComparison(
// Perform a fast floating point comparison.
switch (mode) {
case kLessThan:
BranchIfFloat64LessThan(lhs, rhs, if_true, if_false);
Branch(Float64LessThan(lhs, rhs), if_true, if_false);
break;
case kLessThanOrEqual:
BranchIfFloat64LessThanOrEqual(lhs, rhs, if_true, if_false);
Branch(Float64LessThanOrEqual(lhs, rhs), if_true, if_false);
break;
case kGreaterThan:
BranchIfFloat64GreaterThan(lhs, rhs, if_true, if_false);
Branch(Float64GreaterThan(lhs, rhs), if_true, if_false);
break;
case kGreaterThanOrEqual:
BranchIfFloat64GreaterThanOrEqual(lhs, rhs, if_true, if_false);
Branch(Float64GreaterThanOrEqual(lhs, rhs), if_true, if_false);
break;
}
}
......@@ -6799,17 +6799,17 @@ compiler::Node* CodeStubAssembler::RelationalComparison(
// Perform a fast floating point comparison.
switch (mode) {
case kLessThan:
BranchIfFloat64LessThan(lhs, rhs, &return_true, &return_false);
Branch(Float64LessThan(lhs, rhs), &return_true, &return_false);
break;
case kLessThanOrEqual:
BranchIfFloat64LessThanOrEqual(lhs, rhs, &return_true, &return_false);
Branch(Float64LessThanOrEqual(lhs, rhs), &return_true, &return_false);
break;
case kGreaterThan:
BranchIfFloat64GreaterThan(lhs, rhs, &return_true, &return_false);
Branch(Float64GreaterThan(lhs, rhs), &return_true, &return_false);
break;
case kGreaterThanOrEqual:
BranchIfFloat64GreaterThanOrEqual(lhs, rhs, &return_true,
&return_false);
Branch(Float64GreaterThanOrEqual(lhs, rhs), &return_true,
&return_false);
break;
}
}
......@@ -7223,10 +7223,11 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
// undetectable (i.e. either also Null or Undefined or some
// undetectable JSReceiver).
Node* rhs_bitfield = LoadMapBitField(rhs_map);
BranchIfWord32Equal(
Word32And(rhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0), &if_notequal, &if_equal);
Branch(Word32Equal(
Word32And(rhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0)),
&if_notequal, &if_equal);
}
}
......@@ -7321,20 +7322,21 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
Label if_rhsisundetectable(this),
if_rhsisnotundetectable(this, Label::kDeferred);
Node* rhs_bitfield = LoadMapBitField(rhs_map);
BranchIfWord32Equal(
Word32And(rhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0), &if_rhsisnotundetectable,
&if_rhsisundetectable);
Branch(Word32Equal(
Word32And(rhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0)),
&if_rhsisnotundetectable, &if_rhsisundetectable);
Bind(&if_rhsisundetectable);
{
// Check if {lhs} is an undetectable JSReceiver.
Node* lhs_bitfield = LoadMapBitField(lhs_map);
BranchIfWord32Equal(
Word32And(lhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0), &if_notequal, &if_equal);
Branch(Word32Equal(
Word32And(lhs_bitfield,
Int32Constant(1 << Map::kIsUndetectable)),
Int32Constant(0)),
&if_notequal, &if_equal);
}
Bind(&if_rhsisnotundetectable);
......@@ -7367,7 +7369,7 @@ compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
Node* rhs = var_fcmp_rhs.value();
// Perform a fast floating point comparison.
BranchIfFloat64Equal(lhs, rhs, &if_equal, &if_notequal);
Branch(Float64Equal(lhs, rhs), &if_equal, &if_notequal);
}
Bind(&if_equal);
......@@ -7487,7 +7489,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
Node* rhs_value = SmiToFloat64(rhs);
// Perform a floating point comparison of {lhs} and {rhs}.
BranchIfFloat64Equal(lhs_value, rhs_value, &if_equal, &if_notequal);
Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
}
Bind(&if_rhsisnotsmi);
......@@ -7507,7 +7509,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
Node* rhs_value = LoadHeapNumberValue(rhs);
// Perform a floating point comparison of {lhs} and {rhs}.
BranchIfFloat64Equal(lhs_value, rhs_value, &if_equal, &if_notequal);
Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
}
Bind(&if_rhsisnotnumber);
......@@ -7613,7 +7615,7 @@ compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
Node* rhs_value = LoadHeapNumberValue(rhs);
// Perform a floating point comparison of {lhs} and {rhs}.
BranchIfFloat64Equal(lhs_value, rhs_value, &if_equal, &if_notequal);
Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
}
Bind(&if_rhsisnotnumber);
......
......@@ -167,22 +167,22 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void BranchIfSmiEqual(compiler::Node* a, compiler::Node* b, Label* if_true,
Label* if_false) {
BranchIf(SmiEqual(a, b), if_true, if_false);
Branch(SmiEqual(a, b), if_true, if_false);
}
void BranchIfSmiLessThan(compiler::Node* a, compiler::Node* b, Label* if_true,
Label* if_false) {
BranchIf(SmiLessThan(a, b), if_true, if_false);
Branch(SmiLessThan(a, b), if_true, if_false);
}
void BranchIfSmiLessThanOrEqual(compiler::Node* a, compiler::Node* b,
Label* if_true, Label* if_false) {
BranchIf(SmiLessThanOrEqual(a, b), if_true, if_false);
Branch(SmiLessThanOrEqual(a, b), if_true, if_false);
}
void BranchIfFloat64IsNaN(compiler::Node* value, Label* if_true,
Label* if_false) {
BranchIfFloat64Equal(value, value, if_false, if_true);
Branch(Float64Equal(value, value), if_false, if_true);
}
// Branches to {if_true} if ToBoolean applied to {value} yields true,
......
......@@ -2629,8 +2629,8 @@ compiler::Node* FastCloneShallowArrayStub::Generate(
assembler->Comment("fast double elements path");
if (FLAG_debug_code) {
Label correct_elements_map(assembler), abort(assembler, Label::kDeferred);
assembler->BranchIf(assembler->IsFixedDoubleArrayMap(elements_map),
&correct_elements_map, &abort);
assembler->Branch(assembler->IsFixedDoubleArrayMap(elements_map),
&correct_elements_map, &abort);
assembler->Bind(&abort);
{
......
......@@ -328,15 +328,6 @@ Node* CodeAssembler::Projection(int index, Node* value) {
return raw_assembler_->Projection(index, value);
}
void CodeAssembler::BranchIf(Node* condition, Label* if_true, Label* if_false) {
Label if_condition_is_true(this), if_condition_is_false(this);
Branch(condition, &if_condition_is_true, &if_condition_is_false);
Bind(&if_condition_is_true);
Goto(if_true);
Bind(&if_condition_is_false);
Goto(if_false);
}
void CodeAssembler::GotoIfException(Node* node, Label* if_exception,
Variable* exception_var) {
Label success(this), exception(this, Label::kDeferred);
......
......@@ -444,16 +444,6 @@ class V8_EXPORT_PRIVATE CodeAssembler {
void GotoIfException(Node* node, Label* if_exception,
Variable* exception_var = nullptr);
// Branching helpers.
void BranchIf(Node* condition, Label* if_true, Label* if_false);
#define BRANCH_HELPER(name) \
void BranchIf##name(Node* a, Node* b, Label* if_true, Label* if_false) { \
BranchIf(name(a, b), if_true, if_false); \
}
CODE_ASSEMBLER_COMPARE_BINARY_OP_LIST(BRANCH_HELPER)
#undef BRANCH_HELPER
// Helpers which delegate to RawMachineAssembler.
Factory* factory() const;
Isolate* isolate() const;
......
......@@ -97,7 +97,7 @@ Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
Label context_search(this, 2, context_search_loop_variables);
// Fast path if the depth is 0.
BranchIfWord32Equal(depth, Int32Constant(0), &context_found, &context_search);
Branch(Word32Equal(depth, Int32Constant(0)), &context_found, &context_search);
// Loop until the depth is 0.
Bind(&context_search);
......@@ -106,8 +106,8 @@ Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
cur_context.Bind(
LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));
BranchIfWord32Equal(cur_depth.value(), Int32Constant(0), &context_found,
&context_search);
Branch(Word32Equal(cur_depth.value(), Int32Constant(0)), &context_found,
&context_search);
}
Bind(&context_found);
......@@ -573,7 +573,7 @@ Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
Node* feedback_value = LoadWeakCellValue(feedback_element);
Node* is_monomorphic = WordEqual(function, feedback_value);
BranchIf(is_monomorphic, &handle_monomorphic, &extra_checks);
Branch(is_monomorphic, &handle_monomorphic, &extra_checks);
Bind(&handle_monomorphic);
{
......@@ -604,7 +604,7 @@ Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
Node* is_megamorphic = WordEqual(
feedback_element,
HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
BranchIf(is_megamorphic, &call, &check_allocation_site);
Branch(is_megamorphic, &call, &check_allocation_site);
Bind(&check_allocation_site);
{
......@@ -763,7 +763,7 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
Node* instance_type = LoadInstanceType(constructor);
Node* is_js_function =
WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
BranchIf(is_js_function, &js_function, &call_construct);
Branch(is_js_function, &js_function, &call_construct);
Bind(&js_function);
{
......@@ -778,7 +778,7 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
LoadFixedArrayElement(type_feedback_vector, slot_id);
Node* feedback_value = LoadWeakCellValue(feedback_element);
Node* is_monomorphic = WordEqual(constructor, feedback_value);
BranchIf(is_monomorphic, &call_construct_function, &extra_checks);
Branch(is_monomorphic, &call_construct_function, &extra_checks);
Bind(&extra_checks);
{
......@@ -801,7 +801,7 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
// monomorphic.
Comment("check if weak cell is cleared");
Node* is_smi = TaggedIsSmi(feedback_value);
BranchIf(is_smi, &initialize, &mark_megamorphic);
Branch(is_smi, &initialize, &mark_megamorphic);
}
Bind(&check_allocation_site);
......@@ -817,8 +817,8 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
LoadFixedArrayElement(LoadNativeContext(context),
Int32Constant(Context::ARRAY_FUNCTION_INDEX));
Node* is_array_function = WordEqual(context_slot, constructor);
BranchIf(is_array_function, &set_alloc_feedback_and_call,
&mark_megamorphic);
Branch(is_array_function, &set_alloc_feedback_and_call,
&mark_megamorphic);
}
Bind(&set_alloc_feedback_and_call);
......@@ -833,7 +833,7 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
Comment("check if uninitialized");
Node* is_uninitialized = WordEqual(
feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
BranchIf(is_uninitialized, &initialize, &mark_megamorphic);
Branch(is_uninitialized, &initialize, &mark_megamorphic);
}
Bind(&initialize);
......@@ -845,7 +845,7 @@ Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
LoadFixedArrayElement(LoadNativeContext(context),
Int32Constant(Context::ARRAY_FUNCTION_INDEX));
Node* is_array_function = WordEqual(context_slot, constructor);
BranchIf(is_array_function, &create_allocation_site, &create_weak_cell);
Branch(is_array_function, &create_allocation_site, &create_weak_cell);
Bind(&create_allocation_site);
{
......@@ -989,7 +989,7 @@ Node* InterpreterAssembler::Jump(Node* delta) {
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
Label match(this), no_match(this);
BranchIf(condition, &match, &no_match);
Branch(condition, &match, &no_match);
Bind(&match);
Jump(delta);
Bind(&no_match);
......@@ -1022,7 +1022,7 @@ Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
Node* is_star = WordEqual(target_bytecode, star_bytecode);
BranchIf(is_star, &do_inline_star, &done);
Branch(is_star, &do_inline_star, &done);
Bind(&do_inline_star);
{
......@@ -1223,7 +1223,7 @@ void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
BailoutReason bailout_reason) {
Label ok(this), abort(this, Label::kDeferred);
BranchIfWordEqual(lhs, rhs, &ok, &abort);
Branch(WordEqual(lhs, rhs), &ok, &abort);
Bind(&abort);
Abort(bailout_reason);
......@@ -1253,7 +1253,7 @@ void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
Node* counter_reached_max = WordEqual(
old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
BranchIf(counter_reached_max, &counter_saturated, &counter_ok);
Branch(counter_reached_max, &counter_saturated, &counter_ok);
Bind(&counter_ok);
{
......
......@@ -1222,7 +1222,7 @@ void Interpreter::DoAddSmi(InterpreterAssembler* assembler) {
// {right} is known to be a Smi.
// Check if the {left} is a Smi to take the fast path.
__ BranchIf(__ TaggedIsSmi(left), &fastpath, &slowpath);
__ Branch(__ TaggedIsSmi(left), &fastpath, &slowpath);
__ Bind(&fastpath);
{
// Try fast Smi addition first.
......@@ -1232,7 +1232,7 @@ void Interpreter::DoAddSmi(InterpreterAssembler* assembler) {
// Check if the Smi addition overflowed.
Label if_notoverflow(assembler);
__ BranchIf(overflow, &slowpath, &if_notoverflow);
__ Branch(overflow, &slowpath, &if_notoverflow);
__ Bind(&if_notoverflow);
{
__ UpdateFeedback(__ Int32Constant(BinaryOperationFeedback::kSignedSmall),
......@@ -1276,7 +1276,7 @@ void Interpreter::DoSubSmi(InterpreterAssembler* assembler) {
// {right} is known to be a Smi.
// Check if the {left} is a Smi to take the fast path.
__ BranchIf(__ TaggedIsSmi(left), &fastpath, &slowpath);
__ Branch(__ TaggedIsSmi(left), &fastpath, &slowpath);
__ Bind(&fastpath);
{
// Try fast Smi subtraction first.
......@@ -1286,7 +1286,7 @@ void Interpreter::DoSubSmi(InterpreterAssembler* assembler) {
// Check if the Smi subtraction overflowed.
Label if_notoverflow(assembler);
__ BranchIf(overflow, &slowpath, &if_notoverflow);
__ Branch(overflow, &slowpath, &if_notoverflow);
__ Bind(&if_notoverflow);
{
__ UpdateFeedback(__ Int32Constant(BinaryOperationFeedback::kSignedSmall),
......@@ -1530,7 +1530,7 @@ void Interpreter::DoLogicalNot(InterpreterAssembler* assembler) {
Label if_true(assembler), if_false(assembler), end(assembler);
Node* true_value = __ BooleanConstant(true);
Node* false_value = __ BooleanConstant(false);
__ BranchIfWordEqual(value, true_value, &if_true, &if_false);
__ Branch(__ WordEqual(value, true_value), &if_true, &if_false);
__ Bind(&if_true);
{
result.Bind(false_value);
......@@ -2064,7 +2064,7 @@ void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
Node* use_fast_shallow_clone = __ Word32And(
bytecode_flags,
__ Int32Constant(CreateArrayLiteralFlags::FastShallowCloneBit::kMask));
__ BranchIf(use_fast_shallow_clone, &fast_shallow_clone, &call_runtime);
__ Branch(use_fast_shallow_clone, &fast_shallow_clone, &call_runtime);
__ Bind(&fast_shallow_clone);
{
......@@ -2109,7 +2109,7 @@ void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
Node* fast_clone_properties_count =
__ BitFieldDecode<CreateObjectLiteralFlags::FastClonePropertiesCountBits>(
bytecode_flags);
__ BranchIf(fast_clone_properties_count, &if_fast_clone, &if_not_fast_clone);
__ Branch(fast_clone_properties_count, &if_fast_clone, &if_not_fast_clone);
__ Bind(&if_fast_clone);
{
......@@ -2256,7 +2256,7 @@ void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
Node* duplicate_parameters_bit = __ Int32Constant(
1 << SharedFunctionInfo::kHasDuplicateParametersBitWithinByte);
Node* compare = __ Word32And(compiler_hints, duplicate_parameters_bit);
__ BranchIf(compare, &if_duplicate_parameters, &if_not_duplicate_parameters);
__ Branch(compare, &if_duplicate_parameters, &if_not_duplicate_parameters);
__ Bind(&if_not_duplicate_parameters);
{
......@@ -2312,7 +2312,7 @@ void Interpreter::DoStackCheck(InterpreterAssembler* assembler) {
Label ok(assembler), stack_check_interrupt(assembler, Label::kDeferred);
Node* interrupt = __ StackCheckTriggeredInterrupt();
__ BranchIf(interrupt, &stack_check_interrupt, &ok);
__ Branch(interrupt, &stack_check_interrupt, &ok);
__ Bind(&ok);
__ Dispatch();
......@@ -2485,7 +2485,7 @@ void Interpreter::DoForInNext(InterpreterAssembler* assembler) {
// Check if we can use the for-in fast path potentially using the enum cache.
Label if_fast(assembler), if_slow(assembler, Label::kDeferred);
Node* receiver_map = __ LoadObjectField(receiver, HeapObject::kMapOffset);
__ BranchIfWordEqual(receiver_map, cache_type, &if_fast, &if_slow);
__ Branch(__ WordEqual(receiver_map, cache_type), &if_fast, &if_slow);
__ Bind(&if_fast);
{
// Enum cache in use for {receiver}, the {key} is definitely valid.
......@@ -2522,7 +2522,7 @@ void Interpreter::DoForInContinue(InterpreterAssembler* assembler) {
// Check if {index} is at {cache_length} already.
Label if_true(assembler), if_false(assembler), end(assembler);
__ BranchIfWordEqual(index, cache_length, &if_true, &if_false);
__ Branch(__ WordEqual(index, cache_length), &if_true, &if_false);
__ Bind(&if_true);
{
__ SetAccumulator(__ BooleanConstant(false));
......@@ -2593,7 +2593,7 @@ void Interpreter::DoSuspendGenerator(InterpreterAssembler* assembler) {
STATIC_ASSERT(StepFrame > StepNext);
STATIC_ASSERT(LastStepAction == StepFrame);
Node* step_next = __ Int32Constant(StepNext);
__ BranchIfInt32LessThanOrEqual(step_next, step_action, &if_stepping, &ok);
__ Branch(__ Int32LessThanOrEqual(step_next, step_action), &if_stepping, &ok);
__ Bind(&ok);
Node* array =
......
......@@ -1257,12 +1257,12 @@ TEST(TryProbeStubCache) {
m.TryProbeStubCache(&stub_cache, receiver, name, &if_handler, &var_handler,
&if_miss);
m.Bind(&if_handler);
m.BranchIfWordEqual(expected_handler, var_handler.value(), &passed,
&failed);
m.Branch(m.WordEqual(expected_handler, var_handler.value()), &passed,
&failed);
m.Bind(&if_miss);
m.BranchIfWordEqual(expected_handler, m.IntPtrConstant(0), &passed,
&failed);
m.Branch(m.WordEqual(expected_handler, m.IntPtrConstant(0)), &passed,
&failed);
m.Bind(&passed);
m.Return(m.BooleanConstant(true));
......