Commit 06515b6f authored by sgjesse@chromium.org

ARM: Change BranchOnSmi/BranchOnNotSmi to JumpIfSmi/JumpIfNotSmi

Review URL: http://codereview.chromium.org/6272019

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6477 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 12c74ba0
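
For context on the mechanics behind the rename: both helpers test V8's smi (small integer) tag bit. The sketch below is illustrative standalone C++, not V8 code, assuming the 32-bit encoding where kSmiTag == 0, kSmiTagMask == 1, and kSmiTagSize == 1 (the STATIC_ASSERT(kSmiTag == 0) lines in the hunks below confirm the tag value; TagSmi, UntagSmi, and IsSmi are hypothetical names used only here).

#include <cassert>
#include <cstdint>

const intptr_t kSmiTag = 0;      // smis have a clear low bit
const intptr_t kSmiTagMask = 1;  // heap object pointers have it set (kHeapObjectTag)
const int kSmiTagSize = 1;

intptr_t TagSmi(intptr_t n)   { return n << kSmiTagSize; }  // tag: shift left, low bit stays 0
intptr_t UntagSmi(intptr_t v) { return v >> kSmiTagSize; }  // untag: the "ASR, kSmiTagSize" pattern below

bool IsSmi(intptr_t tagged) {
  // The predicate JumpIfSmi/JumpIfNotSmi branch on:
  //   tst(value, Operand(kSmiTagMask)); b(eq/ne, label);
  return (tagged & kSmiTagMask) == kSmiTag;
}

int main() {
  assert(IsSmi(TagSmi(42)) && UntagSmi(TagSmi(42)) == 42);
  assert(!IsSmi(0x1000 | 1));  // pointer-like value with the tag bit set
  return 0;
}
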
......@@ -566,7 +566,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// if it's a string already before calling the conversion builtin.
Label convert_argument;
__ bind(&not_cached);
__ BranchOnSmi(r0, &convert_argument);
__ JumpIfSmi(r0, &convert_argument);
// Is it a String?
__ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
......
......@@ -452,7 +452,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Label* not_number) {
Label is_smi, done;
__ BranchOnSmi(object, &is_smi);
__ JumpIfSmi(object, &is_smi);
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
// Handle loading a double from a heap number.
......@@ -984,7 +984,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
Label is_smi;
Label load_result_from_cache;
if (!object_is_smi) {
__ BranchOnSmi(object, &is_smi);
__ JumpIfSmi(object, &is_smi);
if (CpuFeatures::IsSupported(VFP3)) {
CpuFeatures::Scope scope(VFP3);
__ CheckMap(object,
......@@ -1010,7 +1010,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
Register probe = mask;
__ ldr(probe,
FieldMemOperand(scratch1, FixedArray::kHeaderSize));
__ BranchOnSmi(probe, not_found);
__ JumpIfSmi(probe, not_found);
__ sub(scratch2, object, Operand(kHeapObjectTag));
__ vldr(d0, scratch2, HeapNumber::kValueOffset);
__ sub(probe, probe, Operand(kHeapObjectTag));
......@@ -2109,7 +2109,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
Label not_smi;
if (ShouldGenerateSmiCode() && specialized_on_rhs_) {
Label lhs_is_unsuitable;
__ BranchOnNotSmi(lhs, &not_smi);
__ JumpIfNotSmi(lhs, &not_smi);
if (IsPowerOf2(constant_rhs_)) {
if (op_ == Token::MOD) {
__ and_(rhs,
......@@ -2708,12 +2708,12 @@ void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
Label call_runtime;
// Check if first argument is a string.
__ BranchOnSmi(left, &call_runtime);
__ JumpIfSmi(left, &call_runtime);
__ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
__ b(ge, &call_runtime);
// First argument is a string, test second.
__ BranchOnSmi(right, &call_runtime);
__ JumpIfSmi(right, &call_runtime);
__ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
__ b(ge, &call_runtime);
......@@ -2759,7 +2759,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
Register overwritable_operand = mode_ == OVERWRITE_LEFT ? r1 : r0;
// If the overwritable operand is already an object, we skip the
// allocation of a heap number.
__ BranchOnNotSmi(overwritable_operand, &skip_allocation);
__ JumpIfNotSmi(overwritable_operand, &skip_allocation);
// Allocate a heap number for the result.
__ AllocateHeapNumber(
result, scratch1, scratch2, heap_number_map, gc_required);
......@@ -2789,7 +2789,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
if (CpuFeatures::IsSupported(VFP3)) {
// Load argument and check if it is a smi.
__ BranchOnNotSmi(r0, &input_not_smi);
__ JumpIfNotSmi(r0, &input_not_smi);
CpuFeatures::Scope scope(VFP3);
// Input is a smi. Convert to double and load the low and high words
......@@ -2943,7 +2943,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
} else if (op_ == Token::BIT_NOT) {
if (include_smi_code_) {
Label non_smi;
__ BranchOnNotSmi(r0, &non_smi);
__ JumpIfNotSmi(r0, &non_smi);
__ mvn(r0, Operand(r0));
// Bit-clear inverted smi-tag.
__ bic(r0, r0, Operand(kSmiTagMask));
......@@ -3472,7 +3472,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
}
// Check that the left hand is a JS object and load map.
__ BranchOnSmi(object, &not_js_object);
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// If there is a call site cache don't look in the global cache, but do the
......@@ -3495,7 +3495,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ TryGetFunctionPrototype(function, prototype, scratch, &slow);
// Check that the function prototype is a JS object.
__ BranchOnSmi(prototype, &slow);
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Update the global instanceof or call site inlined cache with the current
......@@ -3575,7 +3575,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
__ BranchOnSmi(function, &slow);
__ JumpIfSmi(function, &slow);
__ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
__ b(ne, &slow);
......@@ -3587,7 +3587,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ BranchOnNotSmi(object, &object_not_null_or_smi);
__ JumpIfNotSmi(object, &object_not_null_or_smi);
__ mov(r0, Operand(Smi::FromInt(1)));
__ Ret(HasArgsInRegisters() ? 0 : 2);
......@@ -3631,7 +3631,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// Check that the key is a smi.
Label slow;
__ BranchOnNotSmi(r1, &slow);
__ JumpIfNotSmi(r1, &slow);
// Check if the calling frame is an arguments adaptor frame.
Label adaptor;
......@@ -4213,7 +4213,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ ldr(r1, MemOperand(sp, argc_ * kPointerSize));
// Check if receiver is a smi (which is a number value).
__ BranchOnSmi(r1, &receiver_is_value);
__ JumpIfSmi(r1, &receiver_is_value);
// Check if the receiver is a valid JS object.
__ CompareObjectType(r1, r2, r2, FIRST_JS_OBJECT_TYPE);
......@@ -4236,7 +4236,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ BranchOnSmi(r1, &slow);
__ JumpIfSmi(r1, &slow);
// Get the map of the function object.
__ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
__ b(ne, &slow);
......@@ -4340,7 +4340,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Label got_char_code;
// If the receiver is a smi trigger the non-string case.
__ BranchOnSmi(object_, receiver_not_string_);
__ JumpIfSmi(object_, receiver_not_string_);
// Fetch the instance type of the receiver into result register.
__ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
......@@ -4350,7 +4350,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
__ b(ne, receiver_not_string_);
// If the index is non-smi trigger the non-smi case.
__ BranchOnNotSmi(index_, &index_not_smi_);
__ JumpIfNotSmi(index_, &index_not_smi_);
// Put smi-tagged index into scratch register.
__ mov(scratch_, index_);
......@@ -4447,7 +4447,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
call_helper.AfterCall(masm);
// If index is still not a smi, it must be out of range.
__ BranchOnNotSmi(scratch_, index_out_of_range_);
__ JumpIfNotSmi(scratch_, index_out_of_range_);
// Otherwise, return to the fast path.
__ jmp(&got_smi_index_);
......
......@@ -1762,7 +1762,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
// sp[2]: applicand.
// Check that the receiver really is a JavaScript object.
__ BranchOnSmi(receiver_reg, &build_args);
__ JumpIfSmi(receiver_reg, &build_args);
// We allow all JSObjects including JSFunctions. As long as
// JS_FUNCTION_TYPE is the last instance type and it is right
// after LAST_JS_OBJECT_TYPE, we do not have to check the upper
......@@ -1774,7 +1774,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
// Check that applicand.apply is Function.prototype.apply.
__ ldr(r0, MemOperand(sp, kPointerSize));
__ BranchOnSmi(r0, &build_args);
__ JumpIfSmi(r0, &build_args);
__ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
__ b(ne, &build_args);
Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
......@@ -1785,7 +1785,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
// Check that applicand is a function.
__ ldr(r1, MemOperand(sp, 2 * kPointerSize));
__ BranchOnSmi(r1, &build_args);
__ JumpIfSmi(r1, &build_args);
__ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
__ b(ne, &build_args);
......@@ -4618,8 +4618,8 @@ void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
ASSERT(runtime.entry_frame() == NULL);
runtime.set_entry_frame(frame_);
__ BranchOnNotSmi(exponent, &exponent_nonsmi);
__ BranchOnNotSmi(base, &base_nonsmi);
__ JumpIfNotSmi(exponent, &exponent_nonsmi);
__ JumpIfNotSmi(base, &base_nonsmi);
heap_number_map = r6;
__ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
......
......@@ -817,7 +817,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Convert the object to a JS object.
Label convert, done_convert;
__ BranchOnSmi(r0, &convert);
__ JumpIfSmi(r0, &convert);
__ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
__ b(hs, &done_convert);
__ bind(&convert);
......@@ -2135,7 +2135,7 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r0, ip);
__ b(eq, if_true);
......@@ -2167,7 +2167,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(ge, if_true, if_false, fall_through);
......@@ -2188,7 +2188,7 @@ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
__ tst(r1, Operand(1 << Map::kIsUndetectable));
......@@ -2234,7 +2234,7 @@ void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
......@@ -2255,7 +2255,7 @@ void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
......@@ -2276,7 +2276,7 @@ void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
__ BranchOnSmi(r0, if_false);
__ JumpIfSmi(r0, if_false);
__ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(eq, if_true, if_false, fall_through);
......@@ -2383,7 +2383,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
VisitForAccumulatorValue(args->at(0));
// If the object is a smi, we return null.
__ BranchOnSmi(r0, &null);
__ JumpIfSmi(r0, &null);
// Check that the object is a JS object but take special care of JS
// functions to make sure they have 'Function' as their class.
......@@ -2534,7 +2534,7 @@ void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
Label done;
// If the object is a smi return the object.
__ BranchOnSmi(r0, &done);
__ JumpIfSmi(r0, &done);
// If the object is not a value type, return the object.
__ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
__ b(ne, &done);
......@@ -2564,7 +2564,7 @@ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
Label done;
// If the object is a smi, return the value.
__ BranchOnSmi(r1, &done);
__ JumpIfSmi(r1, &done);
// If the object is not a value type, return the value.
__ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
......@@ -3087,7 +3087,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
bool inline_smi_code = ShouldInlineSmiCase(expr->op());
if (inline_smi_code) {
Label call_stub;
__ BranchOnNotSmi(r0, &call_stub);
__ JumpIfNotSmi(r0, &call_stub);
__ mvn(r0, Operand(r0));
// Bit-clear inverted smi-tag.
__ bic(r0, r0, Operand(kSmiTagMask));
......@@ -3174,7 +3174,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Call ToNumber only if operand is not a smi.
Label no_conversion;
__ BranchOnSmi(r0, &no_conversion);
__ JumpIfSmi(r0, &no_conversion);
__ push(r0);
__ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
__ bind(&no_conversion);
......@@ -3208,7 +3208,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ b(vs, &stub_call);
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
__ BranchOnSmi(r0, &done);
__ JumpIfSmi(r0, &done);
__ bind(&stub_call);
// Call stub. Undo operation first.
__ sub(r0, r0, Operand(Smi::FromInt(count_value)));
......@@ -3501,7 +3501,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r0, Operand(r1));
__ BranchOnNotSmi(r2, &slow_case);
__ JumpIfNotSmi(r2, &slow_case);
__ cmp(r1, r0);
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
......
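
A note on the last hunk above: the orr/JumpIfNotSmi pair checks two tagged values with a single test, since a set tag bit in either operand survives the OR; this is the same trick the JumpIfNotBothSmi helper (declared in macro-assembler-arm.h below) wraps up. A minimal sketch, reusing the assumed encoding from the example near the top:

#include <cstdint>

// Mirrors the inlined pair:
//   __ orr(r2, r0, Operand(r1));
//   __ JumpIfNotSmi(r2, &slow_case);
// Assumes kSmiTag == 0 and kSmiTagMask == 1, as in the sketch near the top.
bool BothSmi(intptr_t a, intptr_t b) {
  return ((a | b) & 1) == 0;  // any set tag bit makes the OR fail the test
}
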
......@@ -420,7 +420,7 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
int interceptor_bit,
Label* slow) {
// Check that the object isn't a smi.
__ BranchOnSmi(receiver, slow);
__ JumpIfSmi(receiver, slow);
// Get the map of the receiver.
__ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check bit field.
......@@ -750,7 +750,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
Label index_smi, index_string;
// Check that the key is a smi.
__ BranchOnNotSmi(r2, &check_string);
__ JumpIfNotSmi(r2, &check_string);
__ bind(&index_smi);
// Now the key is known to be a smi. This place is also jumped to from below
// where a numeric string is converted to a smi.
......@@ -1166,7 +1166,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
Register receiver = r1;
// Check that the key is a smi.
__ BranchOnNotSmi(key, &check_string);
__ JumpIfNotSmi(key, &check_string);
__ bind(&index_smi);
// Now the key is known to be a smi. This place is also jumped to from below
// where a numeric string is converted to a smi.
......@@ -1347,7 +1347,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
Label slow;
// Check that the receiver isn't a smi.
__ BranchOnSmi(r1, &slow);
__ JumpIfSmi(r1, &slow);
// Check that the key is an array index, that is Uint32.
__ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
......@@ -1471,7 +1471,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ b(ne, &slow);
// Check that the value is a smi. If a conversion is needed call into the
// runtime to convert and clamp.
__ BranchOnNotSmi(value, &slow);
__ JumpIfNotSmi(value, &slow);
__ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the key.
__ ldr(ip, FieldMemOperand(elements, PixelArray::kLengthOffset));
__ cmp(r4, Operand(ip));
......@@ -1590,7 +1590,7 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
Register scratch = r3;
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, &miss);
__ JumpIfSmi(receiver, &miss);
// Check that the object is a JS array.
__ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
......@@ -1604,7 +1604,7 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
__ b(ne, &miss);
// Check that value is a smi.
__ BranchOnNotSmi(value, &miss);
__ JumpIfNotSmi(value, &miss);
// Prepare tail call to StoreIC_ArrayLength.
__ Push(receiver, value);
......
......@@ -1084,7 +1084,7 @@ void LCodeGen::DoModI(LModI* instr) {
__ bind(deferred->exit());
// If the result in r0 is a Smi, untag it, else deoptimize.
__ BranchOnNotSmi(result, &deoptimize);
__ JumpIfNotSmi(result, &deoptimize);
__ SmiUntag(result);
__ b(al, &done);
......@@ -1164,7 +1164,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
__ bind(deferred->exit());
// If the result in r0 is a Smi, untag it, else deoptimize.
__ BranchOnNotSmi(result, &deoptimize);
__ JumpIfNotSmi(result, &deoptimize);
__ SmiUntag(result);
__ b(&done);
......@@ -1734,7 +1734,7 @@ Condition LCodeGen::EmitIsObject(Register input,
Register temp2,
Label* is_not_object,
Label* is_object) {
__ BranchOnSmi(input, is_not_object);
__ JumpIfSmi(input, is_not_object);
__ LoadRoot(temp1, Heap::kNullValueRootIndex);
__ cmp(input, temp1);
......@@ -2037,7 +2037,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
ASSERT(result.is(r0));
// A Smi is not an instance of anything.
__ BranchOnSmi(object, &false_result);
__ JumpIfSmi(object, &false_result);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
......@@ -2613,7 +2613,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
new DeferredMathAbsTaggedHeapNumber(this, instr);
Register input = ToRegister(instr->InputAt(0));
// Smi check.
__ BranchOnNotSmi(input, deferred->entry());
__ JumpIfNotSmi(input, deferred->entry());
// If smi, handle it directly.
EmitIntegerMathAbs(instr);
__ bind(deferred->exit());
......
......@@ -1385,7 +1385,7 @@ void MacroAssembler::CheckMap(Register obj,
Label* fail,
bool is_heap_object) {
if (!is_heap_object) {
BranchOnSmi(obj, fail);
JumpIfSmi(obj, fail);
}
ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
mov(ip, Operand(map));
......@@ -1400,7 +1400,7 @@ void MacroAssembler::CheckMap(Register obj,
Label* fail,
bool is_heap_object) {
if (!is_heap_object) {
BranchOnSmi(obj, fail);
JumpIfSmi(obj, fail);
}
ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
LoadRoot(ip, index);
......@@ -1414,7 +1414,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
Register scratch,
Label* miss) {
// Check that the receiver isn't a smi.
BranchOnSmi(function, miss);
JumpIfSmi(function, miss);
// Check that the function really is a function. Load map into result reg.
CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
......@@ -1513,7 +1513,7 @@ void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
Label done;
if ((flags & OBJECT_NOT_SMI) == 0) {
Label not_smi;
BranchOnNotSmi(object, &not_smi);
JumpIfNotSmi(object, &not_smi);
// Remove smi tag and convert to double.
mov(scratch1, Operand(object, ASR, kSmiTagSize));
vmov(scratch3, scratch1);
......
......@@ -545,16 +545,6 @@ class MacroAssembler: public Assembler {
}
inline void BranchOnSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
b(eq, smi_label);
}
inline void BranchOnNotSmi(Register value, Label* not_smi_label) {
tst(value, Operand(kSmiTagMask));
b(ne, not_smi_label);
}
// Generates code for reporting that an illegal operation has
// occurred.
void IllegalOperation(int num_arguments);
......@@ -740,6 +730,16 @@ class MacroAssembler: public Assembler {
mov(dst, Operand(src, ASR, kSmiTagSize));
}
// Jump if the register contains a smi.
inline void JumpIfSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
b(eq, smi_label);
}
// Jump if the register contains a non-smi.
inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
tst(value, Operand(kSmiTagMask));
b(ne, not_smi_label);
}
// Jump if either of the registers contain a non-smi.
void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
// Jump if either of the registers contain a smi.
......
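
The moved definitions above confirm the rename is purely mechanical: each helper still emits the same tst/branch pair. A hypothetical call-site fragment in the style used throughout this CL (the registers and labels are illustrative, not taken from the diff):

Label not_smi, done;
__ JumpIfNotSmi(r0, &not_smi);              // tst r0, #kSmiTagMask; bne not_smi
__ mov(r1, Operand(r0, ASR, kSmiTagSize));  // smi path: untag, as in the call sites above
__ b(&done);
__ bind(&not_smi);
// ... heap object path, e.g. CompareObjectType or a heap number check ...
__ bind(&done);
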
......@@ -525,7 +525,7 @@ static void GenerateCallFunction(MacroAssembler* masm,
// -----------------------------------
// Check that the function really is a function.
__ BranchOnSmi(r1, miss);
__ JumpIfSmi(r1, miss);
__ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
__ b(ne, miss);
......@@ -664,7 +664,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, miss);
__ JumpIfSmi(receiver, miss);
CallOptimization optimization(lookup);
......@@ -1247,7 +1247,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, miss);
__ JumpIfSmi(receiver, miss);
// So far the most popular follow-ups for interceptor loads are FIELD
// and CALLBACKS, so inline only them; other cases may be added
......@@ -1515,7 +1515,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
__ ldr(receiver, MemOperand(sp, argc * kPointerSize));
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, &miss);
__ JumpIfSmi(receiver, &miss);
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object), receiver,
......@@ -1569,7 +1569,7 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
__ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
// Check for a smi.
__ BranchOnNotSmi(r4, &with_write_barrier);
__ JumpIfNotSmi(r4, &with_write_barrier);
__ bind(&exit);
__ Drop(argc + 1);
__ Ret();
......@@ -1676,7 +1676,7 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
__ ldr(receiver, MemOperand(sp, argc * kPointerSize));
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, &miss);
__ JumpIfSmi(receiver, &miss);
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object),
......@@ -2013,7 +2013,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
__ ldr(r1, MemOperand(sp, 1 * kPointerSize));
STATIC_ASSERT(kSmiTag == 0);
__ BranchOnSmi(r1, &miss);
__ JumpIfSmi(r1, &miss);
CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
&miss);
......@@ -2172,7 +2172,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
// Check if the argument is a smi.
Label not_smi;
STATIC_ASSERT(kSmiTag == 0);
__ BranchOnNotSmi(r0, &not_smi);
__ JumpIfNotSmi(r0, &not_smi);
// Do bitwise not or do nothing depending on the sign of the
// argument.
......@@ -3365,10 +3365,10 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
Register receiver = r1;
// Check that the object isn't a smi.
__ BranchOnSmi(receiver, &slow);
__ JumpIfSmi(receiver, &slow);
// Check that the key is a smi.
__ BranchOnNotSmi(key, &slow);
__ JumpIfNotSmi(key, &slow);
// Check that the object is a JS object. Load map into r2.
__ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE);
......@@ -3649,7 +3649,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
// r3 mostly holds the elements array or the destination external array.
// Check that the object isn't a smi.
__ BranchOnSmi(receiver, &slow);
__ JumpIfSmi(receiver, &slow);
// Check that the object is a JS object. Load map into r3.
__ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE);
......@@ -3662,7 +3662,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
__ b(ne, &slow);
// Check that the key is a smi.
__ BranchOnNotSmi(key, &slow);
__ JumpIfNotSmi(key, &slow);
// Check that the elements array is the appropriate type of ExternalArray.
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
......@@ -3682,7 +3682,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
// runtime for all other kinds of values.
// r3: external array.
// r4: key (integer).
__ BranchOnNotSmi(value, &check_heap_number);
__ JumpIfNotSmi(value, &check_heap_number);
__ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value.
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
......