MIPS: Cleanup: use JumpIf[Not]Smi() whenever we can

Port r8322 (7b8b4a951f).

BUG=
TEST=

Review URL: http://codereview.chromium.org/8428004
Patch from Gergely Kis <gergely@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9885 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
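All of the hunks below follow the same pattern: a hand-rolled smi check, an And against kSmiTagMask followed by a Branch on the result, is collapsed into a single JumpIfSmi() or JumpIfNotSmi() macro call. The sketch below is illustration only, not part of the patch; it spells out the tag test that both forms perform, assuming V8's standard smi tagging on 32-bit MIPS (kSmiTag == 0, kSmiTagMask == 1, values shifted left by one), which the macro-assembler hunks further down assert.

```cpp
// Illustration only, not part of the patch. Assumes the smi tagging scheme
// asserted in the macro-assembler hunks below: kSmiTag == 0, kSmiTagMask == 1.
#include <cstdint>
#include <cstdio>

static const intptr_t kSmiTag = 0;
static const intptr_t kSmiTagMask = 1;

// Both the old sequence
//   __ And(t0, a1, Operand(kSmiTagMask));
//   __ Branch(&non_function_call, eq, t0, Operand(zero_reg));
// and the new
//   __ JumpIfSmi(a1, &non_function_call);
// take the branch exactly when this predicate holds for the value in a1.
static bool IsSmi(intptr_t value) {
  return (value & kSmiTagMask) == kSmiTag;
}

int main() {
  intptr_t smi = 42 << 1;         // smi-encoded 42: low tag bit clear
  intptr_t heap_object = 0x1001;  // heap object pointers carry a set tag bit
  printf("IsSmi(smi) = %d, IsSmi(heap_object) = %d\n",
         IsSmi(smi), IsSmi(heap_object));
  return 0;
}
```

The macro presumably still emits an equivalent andi plus conditional branch under the hood, so this is a readability cleanup rather than a code-generation change.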
@@ -628,8 +628,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
Label slow, non_function_call;
// Check that the function is not a smi.
-__ And(t0, a1, Operand(kSmiTagMask));
-__ Branch(&non_function_call, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(a1, &non_function_call);
// Check that the function is a JSFunction.
__ GetObjectType(a1, a2, a2);
__ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
@@ -705,8 +704,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Load the initial map and verify that it is in fact a map.
// a1: constructor function
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
-__ And(t0, a2, Operand(kSmiTagMask));
-__ Branch(&rt_call, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(a2, &rt_call);
__ GetObjectType(a2, a3, t4);
__ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
@@ -984,8 +982,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
-__ And(t0, v0, Operand(kSmiTagMask));
-__ Branch(&use_receiver, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(v0, &use_receiver);
// If the type of the result (stored in its map) is less than
// FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
@@ -1284,8 +1281,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ sll(at, a0, kPointerSizeLog2);
__ addu(at, sp, at);
__ lw(a1, MemOperand(at));
-__ And(at, a1, Operand(kSmiTagMask));
-__ Branch(&non_function, eq, at, Operand(zero_reg));
+__ JumpIfSmi(a1, &non_function);
__ GetObjectType(a1, a2, a2);
__ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
@@ -1524,8 +1520,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ Branch(&push_receiver, ne, t3, Operand(zero_reg));
// Compute the receiver in non-strict mode.
-__ And(t3, a0, Operand(kSmiTagMask));
-__ Branch(&call_to_object, eq, t3, Operand(zero_reg));
+__ JumpIfSmi(a0, &call_to_object);
__ LoadRoot(a1, Heap::kNullValueRootIndex);
__ Branch(&use_global_receiver, eq, a0, Operand(a1));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
......
@@ -1156,8 +1156,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
(lhs.is(a1) && rhs.is(a0)));
Label lhs_is_smi;
-__ And(t0, lhs, Operand(kSmiTagMask));
-__ Branch(&lhs_is_smi, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(lhs, &lhs_is_smi);
// Rhs is a Smi.
// Check whether the non-smi is a heap number.
__ GetObjectType(lhs, t4, t4);
@@ -4712,8 +4711,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check that the third argument is a positive smi less than the subject
// string length. A negative value will be greater (unsigned comparison).
__ lw(a0, MemOperand(sp, kPreviousIndexOffset));
-__ And(at, a0, Operand(kSmiTagMask));
-__ Branch(&runtime, ne, at, Operand(zero_reg));
+__ JumpIfNotSmi(a0, &runtime);
__ Branch(&runtime, ls, a3, Operand(a0));
// a2: Number of capture registers
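The RegExpExecStub hunk above also relies on a small trick noted in its comment: because the index and the length are both smi-tagged, a single unsigned comparison rejects negative indices as well as out-of-range ones. A standalone sketch of that check, under the same smi-tagging assumptions as the sketch above:

```cpp
#include <cstdint>
#include <cstdio>

// Smi-encode a 32-bit value: shift left by one, tag bit stays clear
// (same assumption as in the sketch above).
static uint32_t SmiEncode(int32_t v) { return static_cast<uint32_t>(v) << 1; }

int main() {
  uint32_t length = SmiEncode(10);  // a3: smi-tagged subject string length
  const int32_t indices[] = {3, 10, -1};
  for (int32_t i : indices) {
    uint32_t index = SmiEncode(i);  // a0: smi-tagged previous index
    // Branch(&runtime, ls, a3, Operand(a0)) bails out to the runtime when
    // length <= index as unsigned numbers; a negative index becomes a huge
    // unsigned value, so the same comparison catches it.
    bool to_runtime = length <= index;
    printf("index %3d -> %s\n", i, to_runtime ? "runtime" : "ok");
  }
  return 0;
}
```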
......
@@ -3385,8 +3385,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
__ Branch(&ok, eq, left, Operand(right));
// Fail if either is a non-HeapObject.
__ And(tmp, left, Operand(right));
-__ And(at, tmp, Operand(kSmiTagMask));
-__ Branch(&fail, eq, at, Operand(zero_reg));
+__ JumpIfSmi(tmp, &fail);
__ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
__ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
__ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
@@ -4271,8 +4270,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
__ Branch(if_true, eq, a0, Operand(a1));
__ LoadRoot(a1, other_nil_value);
__ Branch(if_true, eq, a0, Operand(a1));
-__ And(at, a0, Operand(kSmiTagMask));
-__ Branch(if_false, eq, at, Operand(zero_reg));
+__ JumpIfSmi(a0, if_false);
// It can be an undetectable object.
__ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
......
@@ -527,8 +527,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm,
if (id == IC::kCallIC_Miss) {
Label invoke, global;
__ lw(a2, MemOperand(sp, argc * kPointerSize));
-__ andi(t0, a2, kSmiTagMask);
-__ Branch(&invoke, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(a2, &invoke);
__ GetObjectType(a2, a3, a3);
__ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
__ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
......
@@ -243,8 +243,7 @@ void MacroAssembler::RecordWrite(Register object,
if (smi_check == INLINE_SMI_CHECK) {
ASSERT_EQ(0, kSmiTag);
-And(t8, value, Operand(kSmiTagMask));
-Branch(&done, eq, t8, Operand(zero_reg));
+JumpIfSmi(value, &done);
}
CheckPageFlag(value,
@@ -4480,8 +4479,7 @@ void MacroAssembler::JumpIfNotBothSmi(Register reg1,
STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(1, kSmiTagMask);
or_(at, reg1, reg2);
-andi(at, at, kSmiTagMask);
-Branch(on_not_both_smi, ne, at, Operand(zero_reg));
+JumpIfNotSmi(at, on_not_both_smi);
}
@@ -4492,8 +4490,7 @@ void MacroAssembler::JumpIfEitherSmi(Register reg1,
ASSERT_EQ(1, kSmiTagMask);
// Both Smi tags must be 1 (not Smi).
and_(at, reg1, reg2);
-andi(at, at, kSmiTagMask);
-Branch(on_either_smi, eq, at, Operand(zero_reg));
+JumpIfSmi(at, on_either_smi);
}
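The two helpers above, JumpIfNotBothSmi() and JumpIfEitherSmi(), fold two smi checks into one by combining the register tags first: or-ing the values leaves the tag bit clear only if both are smis, and-ing leaves it clear if either one is (the same and-trick appears in EmitIsRegExpEquivalent earlier in this diff). A minimal standalone sketch of that bit logic, under the same tagging assumptions as above:

```cpp
#include <cstdint>
#include <cstdio>

static const intptr_t kSmiTag = 0;
static const intptr_t kSmiTagMask = 1;

static bool IsSmi(intptr_t v) { return (v & kSmiTagMask) == kSmiTag; }

// Mirrors JumpIfNotBothSmi: after or_(at, reg1, reg2), the combined value
// fails the smi test iff at least one input had its tag bit set.
static bool NotBothSmi(intptr_t a, intptr_t b) { return !IsSmi(a | b); }

// Mirrors JumpIfEitherSmi: after and_(at, reg1, reg2), the combined value
// passes the smi test iff at least one input had its tag bit clear.
static bool EitherSmi(intptr_t a, intptr_t b) { return IsSmi(a & b); }

int main() {
  intptr_t smi = 7 << 1;   // tag bit clear
  intptr_t obj = 0x2001;   // tag bit set (heap object)
  printf("NotBothSmi(smi, obj) = %d\n", NotBothSmi(smi, obj));  // 1
  printf("NotBothSmi(smi, smi) = %d\n", NotBothSmi(smi, smi));  // 0
  printf("EitherSmi(smi, obj)  = %d\n", EitherSmi(smi, obj));   // 1
  printf("EitherSmi(obj, obj)  = %d\n", EitherSmi(obj, obj));   // 0
  return 0;
}
```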
@@ -4571,8 +4568,7 @@ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
// Check that neither is a smi.
STATIC_ASSERT(kSmiTag == 0);
And(scratch1, first, Operand(second));
-And(scratch1, scratch1, Operand(kSmiTagMask));
-Branch(failure, eq, scratch1, Operand(zero_reg));
+JumpIfSmi(scratch1, failure);
JumpIfNonSmisNotBothSequentialAsciiStrings(first,
second,
scratch1,
......
@@ -282,8 +282,7 @@ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
Register scratch,
Label* miss_label) {
// Check that the receiver isn't a smi.
-__ And(scratch, receiver, Operand(kSmiTagMask));
-__ Branch(miss_label, eq, scratch, Operand(zero_reg));
+__ JumpIfSmi(receiver, miss_label);
// Check that the object is a JS array.
__ GetObjectType(receiver, scratch, scratch);
@@ -1100,8 +1099,7 @@ void StubCompiler::GenerateLoadField(Handle<JSObject> object,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
-__ And(scratch1, receiver, Operand(kSmiTagMask));
-__ Branch(miss, eq, scratch1, Operand(zero_reg));
+__ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
Register reg = CheckPrototypes(
@@ -2264,8 +2262,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
// Check that the receiver isn't a smi.
if (check != NUMBER_CHECK) {
-__ And(t1, a1, Operand(kSmiTagMask));
-__ Branch(&miss, eq, t1, Operand(zero_reg));
+__ JumpIfSmi(a1, &miss);
}
// Make sure that it's okay not to patch the on stack receiver
@@ -2310,8 +2307,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
if (function->IsBuiltin() || function->shared()->strict_mode()) {
Label fast;
// Check that the object is a smi or a heap number.
-__ And(t1, a1, Operand(kSmiTagMask));
-__ Branch(&fast, eq, t1, Operand(zero_reg));
+__ JumpIfSmi(a1, &fast);
__ GetObjectType(a1, a0, a0);
__ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
__ bind(&fast);
@@ -2775,8 +2771,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
// object which can only happen for contextual calls. In this case,
// the receiver cannot be a smi.
if (!object.is_identical_to(holder)) {
-__ And(t0, a0, Operand(kSmiTagMask));
-__ Branch(&miss, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(a0, &miss);
}
// Check that the map of the global has not changed.
@@ -3136,8 +3131,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// a1: constructor function
// t7: undefined
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
-__ And(t0, a2, Operand(kSmiTagMask));
-__ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
+__ JumpIfSmi(a2, &generic_stub_call);
__ GetObjectType(a2, a3, t0);
__ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
......