Remove support for branch hints from the IA32 and X64 assemblers.

They were not enabled by default and should not be needed on modern
platforms, which generally ignore the branch-hint prefixes.
Review URL: http://codereview.chromium.org/7001025

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7866 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 03c00ee6
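On IA32, a branch hint is a one-byte instruction prefix (0x2e for "not taken", 0x3e for "taken") emitted immediately before the conditional jump, as the assembler changes below show. A minimal sketch of the encoding being removed; the `EmitJcc` helper and byte buffer are illustrative, not V8 API:

```cpp
#include <cstdint>
#include <vector>

// Sketch of the encoding this patch removes (illustrative, not V8 API):
// an optional one-byte hint prefix in front of a long-form Jcc.
enum Hint : uint8_t { no_hint = 0, not_taken = 0x2e, taken = 0x3e };

void EmitJcc(std::vector<uint8_t>& code, uint8_t cc, int32_t disp, Hint hint) {
  if (hint != no_hint) code.push_back(hint);  // prefix modern CPUs ignore
  code.push_back(0x0f);                       // two-byte opcode escape
  code.push_back(0x80 | cc);                  // 0000 1111 1000 tttn
  for (int i = 0; i < 4; ++i) {               // 32-bit little-endian disp
    code.push_back(static_cast<uint8_t>(disp >> (8 * i)));
  }
}
```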
......@@ -147,7 +147,6 @@ DEFINE_bool(optimize_closures, true, "optimize closures")
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
DEFINE_bool(code_comments, false, "emit comments in code disassembly")
DEFINE_bool(emit_branch_hints, false, "emit branch hints")
DEFINE_bool(peephole_optimization, true,
"perform peephole optimizations in assembly code")
DEFINE_bool(print_peephole_optimization, false,
......
......@@ -1423,10 +1423,9 @@ void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
}
void Assembler::j(Condition cc, Label* L, Hint hint, Label::Distance distance) {
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
EnsureSpace ensure_space(this);
ASSERT(0 <= cc && cc < 16);
if (FLAG_emit_branch_hints && hint != no_hint) EMIT(hint);
if (L->is_bound()) {
const int short_size = 2;
const int long_size = 6;
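For a bound label the assembler picks between the two sizes named above: the 2-byte short form (0x70|cc with a rel8 displacement) or the 6-byte long form (0x0f, 0x80|cc with a rel32 displacement). A sketch of that choice, assuming `offs` is the target's offset relative to the current emission position; the helper is illustrative, not the V8 code:

```cpp
#include <cstdint>

// Decide whether a jump to a bound label fits the 2-byte short Jcc or
// needs the 6-byte long form. 'offs' is target minus current pc, so it
// is non-positive for a backward jump to a bound label.
bool FitsShortJcc(int32_t offs) {
  const int short_size = 2;
  // The rel8 displacement is measured from the end of the short instruction.
  int32_t rel = offs - short_size;
  return rel >= INT8_MIN && rel <= INT8_MAX;
}
```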
......@@ -1456,10 +1455,9 @@ void Assembler::j(Condition cc, Label* L, Hint hint, Label::Distance distance) {
}
void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint) {
void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
ASSERT((0 <= cc) && (cc < 16));
if (FLAG_emit_branch_hints && hint != no_hint) EMIT(hint);
// 0000 1111 1000 tttn #32-bit disp.
EMIT(0x0F);
EMIT(0x80 | cc);
......@@ -1467,9 +1465,8 @@ void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint) {
}
void Assembler::j(Condition cc, Handle<Code> code, Hint hint) {
void Assembler::j(Condition cc, Handle<Code> code) {
EnsureSpace ensure_space(this);
if (FLAG_emit_branch_hints && hint != no_hint) EMIT(hint);
// 0000 1111 1000 tttn #32-bit disp
EMIT(0x0F);
EMIT(0x80 | cc);
......
......@@ -249,23 +249,6 @@ inline Condition ReverseCondition(Condition cc) {
}
enum Hint {
no_hint = 0,
not_taken = 0x2e,
taken = 0x3e
};
// The result of negating a hint is as if the corresponding condition
// were negated by NegateCondition. That is, no_hint is mapped to
// itself and not_taken and taken are mapped to each other.
inline Hint NegateHint(Hint hint) {
return (hint == no_hint)
? no_hint
: ((hint == not_taken) ? taken : not_taken);
}
// -----------------------------------------------------------------------------
// Machine instruction Immediates
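The removed `NegateHint` exists so a hinted jump can be rewritten with its condition negated; restated as a standalone, checkable sketch:

```cpp
// Standalone restatement of the removed enum and helper.
enum Hint { no_hint = 0, not_taken = 0x2e, taken = 0x3e };

constexpr Hint NegateHint(Hint hint) {
  return hint == no_hint ? no_hint
                         : (hint == not_taken ? taken : not_taken);
}

// Negating a jump's condition and its hint preserves the prediction:
static_assert(NegateHint(taken) == not_taken, "taken <-> not_taken");
static_assert(NegateHint(no_hint) == no_hint, "no_hint maps to itself");
```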
......@@ -863,13 +846,9 @@ class Assembler : public AssemblerBase {
// Conditional jumps
void j(Condition cc,
Label* L,
Hint hint,
Label::Distance distance = Label::kFar);
void j(Condition cc, Label* L, Label::Distance distance = Label::kFar) {
j(cc, L, no_hint, distance);
}
void j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint = no_hint);
void j(Condition cc, Handle<Code> code, Hint hint = no_hint);
void j(Condition cc, byte* entry, RelocInfo::Mode rmode);
void j(Condition cc, Handle<Code> code);
// Floating-point operations
void fld(int i);
......
......@@ -356,12 +356,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// If the result is a smi, it is *not* an object in the ECMA sense.
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &use_receiver, not_taken);
__ j(zero, &use_receiver);
// If the type of the result (stored in its map) is less than
// FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
__ j(above_equal, &exit, not_taken);
__ j(above_equal, &exit);
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
......@@ -568,7 +568,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 1. Make sure we have at least one argument.
{ Label done;
__ test(eax, Operand(eax));
__ j(not_zero, &done, taken);
__ j(not_zero, &done);
__ pop(ebx);
__ push(Immediate(factory->undefined_value()));
__ push(ebx);
......@@ -582,9 +582,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 1 ~ return address.
__ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
__ test(edi, Immediate(kSmiTagMask));
__ j(zero, &non_function, not_taken);
__ j(zero, &non_function);
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
__ j(not_equal, &non_function, not_taken);
__ j(not_equal, &non_function);
// 3a. Patch the first argument if necessary when calling a function.
......@@ -684,7 +684,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
{ Label function;
__ test(edi, Operand(edi));
__ j(not_zero, &function, taken);
__ j(not_zero, &function);
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
......@@ -733,7 +733,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ shl(edx, kPointerSizeLog2 - kSmiTagSize);
// Check if the arguments will overflow the stack.
__ cmp(ecx, Operand(edx));
__ j(greater, &okay, taken); // Signed comparison.
__ j(greater, &okay); // Signed comparison.
// Out of stack space.
__ push(Operand(ebp, 4 * kPointerSize)); // push this
......@@ -1589,7 +1589,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(masm->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken, Label::kNear);
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ TailCallStub(&stub);
__ Abort("Unreachable code: returned from tail call.");
......
......@@ -1018,7 +1018,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
// 3. Perform the smi check of the operands.
STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
__ test(combined, Immediate(kSmiTagMask));
__ j(not_zero, &not_smis, not_taken);
__ j(not_zero, &not_smis);
// 4. Operands are both smis, perform the operation leaving the result in
// eax and check the result if necessary.
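With `kSmiTag == 0`, one test covers both operands: OR-ing them preserves a set tag bit from either side, so a single `test` against `kSmiTagMask` detects a non-smi in either operand. A sketch, assuming ia32's one-bit smi tag:

```cpp
#include <cstdint>

// Sketch of the combined smi check, assuming ia32's smi layout:
// a smi is (value << 1) with tag bit 0 == 0 (kSmiTag == 0, kSmiTagMask == 1).
bool BothSmis(uint32_t left, uint32_t right) {
  const uint32_t kSmiTagMask = 1;
  uint32_t combined = left | right;      // a set tag bit in either survives
  return (combined & kSmiTagMask) == 0;  // not_zero would jump to not_smis
}
```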
......@@ -1047,7 +1047,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
__ shl_cl(left);
// Check that the *signed* result fits in a smi.
__ cmp(left, 0xc0000000);
__ j(sign, &use_fp_on_smis, not_taken);
__ j(sign, &use_fp_on_smis);
// Tag the result and store it in register eax.
__ SmiTag(left);
__ mov(eax, left);
......@@ -1077,7 +1077,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
// Smi tagging these two cases can only happen with shifts
// by 0 or 1 when handed a valid smi.
__ test(left, Immediate(0xc0000000));
__ j(not_zero, slow, not_taken);
__ j(not_zero, slow);
// Tag the result and store it in register eax.
__ SmiTag(left);
__ mov(eax, left);
......@@ -1086,12 +1086,12 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
case Token::ADD:
ASSERT(right.is(eax));
__ add(right, Operand(left)); // Addition is commutative.
__ j(overflow, &use_fp_on_smis, not_taken);
__ j(overflow, &use_fp_on_smis);
break;
case Token::SUB:
__ sub(left, Operand(right));
__ j(overflow, &use_fp_on_smis, not_taken);
__ j(overflow, &use_fp_on_smis);
__ mov(eax, left);
break;
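Because smi tagging is a plain left shift, ADD and SUB can operate directly on the tagged values; the only failure mode is signed overflow, which routes to the floating-point path. A sketch of the same guard, assuming GCC/Clang's checked-arithmetic builtin:

```cpp
#include <cstdint>
#include <optional>

// Sketch of the smi fast path for ADD: operate on tagged values and fall
// back (here: nullopt, in the stub: use_fp_on_smis) on signed overflow.
std::optional<int32_t> SmiAdd(int32_t left_tagged, int32_t right_tagged) {
  int32_t result;
  if (__builtin_add_overflow(left_tagged, right_tagged, &result)) {
    return std::nullopt;  // j(overflow, &use_fp_on_smis)
  }
  // Still a valid smi: (a + b) << 1 == (a << 1) + (b << 1) when no overflow.
  return result;
}
```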
......@@ -1105,7 +1105,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
__ SmiUntag(right);
// Do multiplication.
__ imul(right, Operand(left)); // Multiplication is commutative.
__ j(overflow, &use_fp_on_smis, not_taken);
__ j(overflow, &use_fp_on_smis);
// Check for negative zero result. Use combined = left | right.
__ NegativeZeroTest(right, combined, &use_fp_on_smis);
break;
......@@ -1116,7 +1116,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
__ mov(edi, left);
// Check for 0 divisor.
__ test(right, Operand(right));
__ j(zero, &use_fp_on_smis, not_taken);
__ j(zero, &use_fp_on_smis);
// Sign extend left into edx:eax.
ASSERT(left.is(eax));
__ cdq();
......@@ -1140,7 +1140,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
case Token::MOD:
// Check for 0 divisor.
__ test(right, Operand(right));
__ j(zero, &not_smis, not_taken);
__ j(zero, &not_smis);
// Sign extend left into edx:eax.
ASSERT(left.is(eax));
......@@ -1541,7 +1541,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
__ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1 * kPointerSize : 2 * kPointerSize));
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &skip_allocation, not_taken, Label::kNear);
__ j(not_zero, &skip_allocation, Label::kNear);
// Fall through!
case NO_OVERWRITE:
__ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
......@@ -1756,7 +1756,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
__ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1 * kPointerSize : 2 * kPointerSize));
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &skip_allocation, not_taken, Label::kNear);
__ j(not_zero, &skip_allocation, Label::kNear);
// Fall through!
case NO_OVERWRITE:
__ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
......@@ -1956,7 +1956,7 @@ void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
__ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1 * kPointerSize : 2 * kPointerSize));
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &skip_allocation, not_taken, Label::kNear);
__ j(not_zero, &skip_allocation, Label::kNear);
// Fall through!
case NO_OVERWRITE:
__ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
......@@ -2074,7 +2074,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
// If the argument in edx is already an object, we skip the
// allocation of a heap number.
__ test(edx, Immediate(kSmiTagMask));
__ j(not_zero, &skip_allocation, not_taken);
__ j(not_zero, &skip_allocation);
// Allocate a heap number for the result. Keep eax and edx intact
// for the possible runtime call.
__ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
......@@ -2090,7 +2090,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
// If the argument in eax is already an object, we skip the
// allocation of a heap number.
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &skip_allocation, not_taken);
__ j(not_zero, &skip_allocation);
// Fall through!
case NO_OVERWRITE:
// Allocate a heap number for the result. Keep eax and edx intact
......@@ -2333,11 +2333,11 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
int supported_exponent_limit =
(63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
__ cmp(Operand(edi), Immediate(supported_exponent_limit));
__ j(below, &in_range, taken, Label::kNear);
__ j(below, &in_range, Label::kNear);
// Check for infinity and NaN. Both return NaN for sin.
__ cmp(Operand(edi), Immediate(0x7ff00000));
Label non_nan_result;
__ j(not_equal, &non_nan_result, taken, Label::kNear);
__ j(not_equal, &non_nan_result, Label::kNear);
// Input is +/-Infinity or NaN. Result is NaN.
__ fstp(0);
// NaN is represented by 0x7ff8000000000000.
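The limit compares the double's high word (masked to its exponent bits just above this excerpt) against the biased exponent of 2^63, the largest magnitude fsin/fcos accept. A sketch, assuming IEEE-754 doubles with kExponentBias = 1023 and kExponentShift = 20:

```cpp
#include <cstdint>
#include <cstring>

// Sketch of the exponent test: in the stub, edi holds the double's high
// 32 bits already masked to the exponent field.
bool FsinArgInRange(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  uint32_t exponent_word = static_cast<uint32_t>(bits >> 32) & 0x7ff00000;
  const uint32_t kLimit = (63 + 1023) << 20;  // biased exponent of 2^63
  // below => |x| < 2^63, which fsin/fcos handle; 0x7ff00000 itself would
  // mean infinity or NaN, for which the stub produces NaN.
  return exponent_word < kLimit;
}
```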
......@@ -2553,7 +2553,7 @@ void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
Label load_smi, done;
__ test(number, Immediate(kSmiTagMask));
__ j(zero, &load_smi, not_taken, Label::kNear);
__ j(zero, &load_smi, Label::kNear);
__ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
__ jmp(&done, Label::kNear);
......@@ -2572,14 +2572,14 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
// Load operand in edx into xmm0.
__ test(edx, Immediate(kSmiTagMask));
// Argument in edx is a smi.
__ j(zero, &load_smi_edx, not_taken, Label::kNear);
__ j(zero, &load_smi_edx, Label::kNear);
__ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
__ bind(&load_eax);
// Load operand in eax into xmm1.
__ test(eax, Immediate(kSmiTagMask));
// Argument in eax is a smi.
__ j(zero, &load_smi_eax, not_taken, Label::kNear);
__ j(zero, &load_smi_eax, Label::kNear);
__ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
__ jmp(&done, Label::kNear);
......@@ -2604,7 +2604,7 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
// Load operand in edx into xmm0, or branch to not_numbers.
__ test(edx, Immediate(kSmiTagMask));
// Argument in edx is a smi.
__ j(zero, &load_smi_edx, not_taken, Label::kNear);
__ j(zero, &load_smi_edx, Label::kNear);
Factory* factory = masm->isolate()->factory();
__ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
__ j(not_equal, not_numbers); // Argument in edx is not a number.
......@@ -2613,7 +2613,7 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
// Load operand in eax into xmm1, or branch to not_numbers.
__ test(eax, Immediate(kSmiTagMask));
// Argument in eax is a smi.
__ j(zero, &load_smi_eax, not_taken, Label::kNear);
__ j(zero, &load_smi_eax, Label::kNear);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
__ j(equal, &load_float_eax, Label::kNear);
__ jmp(not_numbers); // Argument in eax is not a number.
......@@ -2674,7 +2674,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
__ mov(scratch, Operand(esp, 2 * kPointerSize));
}
__ test(scratch, Immediate(kSmiTagMask));
__ j(zero, &load_smi_1, not_taken, Label::kNear);
__ j(zero, &load_smi_1, Label::kNear);
__ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
__ bind(&done_load_1);
......@@ -2684,7 +2684,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
__ mov(scratch, Operand(esp, 1 * kPointerSize));
}
__ test(scratch, Immediate(kSmiTagMask));
__ j(zero, &load_smi_2, not_taken, Label::kNear);
__ j(zero, &load_smi_2, Label::kNear);
__ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
__ jmp(&done, Label::kNear);
......@@ -2730,7 +2730,7 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
// Test if both operands are floats or smis -> scratch = k_is_float;
// otherwise scratch = k_not_float.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &test_other, not_taken, Label::kNear); // argument in edx is OK
__ j(zero, &test_other, Label::kNear); // argument in edx is OK
__ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
Factory* factory = masm->isolate()->factory();
__ cmp(scratch, factory->heap_number_map());
......@@ -2923,7 +2923,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// Check that the key is a smi.
Label slow;
__ test(edx, Immediate(kSmiTagMask));
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
// Check if the calling frame is an arguments adaptor frame.
Label adaptor;
......@@ -2936,7 +2936,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// through register eax. Use unsigned comparison to get negative
// check for free.
__ cmp(edx, Operand(eax));
__ j(above_equal, &slow, not_taken);
__ j(above_equal, &slow);
// Read the argument from the stack and return it.
STATIC_ASSERT(kSmiTagSize == 1);
......@@ -2952,7 +2952,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
__ bind(&adaptor);
__ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ cmp(edx, Operand(ecx));
__ j(above_equal, &slow, not_taken);
__ j(above_equal, &slow);
// Read the argument from the stack and return it.
STATIC_ASSERT(kSmiTagSize == 1);
......@@ -3115,7 +3115,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
__ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
__ test(ebx, Operand(ebx));
__ j(zero, &runtime, not_taken);
__ j(zero, &runtime);
// Check that the first argument is a JSRegExp object.
__ mov(eax, Operand(esp, kJSRegExpOffset));
......@@ -3336,10 +3336,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
__ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
__ j(equal, &success, taken);
__ j(equal, &success);
Label failure;
__ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
__ j(equal, &failure, taken);
__ j(equal, &failure);
__ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
// If not exception it can only be retry. Handle that in the runtime system.
__ j(not_equal, &runtime);
......@@ -3658,7 +3658,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &non_smi, not_taken);
__ j(not_zero, &non_smi);
__ sub(edx, Operand(eax)); // Return on the result of the subtraction.
__ j(no_overflow, &smi_done);
__ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
......@@ -3833,7 +3833,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ ucomisd(xmm0, xmm1);
// Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered, not_taken);
__ j(parity_even, &unordered);
// Return a result of -1, 0, or 1, based on EFLAGS.
__ mov(eax, 0); // equal
__ mov(ecx, Immediate(Smi::FromInt(1)));
......@@ -3849,12 +3849,12 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ FCmp();
// Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered, not_taken);
__ j(parity_even, &unordered);
Label below_label, above_label;
// Return a result of -1, 0, or 1, based on EFLAGS.
__ j(below, &below_label, not_taken);
__ j(above, &above_label, not_taken);
__ j(below, &below_label);
__ j(above, &above_label);
__ Set(eax, Immediate(0));
__ ret(0);
......@@ -4012,7 +4012,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Check if receiver is a smi (which is a number value).
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &receiver_is_value, not_taken);
__ j(zero, &receiver_is_value);
// Check if the receiver is a valid JS object.
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
......@@ -4035,10 +4035,10 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Check that the function really is a JavaScript function.
__ test(edi, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
__ j(not_equal, &slow, not_taken);
__ j(not_equal, &slow);
// Fast-case: Just invoke the function.
ParameterCount actual(argc_);
......@@ -4132,7 +4132,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ lea(ecx, Operand(eax, 1));
// Lower 2 bits of ecx are 0 iff eax has failure tag.
__ test(ecx, Immediate(kFailureTagMask));
__ j(zero, &failure_returned, not_taken);
__ j(zero, &failure_returned);
ExternalReference pending_exception_address(
Isolate::k_pending_exception_address, masm->isolate());
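The `lea`/`test` pair above relies on failure objects having both low tag bits set: adding 1 clears exactly those bits, so one test against `kFailureTagMask` identifies a failure return. A sketch under that tagging assumption:

```cpp
#include <cstdint>

// Sketch of the failure-tag test, assuming V8's tagging where a Failure
// value has its low two bits set (kFailureTagMask == 3): adding 1 clears
// exactly those bits, so a single masked test detects failures.
bool IsFailure(uint32_t raw_result) {
  const uint32_t kFailureTagMask = 3;
  return ((raw_result + 1) & kFailureTagMask) == 0;  // low bits were 0b11
}
```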
......@@ -4163,7 +4163,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// If the returned exception is RETRY_AFTER_GC continue at retry label
STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
__ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ j(zero, &retry, taken);
__ j(zero, &retry);
// Special handling of out of memory exceptions.
__ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
......@@ -4417,7 +4417,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Check that the left hand is a JS object.
__ test(object, Immediate(kSmiTagMask));
__ j(zero, &not_js_object, not_taken);
__ j(zero, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// If there is a call site cache don't look in the global cache, but do the
......@@ -4445,7 +4445,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Check that the function prototype is a JS object.
__ test(prototype, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Update the global instanceof or call site inlined cache with the current
......@@ -4535,9 +4535,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Before the null, smi, and string value checks, check that the rhs is a
// function, since for a non-function rhs an exception needs to be thrown.
__ test(function, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
__ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
__ j(not_equal, &slow, not_taken);
__ j(not_equal, &slow);
// Null is not instance of anything.
__ cmp(object, factory->null_value());
......@@ -4548,7 +4548,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ test(object, Immediate(kSmiTagMask));
__ j(not_zero, &object_not_null_or_smi, not_taken);
__ j(not_zero, &object_not_null_or_smi);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
......@@ -4820,7 +4820,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
__ test(code_,
Immediate(kSmiTagMask |
((~String::kMaxAsciiCharCode) << kSmiTagSize)));
__ j(not_zero, &slow_case_, not_taken);
__ j(not_zero, &slow_case_);
Factory* factory = masm->isolate()->factory();
__ Set(result_, Immediate(factory->single_character_string_cache()));
......@@ -4832,7 +4832,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
code_, times_half_pointer_size,
FixedArray::kHeaderSize));
__ cmp(result_, factory->undefined_value());
__ j(equal, &slow_case_, not_taken);
__ j(equal, &slow_case_);
__ bind(&exit_);
}
......@@ -5781,7 +5781,7 @@ void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &miss, not_taken, Label::kNear);
__ j(not_zero, &miss, Label::kNear);
if (GetCondition() == equal) {
// For equality we do not care about the sign of the result.
......@@ -5811,12 +5811,12 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ mov(ecx, Operand(edx));
__ and_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
__ j(zero, &generic_stub, not_taken, Label::kNear);
__ j(zero, &generic_stub, Label::kNear);
__ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or SSE2 or CMOV is unsupported.
......@@ -5832,7 +5832,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ ucomisd(xmm0, xmm1);
// Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered, not_taken, Label::kNear);
__ j(parity_even, &unordered, Label::kNear);
// Return a result of -1, 0, or 1, based on EFLAGS.
// Performing mov, because xor would destroy the flag register.
......@@ -5984,12 +5984,12 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ mov(ecx, Operand(edx));
__ and_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken, Label::kNear);
__ j(zero, &miss, Label::kNear);
__ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
__ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
ASSERT(GetCondition() == equal);
__ sub(eax, Operand(edx));
......@@ -6069,17 +6069,17 @@ MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
__ mov(entity_name, Operand(properties, index, times_half_pointer_size,
kElementsStartOffset - kHeapObjectTag));
__ cmp(entity_name, masm->isolate()->factory()->undefined_value());
__ j(equal, done, taken);
__ j(equal, done);
// Stop if found the property.
__ cmp(entity_name, Handle<String>(name));
__ j(equal, miss, not_taken);
__ j(equal, miss);
// Check if the entry name is not a symbol.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
kIsSymbolMask);
__ j(zero, miss, not_taken);
__ j(zero, miss);
}
StringDictionaryLookupStub stub(properties,
......@@ -6136,7 +6136,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
r0,
times_4,
kElementsStartOffset - kHeapObjectTag));
__ j(equal, done, taken);
__ j(equal, done);
}
StringDictionaryLookupStub stub(elements,
......
......@@ -245,7 +245,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken, Label::kNear);
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
......@@ -278,7 +278,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken, Label::kNear);
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ CallStub(&stub);
// Record a mapping of this PC offset to the OSR id. This is used to find
......@@ -1725,7 +1725,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ imul(eax, Operand(ecx));
__ j(overflow, &stub_call);
__ test(eax, Operand(eax));
__ j(not_zero, &done, taken, Label::kNear);
__ j(not_zero, &done, Label::kNear);
__ mov(ebx, edx);
__ or_(ebx, Operand(ecx));
__ j(negative, &stub_call);
......
......@@ -50,11 +50,11 @@ static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
// Register usage:
// type: holds the receiver instance type on entry.
__ cmp(type, JS_GLOBAL_OBJECT_TYPE);
__ j(equal, global_object, not_taken);
__ j(equal, global_object);
__ cmp(type, JS_BUILTINS_OBJECT_TYPE);
__ j(equal, global_object, not_taken);
__ j(equal, global_object);
__ cmp(type, JS_GLOBAL_PROXY_TYPE);
__ j(equal, global_object, not_taken);
__ j(equal, global_object);
}
......@@ -73,13 +73,13 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// Check that the receiver is a valid JS object.
__ mov(r1, FieldOperand(receiver, HeapObject::kMapOffset));
__ movzx_b(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
__ cmp(r0, FIRST_JS_OBJECT_TYPE);
__ j(below, miss, not_taken);
__ j(below, miss);
// If this assert fails, we have to check upper bound too.
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
......@@ -90,7 +90,7 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
__ test_b(FieldOperand(r1, Map::kBitFieldOffset),
(1 << Map::kIsAccessCheckNeeded) |
(1 << Map::kHasNamedInterceptor));
__ j(not_zero, miss, not_taken);
__ j(not_zero, miss);
__ mov(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
__ CheckMap(r0, FACTORY->hash_table_map(), miss, true);
......@@ -146,7 +146,7 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
__ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
__ j(not_zero, miss_label, not_taken);
__ j(not_zero, miss_label);
// Get the value at the masked, scaled index.
const int kValueOffset = kElementsStartOffset + kPointerSize;
......@@ -204,7 +204,7 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
__ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
Immediate(kTypeAndReadOnlyMask));
__ j(not_zero, miss_label, not_taken);
__ j(not_zero, miss_label);
// Store the value at the masked, scaled index.
const int kValueOffset = kElementsStartOffset + kPointerSize;
......@@ -294,9 +294,9 @@ static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
times_pointer_size,
NumberDictionary::kElementsStartOffset));
if (i != (kProbes - 1)) {
__ j(equal, &done, taken);
__ j(equal, &done);
} else {
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
}
}
......@@ -374,7 +374,7 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
// Check that the object isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, slow, not_taken);
__ j(zero, slow);
// Get the map of the receiver.
__ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));
......@@ -382,7 +382,7 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
// Check bit field.
__ test_b(FieldOperand(map, Map::kBitFieldOffset),
(1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
__ j(not_zero, slow, not_taken);
__ j(not_zero, slow);
// Check that the object is some kind of JS object EXCEPT JS Value type.
// In the case that the object is a value-wrapper object,
// we enter the runtime system to make sure that indexing
......@@ -390,7 +390,7 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
__ CmpInstanceType(map, JS_OBJECT_TYPE);
__ j(below, slow, not_taken);
__ j(below, slow);
}
......@@ -453,12 +453,12 @@ static void GenerateKeyStringCheck(MacroAssembler* masm,
// Is the string an array index, with cached numeric value?
__ mov(hash, FieldOperand(key, String::kHashFieldOffset));
__ test(hash, Immediate(String::kContainsCachedArrayIndexMask));
__ j(zero, index_string, not_taken);
__ j(zero, index_string);
// Is the string a symbol?
ASSERT(kSymbolTag != 0);
__ test_b(FieldOperand(map, Map::kInstanceTypeOffset), kIsSymbolMask);
__ j(zero, not_symbol, not_taken);
__ j(zero, not_symbol);
}
......@@ -473,7 +473,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// Check that the key is a smi.
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &check_string, not_taken);
__ j(not_zero, &check_string);
__ bind(&index_smi);
// Now the key is known to be a smi. This place is also jumped to from
// where a numeric string is converted to a smi.
......@@ -485,7 +485,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// now in ecx.
__ test_b(FieldOperand(ecx, Map::kBitField2Offset),
1 << Map::kHasFastElements);
__ j(zero, &check_number_dictionary, not_taken);
__ j(zero, &check_number_dictionary);
GenerateFastArrayLoad(masm,
edx,
......@@ -663,11 +663,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
// Check that the key is an array index, that is, a Uint32.
__ test(eax, Immediate(kSmiTagMask | kSmiSignMask));
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
// Get the map of the receiver.
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
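A key is usable as an array index when it is a non-negative smi, so the tag bit and the sign bit are tested together. A sketch, assuming ia32's smi layout:

```cpp
#include <cstdint>

// Sketch of the combined check above: kSmiTagMask selects the tag bit
// (0x1), kSmiSignMask the sign bit (0x80000000); both clear means
// "non-negative smi", i.e. a valid Uint32 array index.
bool IsArrayIndexSmi(uint32_t key) {
  const uint32_t kSmiTagMask = 0x1;
  const uint32_t kSmiSignMask = 0x80000000u;
  return (key & (kSmiTagMask | kSmiSignMask)) == 0;
}
```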
......@@ -677,7 +677,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
__ movzx_b(ecx, FieldOperand(ecx, Map::kBitFieldOffset));
__ and_(Operand(ecx), Immediate(kSlowCaseBitFieldMask));
__ cmp(Operand(ecx), Immediate(1 << Map::kHasIndexedInterceptor));
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
// Everything is fine, call runtime.
__ pop(ecx);
......@@ -708,22 +708,22 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Check that the object isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
// Get the map from the receiver.
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need
// to do this because this generic stub does not perform map checks.
__ test_b(FieldOperand(edi, Map::kBitFieldOffset),
1 << Map::kIsAccessCheckNeeded);
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
// Check that the key is a smi.
__ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
__ CmpInstanceType(edi, JS_ARRAY_TYPE);
__ j(equal, &array);
// Check that the object is some kind of JS object.
__ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
__ j(below, &slow, not_taken);
__ j(below, &slow);
// Object case: Check key against length in the elements array.
// eax: value
......@@ -733,7 +733,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Check that the object is in fast mode and writable.
__ CheckMap(edi, FACTORY->fixed_array_map(), &slow, true);
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
__ j(below, &fast, taken);
__ j(below, &fast);
// Slow case: call runtime.
__ bind(&slow);
......@@ -749,9 +749,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// edi: receiver->elements, a FixedArray
// flags: compare (ecx, edx.length())
// do not leave holes in the array:
__ j(not_equal, &slow, not_taken);
__ j(not_equal, &slow);
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
__ j(above_equal, &slow, not_taken);
__ j(above_equal, &slow);
// Add 1 to receiver->length, and go to fast array write.
__ add(FieldOperand(edx, JSArray::kLengthOffset),
Immediate(Smi::FromInt(1)));
......@@ -770,7 +770,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// Check the key against the length in the array, compute the
// address to store into and fall through to fast case.
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
__ j(above_equal, &extra, not_taken);
__ j(above_equal, &extra);
// Fast case: Do the store.
__ bind(&fast);
......@@ -814,9 +814,9 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
//
// Check for number.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &number, not_taken);
__ j(zero, &number);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, ebx);
__ j(not_equal, &non_number, taken);
__ j(not_equal, &non_number);
__ bind(&number);
StubCompiler::GenerateLoadGlobalFunctionPrototype(
masm, Context::NUMBER_FUNCTION_INDEX, edx);
......@@ -825,7 +825,7 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Check for string.
__ bind(&non_number);
__ CmpInstanceType(ebx, FIRST_NONSTRING_TYPE);
__ j(above_equal, &non_string, taken);
__ j(above_equal, &non_string);
StubCompiler::GenerateLoadGlobalFunctionPrototype(
masm, Context::STRING_FUNCTION_INDEX, edx);
__ jmp(&probe);
......@@ -833,9 +833,9 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Check for boolean.
__ bind(&non_string);
__ cmp(edx, FACTORY->true_value());
__ j(equal, &boolean, not_taken);
__ j(equal, &boolean);
__ cmp(edx, FACTORY->false_value());
__ j(not_equal, &miss, taken);
__ j(not_equal, &miss);
__ bind(&boolean);
StubCompiler::GenerateLoadGlobalFunctionPrototype(
masm, Context::BOOLEAN_FUNCTION_INDEX, edx);
......@@ -862,11 +862,11 @@ static void GenerateFunctionTailCall(MacroAssembler* masm,
// Check that the result is not a smi.
__ test(edi, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// Check that the value is a JavaScript function, fetching its map into eax.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Invoke the function.
ParameterCount actual(argc);
......@@ -942,7 +942,7 @@ static void GenerateCallMiss(MacroAssembler* masm,
Label invoke, global;
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &invoke, not_taken, Label::kNear);
__ j(zero, &invoke, Label::kNear);
__ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
__ cmp(ebx, JS_GLOBAL_OBJECT_TYPE);
......@@ -1024,7 +1024,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Check that the key is a smi.
__ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &check_string, not_taken);
__ j(not_zero, &check_string);
__ bind(&index_smi);
// Now the key is known to be a smi. This place is also jumped to from
......@@ -1314,22 +1314,22 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Check that the object is a JS array.
__ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that elements are FixedArray.
// We rely on StoreIC_ArrayLength below to deal with all types of
// fast elements (including COW).
__ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
__ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that value is a smi.
__ test(value, Immediate(kSmiTagMask));
__ j(not_zero, &miss, not_taken);
__ j(not_zero, &miss);
// Prepare tail call to StoreIC_ArrayLength.
__ pop(scratch);
......
......@@ -572,7 +572,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
__ bind(&done);
} else {
__ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
__ j(cc, entry, RelocInfo::RUNTIME_ENTRY);
}
}
}
......@@ -1482,7 +1482,7 @@ void LCodeGen::DoCmpID(LCmpID* instr) {
// Don't base result on EFLAGS when a NaN is involved. Instead
// jump to the unordered case, which produces a false value.
__ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
__ j(parity_even, &unordered, not_taken, Label::kNear);
__ j(parity_even, &unordered, Label::kNear);
} else {
EmitCmpI(left, right);
}
......@@ -2031,7 +2031,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// A Smi is not an instance of anything.
__ test(object, Immediate(kSmiTagMask));
__ j(zero, &false_result, not_taken);
__ j(zero, &false_result);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
......@@ -2041,7 +2041,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
__ cmp(map, factory()->the_hole_value()); // Patched to cached map.
__ j(not_equal, &cache_miss, not_taken, Label::kNear);
__ j(not_equal, &cache_miss, Label::kNear);
__ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
__ jmp(&done);
......@@ -3622,7 +3622,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
// Smi check.
__ test(input_reg, Immediate(kSmiTagMask));
__ j(zero, &load_smi, not_taken, Label::kNear);
__ j(zero, &load_smi, Label::kNear);
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
......
......@@ -513,7 +513,7 @@ void MacroAssembler::Throw(Register value) {
Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
Label skip;
cmp(ebp, 0);
j(equal, &skip, not_taken, Label::kNear);
j(equal, &skip, Label::kNear);
mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
bind(&skip);
......@@ -614,7 +614,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check if both contexts are the same.
cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
j(equal, &same_contexts, taken);
j(equal, &same_contexts);
// Compare security tokens, save holder_reg on the stack so we can use it
// as a temporary register.
......@@ -644,7 +644,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
mov(scratch, FieldOperand(scratch, token_offset));
cmp(scratch, FieldOperand(holder_reg, token_offset));
pop(holder_reg);
j(not_equal, miss, not_taken);
j(not_equal, miss);
bind(&same_contexts);
}
......@@ -732,9 +732,9 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
mov(top_reg, result);
}
add(Operand(top_reg), Immediate(object_size));
j(carry, gc_required, not_taken);
j(carry, gc_required);
cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(top_reg, scratch);
......@@ -831,9 +831,9 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
mov(result_end, object_size);
}
add(result_end, Operand(result));
j(carry, gc_required, not_taken);
j(carry, gc_required);
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
j(above, gc_required);
// Tag result if requested.
if ((flags & TAG_OBJECT) != 0) {
......@@ -1062,9 +1062,9 @@ void MacroAssembler::NegativeZeroTest(Register result,
Label* then_label) {
Label ok;
test(result, Operand(result));
j(not_zero, &ok, taken);
j(not_zero, &ok);
test(op, Operand(op));
j(sign, then_label, not_taken);
j(sign, then_label);
bind(&ok);
}
......@@ -1076,10 +1076,10 @@ void MacroAssembler::NegativeZeroTest(Register result,
Label* then_label) {
Label ok;
test(result, Operand(result));
j(not_zero, &ok, taken);
j(not_zero, &ok);
mov(scratch, Operand(op1));
or_(scratch, Operand(op2));
j(sign, then_label, not_taken);
j(sign, then_label);
bind(&ok);
}
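Both `NegativeZeroTest` variants encode the same rule: an integer result of zero stands for -0 exactly when some operand was negative, which the sign of the OR-ed operands records. A sketch of the two-operand form:

```cpp
#include <cstdint>

// Sketch of NegativeZeroTest's logic: a product of zero represents -0
// exactly when one of the operands was negative. 'true' corresponds to
// jumping to then_label in the generated code.
bool ProductIsNegativeZero(int32_t result, int32_t op1, int32_t op2) {
  if (result != 0) return false;  // j(not_zero, &ok)
  return (op1 | op2) < 0;         // sign flag set => j(sign, then_label)
}
```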
......@@ -1090,17 +1090,17 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
Label* miss) {
// Check that the receiver isn't a smi.
test(function, Immediate(kSmiTagMask));
j(zero, miss, not_taken);
j(zero, miss);
// Check that the function really is a function.
CmpObjectType(function, JS_FUNCTION_TYPE, result);
j(not_equal, miss, not_taken);
j(not_equal, miss);
// Make sure that the function has an instance prototype.
Label non_instance;
movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
j(not_zero, &non_instance, not_taken);
j(not_zero, &non_instance);
// Get the prototype or initial map from the function.
mov(result,
......@@ -1110,7 +1110,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system.
cmp(Operand(result), Immediate(isolate()->factory()->the_hole_value()));
j(equal, miss, not_taken);
j(equal, miss);
// If the function does not have an initial map, we're done.
Label done;
......@@ -1391,7 +1391,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
// Check if the result handle holds 0.
test(eax, Operand(eax));
j(zero, &empty_handle, not_taken);
j(zero, &empty_handle);
// It was non-zero. Dereference to get the result value.
mov(eax, Operand(eax, 0));
bind(&prologue);
......@@ -1401,7 +1401,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
sub(Operand::StaticVariable(level_address), Immediate(1));
Assert(above_equal, "Invalid HandleScope level");
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles, not_taken);
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
// Check if the function scheduled an exception.
......@@ -1409,7 +1409,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function,
ExternalReference::scheduled_exception_address(isolate());
cmp(Operand::StaticVariable(scheduled_exception_address),
Immediate(isolate()->factory()->the_hole_value()));
j(not_equal, &promote_scheduled_exception, not_taken);
j(not_equal, &promote_scheduled_exception);
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
bind(&promote_scheduled_exception);
......@@ -1849,7 +1849,7 @@ void MacroAssembler::AssertFastElements(Register elements) {
void MacroAssembler::Check(Condition cc, const char* msg) {
Label L;
j(cc, &L, taken);
j(cc, &L);
Abort(msg);
// will not return here
bind(&L);
......
......@@ -265,12 +265,12 @@ class MacroAssembler: public Assembler {
// Jump if the register contains a smi.
inline void JumpIfSmi(Register value, Label* smi_label) {
test(value, Immediate(kSmiTagMask));
j(zero, smi_label, not_taken);
j(zero, smi_label);
}
// Jump if the register contains a non-smi.
inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
test(value, Immediate(kSmiTagMask));
j(not_zero, not_smi_label, not_taken);
j(not_zero, not_smi_label);
}
// Assumes input is a heap object.
......
......@@ -305,7 +305,7 @@ void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase(
// The length of a capture should not be negative. This can only happen
// if the end of the capture is unrecorded, or at a point earlier than
// the start of the capture.
BranchOrBacktrack(less, on_no_match, not_taken);
BranchOrBacktrack(less, on_no_match);
// If length is zero, either the capture is empty or it is completely
// uncaptured. In either case succeed immediately.
......@@ -348,7 +348,7 @@ void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase(
__ add(Operand(edi), Immediate(1));
// Compare to end of match, and loop if not done.
__ cmp(edi, Operand(ebx));
__ j(below, &loop, taken);
__ j(below, &loop);
__ jmp(&success);
__ bind(&fail);
......@@ -687,11 +687,11 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ mov(ecx, esp);
__ sub(ecx, Operand::StaticVariable(stack_limit));
// Handle it if the stack pointer is already below the stack limit.
__ j(below_equal, &stack_limit_hit, not_taken);
__ j(below_equal, &stack_limit_hit);
// Check if there is room for the variable number of registers above
// the stack limit.
__ cmp(ecx, num_registers_ * kPointerSize);
__ j(above_equal, &stack_ok, taken);
__ j(above_equal, &stack_ok);
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ mov(eax, EXCEPTION);
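The regexp entry code needs room for its register file above the C stack limit: a non-positive `esp - limit` means the limit is already hit, and a smaller-than-needed gap means OutOfMemory. A sketch, assuming a downward-growing stack, 4-byte pointers on ia32, and a gap that fits in intptr_t:

```cpp
#include <cstdint>

enum StackStatus { kOk, kLimitHit, kOutOfMemory };

// Sketch of the headroom check: the regexp's working registers live below
// esp, so esp must exceed the limit by num_registers * kPointerSize bytes.
StackStatus CheckStack(uintptr_t esp, uintptr_t limit, int num_registers) {
  const int kPointerSize = 4;  // ia32
  intptr_t headroom = static_cast<intptr_t>(esp - limit);
  if (headroom <= 0) return kLimitHit;  // j(below_equal, &stack_limit_hit)
  if (headroom < num_registers * kPointerSize) return kOutOfMemory;
  return kOk;                           // j(above_equal, &stack_ok)
}
```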
......@@ -1142,8 +1142,7 @@ void RegExpMacroAssemblerIA32::CheckPosition(int cp_offset,
void RegExpMacroAssemblerIA32::BranchOrBacktrack(Condition condition,
Label* to,
Hint hint) {
Label* to) {
if (condition < 0) { // No condition
if (to == NULL) {
Backtrack();
......@@ -1153,10 +1152,10 @@ void RegExpMacroAssemblerIA32::BranchOrBacktrack(Condition condition,
return;
}
if (to == NULL) {
__ j(condition, &backtrack_label_, hint);
__ j(condition, &backtrack_label_);
return;
}
__ j(condition, to, hint);
__ j(condition, to);
}
......@@ -1209,7 +1208,7 @@ void RegExpMacroAssemblerIA32::CheckPreemption() {
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(masm_->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above, &no_preempt, taken);
__ j(above, &no_preempt);
SafeCall(&check_preempt_label_);
......
......@@ -168,7 +168,7 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
// Equivalent to a conditional branch to the label, unless the label
// is NULL, in which case it is a conditional Backtrack.
void BranchOrBacktrack(Condition condition, Label* to, Hint hint = no_hint);
void BranchOrBacktrack(Condition condition, Label* to);
// Call and return internally in the generated code in a way that
// is GC-safe (i.e., doesn't leave absolute code addresses on the stack)
......
......@@ -57,7 +57,7 @@ static void ProbeTable(Isolate* isolate,
// Check that the key in the entry matches the name.
__ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that the flags match what we're looking for.
__ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
......@@ -76,7 +76,7 @@ static void ProbeTable(Isolate* isolate,
// Check that the key in the entry matches the name.
__ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Get the code entry from the cache.
__ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
......@@ -126,11 +126,11 @@ static MaybeObject* GenerateDictionaryNegativeLookup(MacroAssembler* masm,
// Bail out if the receiver has a named interceptor or requires access checks.
__ test_b(FieldOperand(r0, Map::kBitFieldOffset),
kInterceptorOrAccessCheckNeededMask);
__ j(not_zero, miss_label, not_taken);
__ j(not_zero, miss_label);
// Check that receiver is a JSObject.
__ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
__ j(below, miss_label, not_taken);
__ j(below, miss_label);
// Load properties array.
Register properties = r0;
......@@ -189,7 +189,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Get the map of the receiver and compute the hash.
__ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
......@@ -250,11 +250,11 @@ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
Label* miss_label) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss_label, not_taken);
__ j(zero, miss_label);
// Check that the object is a JS array.
__ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
__ j(not_equal, miss_label, not_taken);
__ j(not_equal, miss_label);
// Load length directly from the JS array.
__ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
......@@ -271,14 +271,14 @@ static void GenerateStringCheck(MacroAssembler* masm,
Label* non_string_object) {
// Check that the object isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, smi, not_taken);
__ j(zero, smi);
// Check that the object is a string.
__ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
ASSERT(kNotStringTag != 0);
__ test(scratch, Immediate(kNotStringTag));
__ j(not_zero, non_string_object, not_taken);
__ j(not_zero, non_string_object);
}
......@@ -303,7 +303,7 @@ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
// Check if the object is a JSValue wrapper.
__ bind(&check_wrapper);
__ cmp(scratch1, JS_VALUE_TYPE);
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Check if the wrapped value is a string and load the length
// directly if it is.
......@@ -508,7 +508,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
CallOptimization optimization(lookup);
......@@ -725,12 +725,12 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Label* miss_label) {
// Check that the object isn't a smi.
__ test(receiver_reg, Immediate(kSmiTagMask));
__ j(zero, miss_label, not_taken);
__ j(zero, miss_label);
// Check that the map of the object hasn't changed.
__ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
Immediate(Handle<Map>(object->map())));
__ j(not_equal, miss_label, not_taken);
__ j(not_equal, miss_label);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
......@@ -820,7 +820,7 @@ MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
__ cmp(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
Immediate(masm->isolate()->factory()->the_hole_value()));
}
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
return cell;
}
......@@ -925,7 +925,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
__ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
__ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map())));
// Branch on the result of the map check.
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Check access rights to the global object. This has to happen
// after the map check so that we know that the object is
// actually a global object.
......@@ -945,7 +945,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
Immediate(Handle<Map>(current->map())));
// Branch on the result of the map check.
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Check access rights to the global object. This has to happen
// after the map check so that we know that the object is
// actually a global object.
......@@ -972,7 +972,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
// Check the holder map.
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
Immediate(Handle<Map>(holder->map())));
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Perform security check for access to the global object.
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
......@@ -1007,7 +1007,7 @@ void StubCompiler::GenerateLoadField(JSObject* object,
Label* miss) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// Check the prototype chain.
Register reg =
......@@ -1032,7 +1032,7 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
Label* miss) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// Check that the maps haven't changed.
Register reg =
......@@ -1099,7 +1099,7 @@ void StubCompiler::GenerateLoadConstant(JSObject* object,
Label* miss) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// Check that the maps haven't changed.
CheckPrototypes(object, receiver, holder,
......@@ -1126,7 +1126,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
// So far the most popular follow ups for interceptor loads are FIELD
// and CALLBACKS, so inline only them, other cases may be added
......@@ -1255,7 +1255,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
if (kind_ == Code::KEYED_CALL_IC) {
__ cmp(Operand(ecx), Immediate(Handle<String>(name)));
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
}
}
......@@ -1277,7 +1277,7 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
// the receiver cannot be a smi.
if (object != holder) {
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
}
// Check that the maps haven't changed.
......@@ -1304,17 +1304,17 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
// function can all use this call IC. Before we load through the
// function, we have to verify that it still is a function.
__ test(edi, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
__ j(zero, miss);
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
// Check the shared function info. Make sure it hasn't changed.
__ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
Immediate(Handle<SharedFunctionInfo>(function->shared())));
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
} else {
__ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
__ j(not_equal, miss, not_taken);
__ j(not_equal, miss);
}
}
......@@ -1352,7 +1352,7 @@ MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField(
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Do the right check and compute the holder register.
Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
......@@ -1362,9 +1362,9 @@ MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField(
// Check that the function really is a function.
__ test(edi, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Patch the receiver on the stack with the global proxy if
// necessary.
......@@ -2129,7 +2129,7 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss_before_stack_reserved, not_taken);
__ j(zero, &miss_before_stack_reserved);
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->call_const(), 1);
......@@ -2197,7 +2197,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
// Check that the receiver isn't a smi.
if (check != NUMBER_CHECK) {
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
}
// Make sure that it's okay not to patch the on stack receiver
......@@ -2229,7 +2229,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
} else {
// Check that the object is a string or a symbol.
__ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
__ j(above_equal, &miss, not_taken);
__ j(above_equal, &miss);
// Check that the maps starting from the prototype haven't changed.
GenerateDirectLoadGlobalFunctionPrototype(
masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
......@@ -2247,9 +2247,9 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
Label fast;
// Check that the object is a smi or a heap number.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &fast, taken);
__ j(zero, &fast);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
__ bind(&fast);
// Check that the maps starting from the prototype haven't changed.
GenerateDirectLoadGlobalFunctionPrototype(
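The NUMBER_CHECK arm above accepts either representation of a number: smis jump straight to the fast label, and anything else must carry the heap-number map. A hedged C++ paraphrase (v8-style names, for illustration):

    // Receiver qualifies for the number fast path if it is a smi or a HeapNumber.
    static bool IsNumberReceiver(Object* receiver) {
      if (receiver->IsSmi()) return true;  // the j(zero, &fast) case
      return HeapObject::cast(receiver)->map()->instance_type() == HEAP_NUMBER_TYPE;
    }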
......@@ -2269,9 +2269,9 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
Label fast;
// Check that the object is a boolean.
__ cmp(edx, factory()->true_value());
__ j(equal, &fast, taken);
__ j(equal, &fast);
__ cmp(edx, factory()->false_value());
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
__ bind(&fast);
// Check that the maps starting from the prototype haven't changed.
GenerateDirectLoadGlobalFunctionPrototype(
......@@ -2339,9 +2339,9 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
// Check that the function really is a function.
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
__ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Patch the receiver on the stack with the global proxy if
// necessary.
......@@ -2478,12 +2478,12 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Check that the object isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Check that the map of the object hasn't changed.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(object->map())));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
......@@ -2528,12 +2528,12 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Check that the object isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Check that the map of the object hasn't changed.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(receiver->map())));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) {
......@@ -2580,7 +2580,7 @@ MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
// Check that the map of the global has not changed.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(object->map())));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Compute the cell operand to use.
......@@ -2633,7 +2633,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
// Check that the name has not changed.
__ cmp(Operand(ecx), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Generate store field code. Trashes the name register.
GenerateStoreField(masm(),
......@@ -2666,30 +2666,30 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Check that the map matches.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(receiver->map())));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that the key is a smi.
__ test(ecx, Immediate(kSmiTagMask));
__ j(not_zero, &miss, not_taken);
__ j(not_zero, &miss);
// Get the elements array and make sure it is a fast elements array, not a copy-on-write ('cow') array.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
__ cmp(FieldOperand(edi, HeapObject::kMapOffset),
Immediate(factory()->fixed_array_map()));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that the key is within bounds.
if (receiver->IsJSArray()) {
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
__ j(above_equal, &miss, not_taken);
__ j(above_equal, &miss);
} else {
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // Compare smis.
__ j(above_equal, &miss, not_taken);
__ j(above_equal, &miss);
}
// Do the store and update the write barrier. Make sure to preserve
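The "Compare smis" trick above works because smi tagging is a left shift by one, which preserves ordering, so the tagged words can be compared without untagging; the unsigned above_equal condition additionally routes negative keys to the miss label. A sketch under those assumptions:

    inline intptr_t SmiTag(intptr_t value) { return value << 1; }
    inline bool KeyInBounds(intptr_t key, intptr_t length) {
      // Unsigned compare: a negative key becomes a huge unsigned word and misses.
      return static_cast<uintptr_t>(SmiTag(key)) < static_cast<uintptr_t>(SmiTag(length));
    }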
......@@ -2723,7 +2723,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
// Check that the receiver isn't a smi.
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
ASSERT(last->IsGlobalObject() || last->HasFastProperties());
......@@ -2876,7 +2876,7 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
// the receiver cannot be a smi.
if (object != holder) {
__ test(eax, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
}
// Check that the maps haven't changed.
......@@ -2893,7 +2893,7 @@ MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
// Check for deleted property if property can actually be deleted.
if (!is_dont_delete) {
__ cmp(ebx, factory()->the_hole_value());
__ j(equal, &miss, not_taken);
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ cmp(ebx, factory()->the_hole_value());
__ Check(not_equal, "DontDelete cells can't contain the hole");
......@@ -2929,7 +2929,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
......@@ -2959,7 +2959,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
MaybeObject* result = GenerateLoadCallback(receiver, holder, edx, eax, ebx,
ecx, edi, callback, name, &miss);
......@@ -2994,7 +2994,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
value, name, &miss);
......@@ -3022,7 +3022,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
LookupResult lookup;
LookupPostInterceptor(holder, name, &lookup);
......@@ -3058,7 +3058,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
GenerateLoadArrayLength(masm(), edx, ecx, &miss);
__ bind(&miss);
......@@ -3083,7 +3083,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
__ bind(&miss);
......@@ -3108,7 +3108,7 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
__ bind(&miss);
......@@ -3130,16 +3130,16 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
__ j(zero, &miss);
// Check that the map matches.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(receiver->map())));
__ j(not_equal, &miss, not_taken);
__ j(not_equal, &miss);
// Check that the key is a smi.
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &miss, not_taken);
__ j(not_zero, &miss);
// Get the elements array.
__ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
......@@ -3147,13 +3147,13 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
// Check that the key is within bounds.
__ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
__ j(above_equal, &miss, not_taken);
__ j(above_equal, &miss);
// Load the result and make sure it's not the hole.
__ mov(ebx, Operand(ecx, eax, times_2,
FixedArray::kHeaderSize - kHeapObjectTag));
__ cmp(ebx, factory()->the_hole_value());
__ j(equal, &miss, not_taken);
__ j(equal, &miss);
__ mov(eax, ebx);
__ ret(0);
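Two standard v8 idioms are packed into the load above: the smi key in eax already equals key << 1, so the times_2 scale factor produces key * 4 (one ia32 pointer) with no untagging; and fast elements arrays mark absent entries with a dedicated hole sentinel, so reading the_hole must fall through to the miss path. A sketch of the effective offset, assuming ia32 layout:

    // eax holds the key as a smi (key << 1); times_2 scaling gives key * 4.
    inline int FastElementOffset(int smi_key) {
      const int kHeapObjectTag = 1;         // heap pointers carry tag 1
      const int kFixedArrayHeaderSize = 8;  // map + length, 4 bytes each on ia32
      return smi_key * 2 + kFixedArrayHeaderSize - kHeapObjectTag;
    }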
......@@ -3182,7 +3182,7 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
__ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
__ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
__ cmp(ebx, factory()->undefined_value());
__ j(not_equal, &generic_stub_call, not_taken);
__ j(not_equal, &generic_stub_call);
#endif
// Load the initial map and verify that it is in fact a map.
......@@ -3315,11 +3315,11 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
// Check that the object isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &slow, not_taken);
__ j(zero, &slow);
// Check that the key is a smi.
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &slow, not_taken);
__ j(not_zero, &slow);
// Check that the map matches.
__ CheckMap(edx, Handle<Map>(receiver->map()), &slow, false);
......
......@@ -1215,7 +1215,7 @@ void Assembler::int3() {
}
void Assembler::j(Condition cc, Label* L, Hint hint, Label::Distance distance) {
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
if (cc == always) {
jmp(L);
return;
......@@ -1224,7 +1224,6 @@ void Assembler::j(Condition cc, Label* L, Hint hint, Label::Distance distance) {
}
EnsureSpace ensure_space(this);
ASSERT(is_uint4(cc));
if (FLAG_emit_branch_hints && hint != no_hint) emit(hint);
if (L->is_bound()) {
const int short_size = 2;
const int long_size = 6;
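The short_size/long_size constants name the two jcc encodings x86 offers; for reference (per the Intel manual, with cc the 4-bit condition the ASSERT checks):

    //   0x70 | cc, disp8          -> 2 bytes (short jump, Label::kNear)
    //   0x0F, 0x80 | cc, disp32   -> 6 bytes (long jump, Label::kFar)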
......
......@@ -327,22 +327,6 @@ inline Condition ReverseCondition(Condition cc) {
}
enum Hint {
no_hint = 0,
not_taken = 0x2e,
taken = 0x3e
};
// The result of negating a hint is as if the corresponding condition
// were negated by NegateCondition. That is, no_hint is mapped to
// itself and not_taken and taken are mapped to each other.
inline Hint NegateHint(Hint hint) {
return (hint == no_hint)
? no_hint
: ((hint == not_taken) ? taken : not_taken);
}
// -----------------------------------------------------------------------------
// Machine instruction Immediates
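The byte values in the removed enum were not arbitrary: 0x2e and 0x3e are the CS and DS segment-override prefixes, which the Pentium 4 reinterpreted as static not-taken/taken hints when they preceded a conditional jump; later microarchitectures simply ignore them, which is what makes the removal safe. A sketch of the encodings involved, using je as the example:

    //   2e 0f 84 <disp32>   je target, hinted "not taken" (honored on Pentium 4 only)
    //   3e 0f 84 <disp32>   je target, hinted "taken"
    //   0f 84 <disp32>      je target, what the assembler emits now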
......@@ -1214,11 +1198,7 @@ class Assembler : public AssemblerBase {
// Conditional jumps
void j(Condition cc,
Label* L,
Hint hint,
Label::Distance distance = Label::kFar);
void j(Condition cc, Label* L, Label::Distance distance = Label::kFar) {
j(cc, L, no_hint, distance);
}
void j(Condition cc, Handle<Code> target, RelocInfo::Mode rmode);
// Floating-point operations
......
......@@ -4859,9 +4859,9 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ j(either_smi, &miss, Label::kNear);
__ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
__ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, not_taken, Label::kNear);
__ j(not_equal, &miss, Label::kNear);
ASSERT(GetCondition() == equal);
__ subq(rax, rdx);
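The closing subq implements the compare stub's result convention for the equal condition asserted above: rax - rdx is left in rax, which is zero exactly when both registers reference the same object.

    //   rax = rax - rdx;   // 0 iff rax and rdx hold the same JSObject pointer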
......
......@@ -102,7 +102,7 @@ TEST(AssemblerIa321) {
__ bind(&C);
__ test(edx, Operand(edx));
__ j(not_zero, &L, taken);
__ j(not_zero, &L);
__ ret(0);
CodeDesc desc;
......@@ -140,7 +140,7 @@ TEST(AssemblerIa322) {
__ bind(&C);
__ test(edx, Operand(edx));
__ j(not_zero, &L, taken);
__ j(not_zero, &L);
__ ret(0);
// some relocated stuff here, not executed
......@@ -351,10 +351,10 @@ TEST(AssemblerIa329) {
__ fld_d(Operand(esp, 3 * kPointerSize));
__ fld_d(Operand(esp, 1 * kPointerSize));
__ FCmp();
__ j(parity_even, &nan_l, taken);
__ j(equal, &equal_l, taken);
__ j(below, &less_l, taken);
__ j(above, &greater_l, taken);
__ j(parity_even, &nan_l);
__ j(equal, &equal_l);
__ j(below, &less_l);
__ j(above, &greater_l);
__ mov(eax, kUndefined);
__ ret(0);
......
......@@ -330,11 +330,6 @@ TEST(DisasmIa320) {
__ j(less_equal, &Ljcc);
__ j(greater, &Ljcc);
// checking hints
__ j(zero, &Ljcc, taken);
__ j(zero, &Ljcc, not_taken);
// __ mov(Operand::StaticVariable(Isolate::handler_address()), eax);
// 0xD9 instructions
__ nop();
......