Commit 240cee91 authored by ricow@chromium.org

Add support for near labels.

This change introduces near labels in the assembler, allowing us to
optimize forward jumps (conditional and unconditional) when we can
guarantee that the jump is within the range -128 to +127.

I changed a large fraction of the existing Labels to NearLabels, and
left out the cases where it was not immediately clear whether one could
be used (that is, labels covering a large code block, or labels passed
to functions that we could potentially change to accept near labels).
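
In the stubs touched below, the typical usage pattern looks like this minimal
sketch (the compare/load instructions are illustrative only; the point is the
NearLabel, j, and bind calls):

  NearLabel done;
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done);  // Forward jump: emitted as a 2-byte short jcc whose
                      // 8-bit displacement is backpatched when the label binds.
  __ mov(eax, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);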

Review URL: http://codereview.chromium.org/3388004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5460 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent fbd67b10
...@@ -91,6 +91,57 @@ class Label BASE_EMBEDDED {
};

// -----------------------------------------------------------------------------
// NearLabels are labels used for short jumps (in Intel jargon).
// NearLabels should be used if it can be guaranteed that the jump range is
// within -128 to +127. We already use short jumps when jumping backwards,
// so using a NearLabel will only have performance impact if used for forward
// jumps.
class NearLabel BASE_EMBEDDED {
 public:
  NearLabel() { Unuse(); }
  ~NearLabel() { ASSERT(!is_linked()); }

  void Unuse() {
    pos_ = -1;
    unresolved_branches_ = 0;
#ifdef DEBUG
    for (int i = 0; i < kMaxUnresolvedBranches; i++) {
      unresolved_positions_[i] = -1;
    }
#endif
  }

  int pos() {
    ASSERT(is_bound());
    return pos_;
  }
  bool is_bound() { return pos_ >= 0; }
  bool is_linked() { return !is_bound() && unresolved_branches_ > 0; }
  bool is_unused() { return !is_bound() && unresolved_branches_ == 0; }

  void bind_to(int position) {
    ASSERT(!is_bound());
    pos_ = position;
  }

  void link_to(int position) {
    ASSERT(!is_bound());
    ASSERT(unresolved_branches_ < kMaxUnresolvedBranches);
    unresolved_positions_[unresolved_branches_++] = position;
  }

 private:
  static const int kMaxUnresolvedBranches = 8;
  int pos_;
  int unresolved_branches_;
  int unresolved_positions_[kMaxUnresolvedBranches];

  friend class Assembler;
};
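
Two practical limits follow directly from this definition: at most
kMaxUnresolvedBranches (8) forward jumps can be linked to a single NearLabel
before it is bound, and every linked jump site must end up within int8 range of
the bind position. Both limits are checked only by ASSERTs, i.e. in debug
builds.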
// -----------------------------------------------------------------------------
// Relocation information
...
...@@ -1511,32 +1511,6 @@ void Assembler::bind_to(Label* L, int pos) {
}

void Assembler::link_to(Label* L, Label* appendix) {
  EnsureSpace ensure_space(this);
  last_pc_ = NULL;
  if (appendix->is_linked()) {
    if (L->is_linked()) {
      // Append appendix to L's list.
      Label p;
      Label q = *L;
      do {
        p = q;
        Displacement disp = disp_at(&q);
        disp.next(&q);
      } while (q.is_linked());
      Displacement disp = disp_at(&p);
      disp.link_to(appendix);
      disp_at_put(&p, disp);
      p.Unuse();  // to avoid assertion failure in ~Label
    } else {
      // L is empty, simply use appendix.
      *L = *appendix;
    }
  }
  appendix->Unuse();  // appendix should not be used anymore
}

void Assembler::bind(Label* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = NULL;
...@@ -1545,6 +1519,19 @@ void Assembler::bind(Label* L) {
}

void Assembler::bind(NearLabel* L) {
  ASSERT(!L->is_bound());
  last_pc_ = NULL;
  while (L->unresolved_branches_ > 0) {
    int branch_pos = L->unresolved_positions_[L->unresolved_branches_ - 1];
    int disp = pc_offset() - branch_pos;
    ASSERT(is_int8(disp));
    set_byte_at(branch_pos - sizeof(int8_t), disp);
    L->unresolved_branches_--;
  }
  L->bind_to(pc_offset());
}
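
To make the backpatching arithmetic concrete, here is a small worked example
with made-up offsets: a forward short jmp emitted when pc_offset() is 0x20
writes 0xEB at 0x20 and a 0x00 placeholder at 0x21, and link_to() then records
branch_pos = 0x22, the offset just past the instruction. If the label is later
bound at pc_offset() 0x30, bind() computes disp = 0x30 - 0x22 = 0x0E and stores
it at branch_pos - sizeof(int8_t) = 0x21, which is exactly the displacement the
CPU adds to the address of the next instruction (0x22) to reach 0x30.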

void Assembler::call(Label* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
...@@ -1641,6 +1628,24 @@ void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
}

void Assembler::jmp(NearLabel* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (L->is_bound()) {
    const int short_size = 2;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    ASSERT(is_int8(offs - short_size));
    // 1110 1011 #8-bit disp.
    EMIT(0xEB);
    EMIT((offs - short_size) & 0xFF);
  } else {
    EMIT(0xEB);
    EMIT(0x00);  // The displacement will be resolved later.
    L->link_to(pc_offset());
  }
}
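
The bound case mirrors the backpatched one: offs = L->pos() - pc_offset() is
computed before the instruction is emitted, so the byte actually written is
offs - short_size, compensating for the two bytes about to be emitted. With
illustrative numbers, a label bound at 0x10 and a jmp starting at 0x20 gives
offs = -0x10 and an encoded byte of -0x12 (0xEE); at run time that displacement
is added to the next instruction's offset, 0x22, landing back on 0x10.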

void Assembler::j(Condition cc, Label* L, Hint hint) {
  EnsureSpace ensure_space(this);
...@@ -1696,6 +1701,27 @@ void Assembler::j(Condition cc, Handle<Code> code, Hint hint) {
}

void Assembler::j(Condition cc, NearLabel* L, Hint hint) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(0 <= cc && cc < 16);
  if (FLAG_emit_branch_hints && hint != no_hint) EMIT(hint);
  if (L->is_bound()) {
    const int short_size = 2;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    ASSERT(is_int8(offs - short_size));
    // 0111 tttn #8-bit disp
    EMIT(0x70 | cc);
    EMIT((offs - short_size) & 0xFF);
  } else {
    EMIT(0x70 | cc);
    EMIT(0x00);  // The displacement will be resolved later.
    L->link_to(pc_offset());
  }
}
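
For reference, this short form is two bytes (0x70 | cc followed by an 8-bit
displacement, plus one byte when a branch-hint prefix is emitted), whereas the
32-bit form used by j(Condition, Label*) for forward jumps is six bytes (0x0F,
0x80 | cc, and a 32-bit displacement), which is where the code-size saving
comes from.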

// FPU instructions.
void Assembler::fld(int i) {
...
...@@ -687,6 +687,7 @@ class Assembler : public Malloced {
  // but it may be bound only once.
  void bind(Label* L);  // binds an unbound label L to the current code position
  void bind(NearLabel* L);

  // Calls
  void call(Label* L);
...@@ -701,11 +702,17 @@ class Assembler : public Malloced {
  void jmp(const Operand& adr);
  void jmp(Handle<Code> code, RelocInfo::Mode rmode);

  // Short jump
  void jmp(NearLabel* L);

  // Conditional jumps
  void j(Condition cc, Label* L, Hint hint = no_hint);
  void j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint = no_hint);
  void j(Condition cc, Handle<Code> code, Hint hint = no_hint);

  // Conditional short jump
  void j(Condition cc, NearLabel* L, Hint hint = no_hint);

  // Floating-point operations
  void fld(int i);
  void fstp(int i);
...@@ -868,6 +875,7 @@ class Assembler : public Malloced {
 private:
  byte* addr_at(int pos) { return buffer_ + pos; }
  byte byte_at(int pos) { return buffer_[pos]; }
  void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
  uint32_t long_at(int pos) {
    return *reinterpret_cast<uint32_t*>(addr_at(pos));
  }
...@@ -902,7 +910,6 @@ class Assembler : public Malloced {
  // labels
  void print(Label* L);
  void bind_to(Label* L, int pos);
  void link_to(Label* L, Label* appendix);

  // displacements
  inline Displacement disp_at(Label* L);
...
...@@ -208,7 +208,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { ...@@ -208,7 +208,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined). // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) { void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string; NearLabel false_result, true_result, not_string;
__ mov(eax, Operand(esp, 1 * kPointerSize)); __ mov(eax, Operand(esp, 1 * kPointerSize));
// 'null' => false. // 'null' => false.
...@@ -966,7 +966,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { ...@@ -966,7 +966,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ bind(&non_smi_result); __ bind(&non_smi_result);
// Allocate a heap number if needed. // Allocate a heap number if needed.
__ mov(ebx, Operand(eax)); // ebx: result __ mov(ebx, Operand(eax)); // ebx: result
Label skip_allocation; NearLabel skip_allocation;
switch (mode_) { switch (mode_) {
case OVERWRITE_LEFT: case OVERWRITE_LEFT:
case OVERWRITE_RIGHT: case OVERWRITE_RIGHT:
...@@ -1036,7 +1036,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { ...@@ -1036,7 +1036,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
} }
// Test if left operand is a string. // Test if left operand is a string.
Label lhs_not_string; NearLabel lhs_not_string;
__ test(lhs, Immediate(kSmiTagMask)); __ test(lhs, Immediate(kSmiTagMask));
__ j(zero, &lhs_not_string); __ j(zero, &lhs_not_string);
__ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx); __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
...@@ -1045,7 +1045,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { ...@@ -1045,7 +1045,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
__ TailCallStub(&string_add_left_stub); __ TailCallStub(&string_add_left_stub);
Label call_runtime_with_args; NearLabel call_runtime_with_args;
// Left operand is not a string, test right. // Left operand is not a string, test right.
__ bind(&lhs_not_string); __ bind(&lhs_not_string);
__ test(rhs, Immediate(kSmiTagMask)); __ test(rhs, Immediate(kSmiTagMask));
...@@ -1221,8 +1221,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { ...@@ -1221,8 +1221,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
// Test that eax is a number. // Test that eax is a number.
Label runtime_call; Label runtime_call;
Label runtime_call_clear_stack; Label runtime_call_clear_stack;
Label input_not_smi; NearLabel input_not_smi;
Label loaded; NearLabel loaded;
__ mov(eax, Operand(esp, kPointerSize)); __ mov(eax, Operand(esp, kPointerSize));
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &input_not_smi); __ j(not_zero, &input_not_smi);
...@@ -1295,7 +1295,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { ...@@ -1295,7 +1295,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ lea(ecx, Operand(ecx, ecx, times_2, 0)); __ lea(ecx, Operand(ecx, ecx, times_2, 0));
__ lea(ecx, Operand(eax, ecx, times_4, 0)); __ lea(ecx, Operand(eax, ecx, times_4, 0));
// Check if cache matches: Double value is stored in uint32_t[2] array. // Check if cache matches: Double value is stored in uint32_t[2] array.
Label cache_miss; NearLabel cache_miss;
__ cmp(ebx, Operand(ecx, 0)); __ cmp(ebx, Operand(ecx, 0));
__ j(not_equal, &cache_miss); __ j(not_equal, &cache_miss);
__ cmp(edx, Operand(ecx, kIntSize)); __ cmp(edx, Operand(ecx, kIntSize));
...@@ -1338,7 +1338,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { ...@@ -1338,7 +1338,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// Only free register is edi. // Only free register is edi.
Label done; NearLabel done;
ASSERT(type_ == TranscendentalCache::SIN || ASSERT(type_ == TranscendentalCache::SIN ||
type_ == TranscendentalCache::COS); type_ == TranscendentalCache::COS);
// More transcendental types can be added later. // More transcendental types can be added later.
...@@ -1346,7 +1346,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { ...@@ -1346,7 +1346,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// Both fsin and fcos require arguments in the range +/-2^63 and // Both fsin and fcos require arguments in the range +/-2^63 and
// return NaN for infinities and NaN. They can share all code except // return NaN for infinities and NaN. They can share all code except
// the actual fsin/fcos operation. // the actual fsin/fcos operation.
Label in_range; NearLabel in_range;
// If argument is outside the range -2^63..2^63, fsin/cos doesn't // If argument is outside the range -2^63..2^63, fsin/cos doesn't
// work. We must reduce it to the appropriate range. // work. We must reduce it to the appropriate range.
__ mov(edi, edx); __ mov(edi, edx);
...@@ -1357,7 +1357,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { ...@@ -1357,7 +1357,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
__ j(below, &in_range, taken); __ j(below, &in_range, taken);
// Check for infinity and NaN. Both return NaN for sin. // Check for infinity and NaN. Both return NaN for sin.
__ cmp(Operand(edi), Immediate(0x7ff00000)); __ cmp(Operand(edi), Immediate(0x7ff00000));
Label non_nan_result; NearLabel non_nan_result;
__ j(not_equal, &non_nan_result, taken); __ j(not_equal, &non_nan_result, taken);
// Input is +/-Infinity or NaN. Result is NaN. // Input is +/-Infinity or NaN. Result is NaN.
__ fstp(0); __ fstp(0);
...@@ -1377,7 +1377,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { ...@@ -1377,7 +1377,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
__ fld(1); __ fld(1);
// FPU Stack: input, 2*pi, input. // FPU Stack: input, 2*pi, input.
{ {
Label no_exceptions; NearLabel no_exceptions;
__ fwait(); __ fwait();
__ fnstsw_ax(); __ fnstsw_ax();
// Clear if Illegal Operand or Zero Division exceptions are set. // Clear if Illegal Operand or Zero Division exceptions are set.
...@@ -1389,7 +1389,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { ...@@ -1389,7 +1389,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// Compute st(0) % st(1) // Compute st(0) % st(1)
{ {
Label partial_remainder_loop; NearLabel partial_remainder_loop;
__ bind(&partial_remainder_loop); __ bind(&partial_remainder_loop);
__ fprem1(); __ fprem1();
__ fwait(); __ fwait();
...@@ -1552,7 +1552,7 @@ void IntegerConvert(MacroAssembler* masm, ...@@ -1552,7 +1552,7 @@ void IntegerConvert(MacroAssembler* masm,
__ shr_cl(scratch2); __ shr_cl(scratch2);
// Now the unsigned answer is in scratch2. We need to move it to ecx and // Now the unsigned answer is in scratch2. We need to move it to ecx and
// we may need to fix the sign. // we may need to fix the sign.
Label negative; NearLabel negative;
__ xor_(ecx, Operand(ecx)); __ xor_(ecx, Operand(ecx));
__ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset)); __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
__ j(greater, &negative); __ j(greater, &negative);
...@@ -1702,7 +1702,7 @@ void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm, ...@@ -1702,7 +1702,7 @@ void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
Register number) { Register number) {
Label load_smi, done; NearLabel load_smi, done;
__ test(number, Immediate(kSmiTagMask)); __ test(number, Immediate(kSmiTagMask));
__ j(zero, &load_smi, not_taken); __ j(zero, &load_smi, not_taken);
...@@ -1720,7 +1720,7 @@ void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, ...@@ -1720,7 +1720,7 @@ void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) { void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
Label load_smi_edx, load_eax, load_smi_eax, done; NearLabel load_smi_edx, load_eax, load_smi_eax, done;
// Load operand in edx into xmm0. // Load operand in edx into xmm0.
__ test(edx, Immediate(kSmiTagMask)); __ test(edx, Immediate(kSmiTagMask));
__ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
...@@ -1750,7 +1750,7 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) { ...@@ -1750,7 +1750,7 @@ void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
Label* not_numbers) { Label* not_numbers) {
Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
// Load operand in edx into xmm0, or branch to not_numbers. // Load operand in edx into xmm0, or branch to not_numbers.
__ test(edx, Immediate(kSmiTagMask)); __ test(edx, Immediate(kSmiTagMask));
__ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
...@@ -1798,7 +1798,7 @@ void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm, ...@@ -1798,7 +1798,7 @@ void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm, void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
Register scratch, Register scratch,
ArgLocation arg_location) { ArgLocation arg_location) {
Label load_smi_1, load_smi_2, done_load_1, done; NearLabel load_smi_1, load_smi_2, done_load_1, done;
if (arg_location == ARGS_IN_REGISTERS) { if (arg_location == ARGS_IN_REGISTERS) {
__ mov(scratch, edx); __ mov(scratch, edx);
} else { } else {
...@@ -1857,7 +1857,7 @@ void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm, ...@@ -1857,7 +1857,7 @@ void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
Label* non_float, Label* non_float,
Register scratch) { Register scratch) {
Label test_other, done; NearLabel test_other, done;
// Test if both operands are floats or smi -> scratch=k_is_float; // Test if both operands are floats or smi -> scratch=k_is_float;
// Otherwise scratch = k_not_float. // Otherwise scratch = k_not_float.
__ test(edx, Immediate(kSmiTagMask)); __ test(edx, Immediate(kSmiTagMask));
...@@ -1884,7 +1884,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) { ...@@ -1884,7 +1884,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
if (op_ == Token::SUB) { if (op_ == Token::SUB) {
if (include_smi_code_) { if (include_smi_code_) {
// Check whether the value is a smi. // Check whether the value is a smi.
Label try_float; NearLabel try_float;
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &try_float, not_taken); __ j(not_zero, &try_float, not_taken);
...@@ -1953,7 +1953,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) { ...@@ -1953,7 +1953,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
&slow); &slow);
// Do the bitwise operation and check if the result fits in a smi. // Do the bitwise operation and check if the result fits in a smi.
Label try_float; NearLabel try_float;
__ not_(ecx); __ not_(ecx);
__ cmp(ecx, 0xc0000000); __ cmp(ecx, 0xc0000000);
__ j(sign, &try_float, not_taken); __ j(sign, &try_float, not_taken);
...@@ -2026,7 +2026,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { ...@@ -2026,7 +2026,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
__ j(not_zero, &slow, not_taken); __ j(not_zero, &slow, not_taken);
// Check if the calling frame is an arguments adaptor frame. // Check if the calling frame is an arguments adaptor frame.
Label adaptor; NearLabel adaptor;
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset)); __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
__ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
...@@ -2103,7 +2103,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { ...@@ -2103,7 +2103,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Try the new space allocation. Start out with computing the size of // Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array. // the arguments object and the elements array.
Label add_arguments_object; NearLabel add_arguments_object;
__ bind(&try_allocate); __ bind(&try_allocate);
__ test(ecx, Operand(ecx)); __ test(ecx, Operand(ecx));
__ j(zero, &add_arguments_object); __ j(zero, &add_arguments_object);
...@@ -2155,7 +2155,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { ...@@ -2155,7 +2155,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
__ SmiUntag(ecx); __ SmiUntag(ecx);
// Copy the fixed array slots. // Copy the fixed array slots.
Label loop; NearLabel loop;
__ bind(&loop); __ bind(&loop);
__ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
__ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
...@@ -2383,7 +2383,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { ...@@ -2383,7 +2383,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 4: End of string data // Argument 4: End of string data
// Argument 3: Start of string data // Argument 3: Start of string data
Label setup_two_byte, setup_rest; NearLabel setup_two_byte, setup_rest;
__ test(edi, Operand(edi)); __ test(edi, Operand(edi));
__ mov(edi, FieldOperand(eax, String::kLengthOffset)); __ mov(edi, FieldOperand(eax, String::kLengthOffset));
__ j(zero, &setup_two_byte); __ j(zero, &setup_two_byte);
...@@ -2477,7 +2477,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { ...@@ -2477,7 +2477,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// ebx: last_match_info backing store (FixedArray) // ebx: last_match_info backing store (FixedArray)
// ecx: offsets vector // ecx: offsets vector
// edx: number of capture registers // edx: number of capture registers
Label next_capture, done; NearLabel next_capture, done;
// Capture register counter starts from number of capture registers and // Capture register counter starts from number of capture registers and
// counts down until wraping after zero. // counts down until wraping after zero.
__ bind(&next_capture); __ bind(&next_capture);
...@@ -2533,13 +2533,13 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, ...@@ -2533,13 +2533,13 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
// number string cache for smis is just the smi value, and the hash for // number string cache for smis is just the smi value, and the hash for
// doubles is the xor of the upper and lower words. See // doubles is the xor of the upper and lower words. See
// Heap::GetNumberStringCache. // Heap::GetNumberStringCache.
Label smi_hash_calculated; NearLabel smi_hash_calculated;
Label load_result_from_cache; NearLabel load_result_from_cache;
if (object_is_smi) { if (object_is_smi) {
__ mov(scratch, object); __ mov(scratch, object);
__ SmiUntag(scratch); __ SmiUntag(scratch);
} else { } else {
Label not_smi, hash_calculated; NearLabel not_smi, hash_calculated;
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
__ test(object, Immediate(kSmiTagMask)); __ test(object, Immediate(kSmiTagMask));
__ j(not_zero, &not_smi); __ j(not_zero, &not_smi);
...@@ -2663,7 +2663,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2663,7 +2663,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
if (cc_ != equal) { if (cc_ != equal) {
// Check for undefined. undefined OP undefined is false even though // Check for undefined. undefined OP undefined is false even though
// undefined == undefined. // undefined == undefined.
Label check_for_nan; NearLabel check_for_nan;
__ cmp(edx, Factory::undefined_value()); __ cmp(edx, Factory::undefined_value());
__ j(not_equal, &check_for_nan); __ j(not_equal, &check_for_nan);
__ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
...@@ -2678,7 +2678,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2678,7 +2678,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ Set(eax, Immediate(Smi::FromInt(EQUAL))); __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0); __ ret(0);
} else { } else {
Label heap_number; NearLabel heap_number;
__ cmp(FieldOperand(edx, HeapObject::kMapOffset), __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Factory::heap_number_map())); Immediate(Factory::heap_number_map()));
__ j(equal, &heap_number); __ j(equal, &heap_number);
...@@ -2713,7 +2713,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2713,7 +2713,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ setcc(above_equal, eax); __ setcc(above_equal, eax);
__ ret(0); __ ret(0);
} else { } else {
Label nan; NearLabel nan;
__ j(above_equal, &nan); __ j(above_equal, &nan);
__ Set(eax, Immediate(Smi::FromInt(EQUAL))); __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
__ ret(0); __ ret(0);
...@@ -2730,7 +2730,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2730,7 +2730,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Non-strict object equality is slower, so it is handled later in the stub. // Non-strict object equality is slower, so it is handled later in the stub.
if (cc_ == equal && strict_) { if (cc_ == equal && strict_) {
Label slow; // Fallthrough label. Label slow; // Fallthrough label.
Label not_smis; NearLabel not_smis;
// If we're doing a strict equality comparison, we don't have to do // If we're doing a strict equality comparison, we don't have to do
// type conversion, so we generate code to do fast comparison for objects // type conversion, so we generate code to do fast comparison for objects
// and oddballs. Non-smi numbers and strings still go through the usual // and oddballs. Non-smi numbers and strings still go through the usual
...@@ -2771,13 +2771,13 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2771,13 +2771,13 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Get the type of the first operand. // Get the type of the first operand.
// If the first object is a JS object, we have done pointer comparison. // If the first object is a JS object, we have done pointer comparison.
Label first_non_object; NearLabel first_non_object;
STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
__ j(below, &first_non_object); __ j(below, &first_non_object);
// Return non-zero (eax is not zero) // Return non-zero (eax is not zero)
Label return_not_equal; NearLabel return_not_equal;
STATIC_ASSERT(kHeapObjectTag != 0); STATIC_ASSERT(kHeapObjectTag != 0);
__ bind(&return_not_equal); __ bind(&return_not_equal);
__ ret(0); __ ret(0);
...@@ -2828,7 +2828,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2828,7 +2828,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Don't base result on EFLAGS when a NaN is involved. // Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered, not_taken); __ j(parity_even, &unordered, not_taken);
Label below_label, above_label; NearLabel below_label, above_label;
// Return a result of -1, 0, or 1, based on EFLAGS. // Return a result of -1, 0, or 1, based on EFLAGS.
__ j(below, &below_label, not_taken); __ j(below, &below_label, not_taken);
__ j(above, &above_label, not_taken); __ j(above, &above_label, not_taken);
...@@ -2893,8 +2893,8 @@ void CompareStub::Generate(MacroAssembler* masm) { ...@@ -2893,8 +2893,8 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Non-strict equality. Objects are unequal if // Non-strict equality. Objects are unequal if
// they are both JSObjects and not undetectable, // they are both JSObjects and not undetectable,
// and their pointers are different. // and their pointers are different.
Label not_both_objects; NearLabel not_both_objects;
Label return_unequal; NearLabel return_unequal;
// At most one is a smi, so we can test for smi by adding the two. // At most one is a smi, so we can test for smi by adding the two.
// A smi plus a heap object has the low bit set, a heap object plus // A smi plus a heap object has the low bit set, a heap object plus
// a heap object has the low bit clear. // a heap object has the low bit clear.
...@@ -3056,7 +3056,7 @@ void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { ...@@ -3056,7 +3056,7 @@ void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// not NULL. The frame pointer is NULL in the exception handler of // not NULL. The frame pointer is NULL in the exception handler of
// a JS entry frame. // a JS entry frame.
__ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL. __ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL.
Label skip; NearLabel skip;
__ cmp(ebp, 0); __ cmp(ebp, 0);
__ j(equal, &skip, not_taken); __ j(equal, &skip, not_taken);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
...@@ -3188,7 +3188,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, ...@@ -3188,7 +3188,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Make sure we're not trying to return 'the hole' from the runtime // Make sure we're not trying to return 'the hole' from the runtime
// call as this may lead to crashes in the IC code later. // call as this may lead to crashes in the IC code later.
if (FLAG_debug_code) { if (FLAG_debug_code) {
Label okay; NearLabel okay;
__ cmp(eax, Factory::the_hole_value()); __ cmp(eax, Factory::the_hole_value());
__ j(not_equal, &okay); __ j(not_equal, &okay);
__ int3(); __ int3();
...@@ -3250,7 +3250,7 @@ void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, ...@@ -3250,7 +3250,7 @@ void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
__ mov(esp, Operand::StaticVariable(handler_address)); __ mov(esp, Operand::StaticVariable(handler_address));
// Unwind the handlers until the ENTRY handler is found. // Unwind the handlers until the ENTRY handler is found.
Label loop, done; NearLabel loop, done;
__ bind(&loop); __ bind(&loop);
// Load the type of the current stack handler. // Load the type of the current stack handler.
const int kStateOffset = StackHandlerConstants::kStateOffset; const int kStateOffset = StackHandlerConstants::kStateOffset;
...@@ -3468,7 +3468,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { ...@@ -3468,7 +3468,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// edx is function, eax is map. // edx is function, eax is map.
// Look up the function and the map in the instanceof cache. // Look up the function and the map in the instanceof cache.
Label miss; NearLabel miss;
ExternalReference roots_address = ExternalReference::roots_address(); ExternalReference roots_address = ExternalReference::roots_address();
__ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address)); __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address));
...@@ -3500,7 +3500,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { ...@@ -3500,7 +3500,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset)); __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));
// Loop through the prototype chain looking for the function prototype. // Loop through the prototype chain looking for the function prototype.
Label loop, is_instance, is_not_instance; NearLabel loop, is_instance, is_not_instance;
__ bind(&loop); __ bind(&loop);
__ cmp(ecx, Operand(ebx)); __ cmp(ecx, Operand(ebx));
__ j(equal, &is_instance); __ j(equal, &is_instance);
...@@ -3837,7 +3837,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { ...@@ -3837,7 +3837,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// eax: first string // eax: first string
// edx: second string // edx: second string
// Check if either of the strings are empty. In that case return the other. // Check if either of the strings are empty. In that case return the other.
Label second_not_zero_length, both_not_zero_length; NearLabel second_not_zero_length, both_not_zero_length;
__ mov(ecx, FieldOperand(edx, String::kLengthOffset)); __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
__ test(ecx, Operand(ecx)); __ test(ecx, Operand(ecx));
...@@ -4123,7 +4123,7 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, ...@@ -4123,7 +4123,7 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
Register count, Register count,
Register scratch, Register scratch,
bool ascii) { bool ascii) {
Label loop; NearLabel loop;
__ bind(&loop); __ bind(&loop);
// This loop just copies one character at a time, as it is only used for very // This loop just copies one character at a time, as it is only used for very
// short strings. // short strings.
...@@ -4170,7 +4170,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm, ...@@ -4170,7 +4170,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
} }
// Don't enter the rep movs if there are less than 4 bytes to copy. // Don't enter the rep movs if there are less than 4 bytes to copy.
Label last_bytes; NearLabel last_bytes;
__ test(count, Immediate(~3)); __ test(count, Immediate(~3));
__ j(zero, &last_bytes); __ j(zero, &last_bytes);
...@@ -4190,7 +4190,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm, ...@@ -4190,7 +4190,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
__ j(zero, &done); __ j(zero, &done);
// Copy remaining characters. // Copy remaining characters.
Label loop; NearLabel loop;
__ bind(&loop); __ bind(&loop);
__ mov_b(scratch, Operand(src, 0)); __ mov_b(scratch, Operand(src, 0));
__ mov_b(Operand(dest, 0), scratch); __ mov_b(Operand(dest, 0), scratch);
...@@ -4216,7 +4216,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, ...@@ -4216,7 +4216,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// Make sure that both characters are not digits as such strings has a // Make sure that both characters are not digits as such strings has a
// different hash algorithm. Don't try to look for these in the symbol table. // different hash algorithm. Don't try to look for these in the symbol table.
Label not_array_index; NearLabel not_array_index;
__ mov(scratch, c1); __ mov(scratch, c1);
__ sub(Operand(scratch), Immediate(static_cast<int>('0'))); __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
__ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0'))); __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
...@@ -4374,7 +4374,7 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm, ...@@ -4374,7 +4374,7 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
__ add(hash, Operand(scratch)); __ add(hash, Operand(scratch));
// if (hash == 0) hash = 27; // if (hash == 0) hash = 27;
Label hash_not_zero; NearLabel hash_not_zero;
__ test(hash, Operand(hash)); __ test(hash, Operand(hash));
__ j(not_zero, &hash_not_zero); __ j(not_zero, &hash_not_zero);
__ mov(hash, Immediate(27)); __ mov(hash, Immediate(27));
...@@ -4543,7 +4543,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, ...@@ -4543,7 +4543,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ IncrementCounter(&Counters::string_compare_native, 1); __ IncrementCounter(&Counters::string_compare_native, 1);
// Find minimum length. // Find minimum length.
Label left_shorter; NearLabel left_shorter;
__ mov(scratch1, FieldOperand(left, String::kLengthOffset)); __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
__ mov(scratch3, scratch1); __ mov(scratch3, scratch1);
__ sub(scratch3, FieldOperand(right, String::kLengthOffset)); __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
...@@ -4579,7 +4579,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm, ...@@ -4579,7 +4579,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
{ {
// Compare loop. // Compare loop.
Label loop; NearLabel loop;
__ bind(&loop); __ bind(&loop);
// Compare characters. // Compare characters.
__ mov_b(scratch2, Operand(left, index, times_1, 0)); __ mov_b(scratch2, Operand(left, index, times_1, 0));
...@@ -4625,7 +4625,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) { ...@@ -4625,7 +4625,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
__ mov(edx, Operand(esp, 2 * kPointerSize)); // left __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
__ mov(eax, Operand(esp, 1 * kPointerSize)); // right __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
Label not_same; NearLabel not_same;
__ cmp(edx, Operand(eax)); __ cmp(edx, Operand(eax));
__ j(not_equal, &not_same); __ j(not_equal, &not_same);
STATIC_ASSERT(EQUAL == 0); STATIC_ASSERT(EQUAL == 0);
......
...@@ -162,7 +162,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { ...@@ -162,7 +162,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
} }
{ Comment cmnt(masm_, "[ Stack check"); { Comment cmnt(masm_, "[ Stack check");
Label ok; NearLabel ok;
ExternalReference stack_limit = ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(); ExternalReference::address_of_stack_limit();
__ cmp(esp, Operand::StaticVariable(stack_limit)); __ cmp(esp, Operand::StaticVariable(stack_limit));
...@@ -403,7 +403,7 @@ void FullCodeGenerator::Apply(Expression::Context context, ...@@ -403,7 +403,7 @@ void FullCodeGenerator::Apply(Expression::Context context,
break; break;
case Expression::kValue: { case Expression::kValue: {
Label done; NearLabel done;
switch (location_) { switch (location_) {
case kAccumulator: case kAccumulator:
__ bind(materialize_true); __ bind(materialize_true);
...@@ -686,7 +686,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { ...@@ -686,7 +686,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ mov(edx, Operand(esp, 0)); // Switch value. __ mov(edx, Operand(esp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
if (inline_smi_code) { if (inline_smi_code) {
Label slow_case; NearLabel slow_case;
__ mov(ecx, edx); __ mov(ecx, edx);
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); __ test(ecx, Immediate(kSmiTagMask));
...@@ -749,7 +749,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -749,7 +749,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ j(equal, &exit); __ j(equal, &exit);
// Convert the object to a JS object. // Convert the object to a JS object.
Label convert, done_convert; NearLabel convert, done_convert;
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(zero, &convert); __ j(zero, &convert);
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
...@@ -790,7 +790,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -790,7 +790,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ j(zero, &call_runtime); __ j(zero, &call_runtime);
// For all objects but the receiver, check that the cache is empty. // For all objects but the receiver, check that the cache is empty.
Label check_prototype; NearLabel check_prototype;
__ cmp(ecx, Operand(eax)); __ cmp(ecx, Operand(eax));
__ j(equal, &check_prototype); __ j(equal, &check_prototype);
__ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset)); __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
...@@ -805,7 +805,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -805,7 +805,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// The enum cache is valid. Load the map of the object being // The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration. // iterated over and use the cache for the iteration.
Label use_cache; NearLabel use_cache;
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
__ jmp(&use_cache); __ jmp(&use_cache);
...@@ -817,7 +817,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -817,7 +817,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// If we got a map from the runtime call, we can do a fast // If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have // modification check. Otherwise, we got a fixed array, and we have
// to do a slow check. // to do a slow check.
Label fixed_array; NearLabel fixed_array;
__ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::meta_map()); __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::meta_map());
__ j(not_equal, &fixed_array); __ j(not_equal, &fixed_array);
...@@ -859,7 +859,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -859,7 +859,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Check if the expected map still matches that of the enumerable. // Check if the expected map still matches that of the enumerable.
// If not, we have to filter the key. // If not, we have to filter the key.
Label update_each; NearLabel update_each;
__ mov(ecx, Operand(esp, 4 * kPointerSize)); __ mov(ecx, Operand(esp, 4 * kPointerSize));
__ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset)); __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
__ j(equal, &update_each); __ j(equal, &update_each);
...@@ -882,7 +882,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { ...@@ -882,7 +882,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
EmitAssignment(stmt->each()); EmitAssignment(stmt->each());
// Generate code for the body of the loop. // Generate code for the body of the loop.
Label stack_limit_hit, stack_check_done; Label stack_limit_hit;
NearLabel stack_check_done;
Visit(stmt->body()); Visit(stmt->body());
__ StackLimitCheck(&stack_limit_hit); __ StackLimitCheck(&stack_limit_hit);
...@@ -964,7 +965,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( ...@@ -964,7 +965,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
if (s != NULL && s->is_eval_scope()) { if (s != NULL && s->is_eval_scope()) {
// Loop up the context chain. There is no frame effect so it is // Loop up the context chain. There is no frame effect so it is
// safe to use raw labels here. // safe to use raw labels here.
Label next, fast; NearLabel next, fast;
if (!context.is(temp)) { if (!context.is(temp)) {
__ mov(temp, context); __ mov(temp, context);
} }
...@@ -1124,7 +1125,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var, ...@@ -1124,7 +1125,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
if (var->mode() == Variable::CONST) { if (var->mode() == Variable::CONST) {
// Constants may be the hole value if they have not been initialized. // Constants may be the hole value if they have not been initialized.
// Unhole them. // Unhole them.
Label done; NearLabel done;
MemOperand slot_operand = EmitSlotSearch(slot, eax); MemOperand slot_operand = EmitSlotSearch(slot, eax);
__ mov(eax, slot_operand); __ mov(eax, slot_operand);
__ cmp(eax, Factory::the_hole_value()); __ cmp(eax, Factory::the_hole_value());
...@@ -1173,7 +1174,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var, ...@@ -1173,7 +1174,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral"); Comment cmnt(masm_, "[ RegExpLiteral");
Label materialized; NearLabel materialized;
// Registers will be used as follows: // Registers will be used as follows:
// edi = JS function. // edi = JS function.
// ecx = literals array. // ecx = literals array.
...@@ -1494,7 +1495,8 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, ...@@ -1494,7 +1495,8 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
OverwriteMode mode, OverwriteMode mode,
bool left_is_constant_smi, bool left_is_constant_smi,
Smi* value) { Smi* value) {
Label call_stub, done; NearLabel call_stub;
Label done;
__ add(Operand(eax), Immediate(value)); __ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub); __ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
...@@ -2719,7 +2721,7 @@ void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) { ...@@ -2719,7 +2721,7 @@ void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
VisitForValue(args->at(0), kAccumulator); // Load the object. VisitForValue(args->at(0), kAccumulator); // Load the object.
Label done; NearLabel done;
// If the object is a smi return the object. // If the object is a smi return the object.
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(zero, &done); __ j(zero, &done);
...@@ -2750,7 +2752,7 @@ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { ...@@ -2750,7 +2752,7 @@ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
VisitForValue(args->at(1), kAccumulator); // Load the value. VisitForValue(args->at(1), kAccumulator); // Load the value.
__ pop(ebx); // eax = value. ebx = object. __ pop(ebx); // eax = value. ebx = object.
Label done; NearLabel done;
// If the object is a smi, return the value. // If the object is a smi, return the value.
__ test(ebx, Immediate(kSmiTagMask)); __ test(ebx, Immediate(kSmiTagMask));
__ j(zero, &done); __ j(zero, &done);
...@@ -3279,7 +3281,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { ...@@ -3279,7 +3281,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Label done; Label done;
bool inline_smi_case = ShouldInlineSmiCase(expr->op()); bool inline_smi_case = ShouldInlineSmiCase(expr->op());
if (inline_smi_case) { if (inline_smi_case) {
Label call_stub; NearLabel call_stub;
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &call_stub); __ j(not_zero, &call_stub);
__ lea(eax, Operand(eax, kSmiTagMask)); __ lea(eax, Operand(eax, kSmiTagMask));
...@@ -3357,7 +3359,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { ...@@ -3357,7 +3359,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} }
// Call ToNumber only if operand is not a smi. // Call ToNumber only if operand is not a smi.
Label no_conversion; NearLabel no_conversion;
if (ShouldInlineSmiCase(expr->op())) { if (ShouldInlineSmiCase(expr->op())) {
__ test(eax, Immediate(kSmiTagMask)); __ test(eax, Immediate(kSmiTagMask));
__ j(zero, &no_conversion); __ j(zero, &no_conversion);
...@@ -3395,7 +3397,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { ...@@ -3395,7 +3397,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} }
// Inline smi case if we are in a loop. // Inline smi case if we are in a loop.
Label stub_call, done; NearLabel stub_call;
Label done;
if (ShouldInlineSmiCase(expr->op())) { if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) { if (expr->op() == Token::INC) {
__ add(Operand(eax), Immediate(Smi::FromInt(1))); __ add(Operand(eax), Immediate(Smi::FromInt(1)));
...@@ -3684,7 +3687,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { ...@@ -3684,7 +3687,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
bool inline_smi_code = ShouldInlineSmiCase(op); bool inline_smi_code = ShouldInlineSmiCase(op);
if (inline_smi_code) { if (inline_smi_code) {
Label slow_case; NearLabel slow_case;
__ mov(ecx, Operand(edx)); __ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax)); __ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask)); __ test(ecx, Immediate(kSmiTagMask));
......
...@@ -418,6 +418,20 @@ void Assembler::bind(Label* L) {
}

void Assembler::bind(NearLabel* L) {
  ASSERT(!L->is_bound());
  last_pc_ = NULL;
  while (L->unresolved_branches_ > 0) {
    int branch_pos = L->unresolved_positions_[L->unresolved_branches_ - 1];
    int disp = pc_offset() - branch_pos;
    ASSERT(is_int8(disp));
    set_byte_at(branch_pos - sizeof(int8_t), disp);
    L->unresolved_branches_--;
  }
  L->bind_to(pc_offset());
}

void Assembler::GrowBuffer() {
  ASSERT(buffer_overflow());
  if (!own_buffer_) FATAL("external code buffer is too small");
...@@ -1227,6 +1241,27 @@ void Assembler::j(Condition cc,
}

void Assembler::j(Condition cc, NearLabel* L, Hint hint) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(0 <= cc && cc < 16);
  if (FLAG_emit_branch_hints && hint != no_hint) emit(hint);
  if (L->is_bound()) {
    const int short_size = 2;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    ASSERT(is_int8(offs - short_size));
    // 0111 tttn #8-bit disp
    emit(0x70 | cc);
    emit((offs - short_size) & 0xFF);
  } else {
    emit(0x70 | cc);
    emit(0x00);  // The displacement will be resolved later.
    L->link_to(pc_offset());
  }
}

void Assembler::jmp(Label* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
...@@ -1269,6 +1304,25 @@ void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
}

void Assembler::jmp(NearLabel* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (L->is_bound()) {
    const int short_size = sizeof(int8_t);
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    ASSERT(is_int8(offs - short_size));
    // 1110 1011 #8-bit disp.
    emit(0xEB);
    emit((offs - short_size) & 0xFF);
  } else {
    emit(0xEB);
    emit(0x00);  // The displacement will be resolved later.
    L->link_to(pc_offset());
  }
}

void Assembler::jmp(Register target) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
...
...@@ -1005,6 +1005,7 @@ class Assembler : public Malloced {
  // but it may be bound only once.
  void bind(Label* L);  // binds an unbound label L to the current code position
  void bind(NearLabel* L);

  // Calls
  // Call near relative 32-bit displacement, relative to next instruction.
...@@ -1029,10 +1030,16 @@
  // Jump near absolute indirect (m64)
  void jmp(const Operand& src);

  // Short jump
  void jmp(NearLabel* L);

  // Conditional jumps
  void j(Condition cc, Label* L);
  void j(Condition cc, Handle<Code> target, RelocInfo::Mode rmode);

  // Conditional short jump
  void j(Condition cc, NearLabel* L, Hint hint = no_hint);

  // Floating-point operations
  void fld(int i);
...@@ -1196,6 +1203,7 @@ class Assembler : public Malloced {
 private:
  byte* addr_at(int pos) { return buffer_ + pos; }
  byte byte_at(int pos) { return buffer_[pos]; }
  void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
  uint32_t long_at(int pos) {
    return *reinterpret_cast<uint32_t*>(addr_at(pos));
  }
...@@ -1371,7 +1379,6 @@ class Assembler : public Malloced {
  // labels
  // void print(Label* L);
  void bind_to(Label* L, int pos);
  void link_to(Label* L, Label* appendix);

  // record reloc info for current pc_
  void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
...
...@@ -203,7 +203,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
NearLabel false_result, true_result, not_string;
__ movq(rax, Operand(rsp, 1 * kPointerSize));
// 'null' => false.
...@@ -989,7 +989,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
Label runtime_call;
Label runtime_call_clear_stack;
Label input_not_smi;
Label loaded;
NearLabel loaded;
// Test that rax is a number.
__ movq(rax, Operand(rsp, kPointerSize));
__ JumpIfNotSmi(rax, &input_not_smi);
...@@ -1069,7 +1069,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ addl(rcx, rcx);
__ lea(rcx, Operand(rax, rcx, times_8, 0));
// Check if cache matches: Double value is stored in uint32_t[2] array.
Label cache_miss;
NearLabel cache_miss;
__ cmpq(rbx, Operand(rcx, 0));
__ j(not_equal, &cache_miss);
// Cache hit!
...@@ -1160,7 +1160,7 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
// Compute st(0) % st(1)
{
Label partial_remainder_loop;
NearLabel partial_remainder_loop;
__ bind(&partial_remainder_loop);
__ fprem1();
__ fwait();
...@@ -1202,7 +1202,7 @@ void IntegerConvert(MacroAssembler* masm,
// cvttsd2si (32-bit version) directly.
Register double_exponent = rbx;
Register double_value = rdi;
Label done, exponent_63_plus;
NearLabel done, exponent_63_plus;
// Get double and extract exponent.
__ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
// Clear result preemptively, in case we need to return zero.
...@@ -1771,7 +1771,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// rcx: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
Label seq_ascii_string, seq_two_byte_string, check_code;
NearLabel seq_ascii_string, seq_two_byte_string, check_code;
__ movq(rax, Operand(rsp, kSubjectOffset));
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
...@@ -1896,7 +1896,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 4: End of string data
// Argument 3: Start of string data
Label setup_two_byte, setup_rest;
NearLabel setup_two_byte, setup_rest;
__ testb(rdi, rdi);
__ j(zero, &setup_two_byte);
__ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
...@@ -1923,10 +1923,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ pop(rsi);
// Check the result.
Label success;
NearLabel success;
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
__ j(equal, &success);
Label failure;
NearLabel failure;
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
__ j(equal, &failure);
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
...@@ -1981,7 +1981,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// rbx: last_match_info backing store (FixedArray)
// rcx: offsets vector
// rdx: number of capture registers
Label next_capture, done;
NearLabel next_capture, done;
// Capture register counter starts from number of capture registers and
// counts down until wraping after zero.
__ bind(&next_capture);
...@@ -2155,14 +2155,14 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Two identical objects are equal unless they are both NaN or undefined.
{
Label not_identical;
NearLabel not_identical;
__ cmpq(rax, rdx);
__ j(not_equal, &not_identical);
if (cc_ != equal) {
// Check for undefined. undefined OP undefined is false even though
// undefined == undefined.
Label check_for_nan;
NearLabel check_for_nan;
__ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
__ j(not_equal, &check_for_nan);
__ Set(rax, NegativeComparisonResult(cc_));
...@@ -2180,7 +2180,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ Set(rax, EQUAL);
__ ret(0);
} else {
Label heap_number;
NearLabel heap_number;
// If it's not a heap number, then return equal for (in)equality operator.
__ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
Factory::heap_number_map());
...@@ -2244,7 +2244,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// If the first object is a JS object, we have done pointer comparison.
STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Label first_non_object;
NearLabel first_non_object;
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
__ j(below, &first_non_object);
// Return non-zero (eax (not rax) is not zero)
...@@ -2273,7 +2273,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Generate the number comparison code.
if (include_number_compare_) {
Label non_number_comparison;
Label unordered;
NearLabel unordered;
FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
__ xorl(rax, rax);
__ xorl(rcx, rcx);
...@@ -2337,7 +2337,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Not strict equality. Objects are unequal if
// they are both JSObjects and not undetectable,
// and their pointers are different.
Label not_both_objects, return_unequal;
NearLabel not_both_objects, return_unequal;
// At most one is a smi, so we can test for smi by adding the two.
// A smi plus a heap object has the low bit set, a heap object plus
// a heap object has the low bit clear.
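As an aside on the low-bit trick described in the comment above, here is a
minimal C++ sketch; the function name is only for this illustration, and the
tagging scheme (smis end in a 0 bit, heap object pointers in a 1 bit) is the
one the comment assumes:

#include <cstdint>

// Precondition, as in the stub: at most one of the two tagged words is a smi.
// smi + heap object => low bits 0 + 1 = 1 (set); heap object + heap object
// => low bits 1 + 1 = 2, so the low bit of the sum is clear.
bool BothAreHeapObjects(intptr_t a, intptr_t b) {
  return ((a + b) & 1) == 0;
}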
...@@ -2495,7 +2495,7 @@ void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// Before returning we restore the context from the frame pointer if not NULL.
// The frame pointer is NULL in the exception handler of a JS entry frame.
__ xor_(rsi, rsi); // tentatively set context pointer to NULL
Label skip;
NearLabel skip;
__ cmpq(rbp, Immediate(0));
__ j(equal, &skip);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
...@@ -2655,7 +2655,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Handling of failure.
__ bind(&failure_returned);
Label retry;
NearLabel retry;
// If the returned exception is RETRY_AFTER_GC continue at retry label
STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
__ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
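A rough sketch of the mask test above, assuming for illustration that both tag
fields are 2 bits wide (the real widths come from V8's headers); because
RETRY_AFTER_GC is defined as 0, the test reduces to checking that the type
bits directly above the failure tag are all zero:

// Illustrative constants and names only; not the actual V8 definitions.
const int kFailureTagSizeExample = 2;
const int kFailureTypeTagSizeExample = 2;
const int kFailureTypeMask =
    ((1 << kFailureTypeTagSizeExample) - 1) << kFailureTagSizeExample;  // 0b1100

bool IsRetryAfterGC(int failure_word) {
  return (failure_word & kFailureTypeMask) == 0;  // type == RETRY_AFTER_GC (0)
}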
...@@ -2695,7 +2695,7 @@ void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
__ movq(rsp, Operand(kScratchRegister, 0));
// Unwind the handlers until the ENTRY handler is found.
Label loop, done;
NearLabel loop, done;
__ bind(&loop);
// Load the type of the current stack handler.
const int kStateOffset = StackHandlerConstants::kStateOffset;
...@@ -2965,7 +2965,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// rdx is function, rax is map.
// Look up the function and the map in the instanceof cache.
Label miss;
NearLabel miss;
__ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &miss);
__ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
...@@ -2993,7 +2993,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
// Loop through the prototype chain looking for the function prototype.
Label loop, is_instance, is_not_instance;
NearLabel loop, is_instance, is_not_instance;
__ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
__ bind(&loop);
__ cmpq(rcx, rbx);
...@@ -3305,7 +3305,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// rax: first string
// rdx: second string
// Check if either of the strings are empty. In that case return the other.
Label second_not_zero_length, both_not_zero_length;
NearLabel second_not_zero_length, both_not_zero_length;
__ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
__ SmiTest(rcx);
__ j(not_zero, &second_not_zero_length);
...@@ -3561,7 +3561,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
ASSERT(count.is(rcx)); // rep movs count
// Nothing to do for zero characters.
Label done;
NearLabel done;
__ testl(count, count);
__ j(zero, &done);
...@@ -3572,7 +3572,7 @@ void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
}
// Don't enter the rep movs if there are less than 4 bytes to copy.
Label last_bytes;
NearLabel last_bytes;
__ testl(count, Immediate(~7));
__ j(zero, &last_bytes);
...@@ -3616,7 +3616,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// Make sure that both characters are not digits as such strings has a
// different hash algorithm. Don't try to look for these in the symbol table.
Label not_array_index;
NearLabel not_array_index;
__ leal(scratch, Operand(c1, -'0'));
__ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
__ j(above, &not_array_index);
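The three instructions above are the usual subtract-and-unsigned-compare range
check: after subtracting '0', a single unsigned comparison against 9 catches
both c < '0' (which wraps to a large unsigned value) and c > '9'. A minimal
C++ equivalent, with an illustrative name:

#include <cstdint>

bool IsAsciiDigit(uint32_t c) {
  return (c - '0') <= static_cast<uint32_t>('9' - '0');
}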
...@@ -3940,7 +3940,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
NULL);
// Register scratch4 now holds left.length - right.length.
const Register length_difference = scratch4;
Label left_shorter;
NearLabel left_shorter;
__ j(less, &left_shorter);
// The right string isn't longer that the left one.
// Get the right string's length by subtracting the (non-negative) difference
...@@ -3950,7 +3950,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
// Register scratch1 now holds Min(left.length, right.length).
const Register min_length = scratch1;
Label compare_lengths;
NearLabel compare_lengths;
// If min-length is zero, go directly to comparing lengths.
__ SmiTest(min_length);
__ j(zero, &compare_lengths);
...@@ -3958,7 +3958,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ SmiToInteger32(min_length, min_length);
// Registers scratch2 and scratch3 are free.
Label result_not_equal;
NearLabel result_not_equal;
Label loop;
{
// Check characters 0 .. min_length - 1 in a loop.
...@@ -3994,7 +3994,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ Move(rax, Smi::FromInt(EQUAL));
__ ret(0);
Label result_greater;
NearLabel result_greater;
__ bind(&result_not_equal);
// Unequal comparison of left to right, either character or length.
__ j(greater, &result_greater);
...@@ -4022,7 +4022,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
__ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
// Check for identity.
Label not_same;
NearLabel not_same;
__ cmpq(rdx, rax);
__ j(not_equal, &not_same);
__ Move(rax, Smi::FromInt(EQUAL));
...
...@@ -165,7 +165,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
{ Comment cmnt(masm_, "[ Stack check");
Label ok;
NearLabel ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok);
StackCheckStub stub;
...@@ -396,7 +396,7 @@ void FullCodeGenerator::Apply(Expression::Context context,
break;
case Expression::kValue: {
Label done;
NearLabel done;
switch (location_) {
case kAccumulator:
__ bind(materialize_true);
...@@ -762,7 +762,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// If we got a map from the runtime call, we can do a fast
// modification check. Otherwise, we got a fixed array, and we have
// to do a slow check.
Label fixed_array;
NearLabel fixed_array;
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
__ j(not_equal, &fixed_array);
...@@ -808,7 +808,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Check if the expected map still matches that of the enumerable.
// If not, we have to filter the key.
Label update_each;
NearLabel update_each;
__ movq(rcx, Operand(rsp, 4 * kPointerSize));
__ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
__ j(equal, &update_each);
...@@ -913,7 +913,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
if (s != NULL && s->is_eval_scope()) {
// Loop up the context chain. There is no frame effect so it is
// safe to use raw labels here.
Label next, fast;
NearLabel next, fast;
if (!context.is(temp)) {
__ movq(temp, context);
}
...@@ -1073,7 +1073,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
if (var->mode() == Variable::CONST) {
// Constants may be the hole value if they have not been initialized.
// Unhole them.
Label done;
NearLabel done;
MemOperand slot_operand = EmitSlotSearch(slot, rax);
__ movq(rax, slot_operand);
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
...@@ -1892,7 +1892,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// function and receiver and have the slow path jump around this
// code.
if (done.is_linked()) {
Label call;
NearLabel call;
__ jmp(&call);
__ bind(&done);
// Push function.
...@@ -2254,7 +2254,7 @@ void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
Label exit;
NearLabel exit;
// Get the number of formal parameters.
__ Move(rax, Smi::FromInt(scope()->num_parameters()));
...@@ -2713,7 +2713,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
__ movq(cache,
FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
Label done, not_found;
NearLabel done, not_found;
// tmp now holds finger offset as a smi.
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
...@@ -2752,7 +2752,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
VisitForValue(args->at(1), kAccumulator);
__ pop(left);
Label done, fail, ok;
NearLabel done, fail, ok;
__ cmpq(left, right);
__ j(equal, &ok);
// Fail if either is a non-HeapObject.
...@@ -2944,7 +2944,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
case Token::ADD: {
Comment cmt(masm_, "[ UnaryOperation (ADD)");
VisitForValue(expr->expression(), kAccumulator);
Label no_conversion;
NearLabel no_conversion;
Condition is_smi = masm_->CheckSmi(result_register());
__ j(is_smi, &no_conversion);
__ push(result_register());
...@@ -3051,7 +3051,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Call ToNumber only if operand is not a smi.
Label no_conversion;
NearLabel no_conversion;
Condition is_smi;
is_smi = masm_->CheckSmi(rax);
__ j(is_smi, &no_conversion);
...