Implement type recording for ToBoolean on x64.

Handle oddballs on ia32 via root indices, similar to other platforms. Add a
special case for Smi types on ia32 to make lithium code generation on both
Intel platforms more similar.
Review URL: http://codereview.chromium.org/7544012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8767 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 15c979ee
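
For context, the "type recording" here is a small bit set over the input kinds a ToBooleanStub has observed; the types_.Contains(...), NeedsMap(), IsAll(), ToByte() and all_types() calls in the hunks below all operate on it. A minimal sketch of the shape of that set (assumed names modeled on those calls, not the actual v8 declaration):

#include <stdint.h>

// One bit per input kind the stub has seen so far. The constants mirror
// the UNDEFINED, BOOLEAN, ... values used in the hunks below;
// NUMBER_OF_TYPES is an assumed helper.
enum Type {
  UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
  HEAP_NUMBER, INTERNAL_OBJECT, NUMBER_OF_TYPES
};

class Types {
 public:
  Types() : set_(0) {}
  explicit Types(uint8_t byte) : set_(byte) {}
  bool IsEmpty() const { return set_ == 0; }
  bool IsAll() const { return set_ == (1 << NUMBER_OF_TYPES) - 1; }
  bool Contains(Type type) const { return (set_ & (1 << type)) != 0; }
  void Add(Type type) { set_ |= 1 << type; }
  uint8_t ToByte() const { return set_; }
  // Only some input kinds require loading the object's map first.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) ||
           Contains(HEAP_NUMBER) || Contains(INTERNAL_OBJECT);
  }
 private:
  uint8_t set_;
};

A stub is compiled for exactly the recorded set; any input outside it jumps to the patch label, which widens the set and installs a more general stub (see GenerateTypeTransition below).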
@@ -955,7 +955,7 @@ class ToBooleanStub: public CodeStub {
void CheckOddball(MacroAssembler* masm,
Type type,
Handle<Object> value,
Heap::RootListIndex value,
bool result,
Label* patch);
void GenerateTypeTransition(MacroAssembler* masm);
@@ -249,14 +249,14 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
}
// undefined -> false
CheckOddball(masm, UNDEFINED, factory->undefined_value(), false, &patch);
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
CheckOddball(masm, BOOLEAN, factory->false_value(), false, &patch);
CheckOddball(masm, BOOLEAN, factory->true_value(), true, &patch);
CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false.
CheckOddball(masm, NULL_TYPE, factory->null_value(), false, &patch);
CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
@@ -351,14 +351,14 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
Handle<Object> value,
Heap::RootListIndex value,
bool result,
Label* patch) {
const Register argument = eax;
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value in tos_.
Label different_value;
__ cmp(argument, value);
__ CompareRoot(argument, value);
__ j(not_equal, &different_value, Label::kNear);
__ Set(tos_, Immediate(result ? 1 : 0));
__ ret(1 * kPointerSize);
......@@ -366,7 +366,7 @@ void ToBooleanStub::CheckOddball(MacroAssembler* masm,
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explicitly.
__ cmp(argument, value);
__ CompareRoot(argument, value);
__ j(equal, patch);
}
}
@@ -1393,9 +1393,13 @@ void LCodeGen::DoBranch(LBranch* instr) {
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
if (instr->hydrogen()->value()->type().IsBoolean()) {
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ cmp(reg, factory()->true_value());
EmitBranch(true_block, false_block, equal);
} else if (type.IsSmi()) {
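// Smi zero is the all-zero bit pattern (untagged value 0 shifted over a
// zero tag bit), so testing the register against itself distinguishes
// the single false Smi from every other Smi.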
__ test(reg, Operand(reg));
EmitBranch(true_block, false_block, not_equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
@@ -263,6 +263,14 @@ void MacroAssembler::SafePush(const Immediate& x) {
}
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
// See the ROOT_ACCESSOR macro in factory.h.
Handle<Object> value(BitCast<Object**>(
&isolate()->heap()->roots_address()[index]));
cmp(with, value);
}
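
ia32 has no dedicated root register (x64 reserves one), so this version routes the comparison through a Handle pointing at the root list slot itself; the slot's address is stable, so the reference stays correct even when the GC updates the slot's contents. Roughly what the comparison computes, with illustrative stand-ins rather than v8 names:

// g_roots stands in for isolate()->heap()->roots_address().
static void** g_roots;

bool EqualsRoot(void* tagged, int index) {
  // Compare against the current contents of the fixed root slot.
  return tagged == g_roots[index];
}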
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
@@ -209,6 +209,9 @@ class MacroAssembler: public Assembler {
void SafeSet(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
// Compare a register against a known root, e.g. undefined, null, true, ...
void CompareRoot(Register with, Heap::RootListIndex index);
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
void CmpObjectType(Register heap_object, InstanceType type, Register map);
@@ -230,68 +230,151 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
// The stub returns zero for false, and a non-zero value for true.
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
Label patch;
const Register argument = rax;
const Register map = rdx;
__ movq(rax, Operand(rsp, 1 * kPointerSize));
if (!types_.IsEmpty()) {
__ movq(argument, Operand(rsp, 1 * kPointerSize));
}
// undefined -> false
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, &false_result);
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
__ CompareRoot(rax, Heap::kFalseValueRootIndex);
__ j(equal, &false_result);
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
__ j(equal, &true_result);
// Smis: 0 -> false, all other -> true
__ Cmp(rax, Smi::FromInt(0));
__ j(equal, &false_result);
__ JumpIfSmi(rax, &true_result);
CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false.
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, &false_result, Label::kNear);
CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
Label not_smi;
__ JumpIfNotSmi(argument, &not_smi, Label::kNear);
// argument contains the correct return value already
if (!tos_.is(argument)) {
__ movq(tos_, argument);
}
__ ret(1 * kPointerSize);
__ bind(&not_smi);
} else if (types_.NeedsMap()) {
// If we need a map later and have a Smi -> patch.
__ JumpIfSmi(argument, &patch, Label::kNear);
}
// Get the map of the heap object.
__ movq(map, FieldOperand(rax, HeapObject::kMapOffset));
if (types_.NeedsMap()) {
__ movq(map, FieldOperand(argument, HeapObject::kMapOffset));
// Undetectable -> false.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, &false_result, Label::kNear);
// Everything with a map could be undetectable, so check this now.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
// Undetectable -> false.
Label not_undetectable;
__ j(zero, &not_undetectable, Label::kNear);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
__ bind(&not_undetectable);
}
// JavaScript object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &true_result, Label::kNear);
if (types_.Contains(SPEC_OBJECT)) {
// spec object -> true.
Label not_js_object;
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(below, &not_js_object, Label::kNear);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&not_js_object);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> patch.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &patch, Label::kNear);
}
// String value -> false iff empty.
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmpq(FieldOperand(rax, String::kLengthOffset), Immediate(0));
__ j(zero, &false_result, Label::kNear);
__ jmp(&true_result, Label::kNear);
if (types_.Contains(STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ movq(tos_, FieldOperand(argument, String::kLengthOffset));
__ ret(1 * kPointerSize); // the string length is OK as the return value
__ bind(&not_string);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a string for the first time -> patch
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(below, &patch, Label::kNear);
}
__ bind(&not_string);
// HeapNumber -> false iff +0, -0, or NaN.
// These three cases set the zero flag when compared to zero using ucomisd.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &true_result, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
__ j(zero, &false_result, Label::kNear);
// Fall through to |true_result|.
if (types_.Contains(HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number, false_result;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &not_heap_number, Label::kNear);
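// ucomisd sets ZF both for equal and for unordered (NaN) operands, so
// +0, -0 and NaN all take the zero branch to false_result below.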
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset));
__ j(zero, &false_result, Label::kNear);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
__ bind(&not_heap_number);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> patch
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(equal, &patch, Label::kNear);
}
// Return 1/0 for true/false in tos_.
__ bind(&true_result);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
if (types_.Contains(INTERNAL_OBJECT)) {
// internal objects -> true
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
}
if (!types_.IsAll()) {
__ bind(&patch);
GenerateTypeTransition(masm);
}
}
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
Heap::RootListIndex value,
bool result,
Label* patch) {
const Register argument = rax;
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value in tos_.
Label different_value;
__ CompareRoot(argument, value);
__ j(not_equal, &different_value, Label::kNear);
__ Set(tos_, result ? 1 : 0);
__ ret(1 * kPointerSize);
__ bind(&different_value);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explicitly.
__ CompareRoot(argument, value);
__ j(equal, patch);
}
}
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(rcx); // Get return address, operand is now on top of stack.
__ Push(Smi::FromInt(tos_.code()));
__ Push(Smi::FromInt(types_.ToByte()));
__ push(rcx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
3,
1);
}
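
This transition path is where the recording actually happens: the stub pushes its result register's code and its current type bits (both as Smis) under the return address, then tail-calls the ToBoolean patch helper, which notes the newly seen kind and installs a stub specialized for the widened set. The effect on the recorded byte is monotone (a sketch with assumed names, not the real patch routine):

#include <stdint.h>

// Each miss can only grow the set, so a call site moves through at most
// a handful of stubs before IsAll() holds and patching stops for good.
uint8_t WidenTypeSet(uint8_t recorded, uint8_t seen_bit) {
  return static_cast<uint8_t>(recorded | seen_bit);
}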
@@ -1402,39 +1402,119 @@ void LCodeGen::DoBranch(LBranch* instr) {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ j(equal, false_label);
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ j(equal, true_label);
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
// Test for double values. Plus/minus zero and NaN are false.
Label call_stub;
__ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &call_stub, Label::kNear);
// HeapNumber => false iff +0, -0, or NaN. These three cases set the
// zero flag when compared to zero using ucomisd.
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
__ j(zero, false_label);
__ jmp(true_label);
// The conversion stub doesn't cause garbage collections so it's
// safe to not record a safepoint after the call.
__ bind(&call_stub);
ToBooleanStub stub(rax);
__ Pushad();
__ push(reg);
__ CallStub(&stub);
__ testq(rax, rax);
__ Popad();
EmitBranch(true_block, false_block, not_zero);
ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
if (expected.Contains(ToBooleanStub::UNDEFINED)) {
// undefined -> false.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen undefined for the first time -> deopt.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// true -> true.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ j(equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// false -> false.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
// 'null' -> false.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen null for the first time -> deopt.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ testb(reg, Immediate(kSmiTagMask));
DeoptimizeIf(zero, instr->environment());
}
const Register map = kScratchRegister;
if (expected.NeedsMap()) {
__ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
// Everything with a map could be undetectable, so check this now.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
// Undetectable -> false.
__ j(not_zero, false_label);
}
if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
// spec object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> deopt.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
DeoptimizeIf(above_equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
__ j(not_zero, true_label);
__ jmp(false_label);
__ bind(&not_string);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a string for the first time -> deopt
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
DeoptimizeIf(below, instr->environment());
}
if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &not_heap_number, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
__ j(zero, false_label);
__ jmp(true_label);
__ bind(&not_heap_number);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> deopt.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// internal objects -> true
__ jmp(true_label);
} else {
// We've seen something for the first time -> deopt.
DeoptimizeIf(no_condition, instr->environment());
}
}
}
}
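
Every branch chain above specializes the same ECMAScript ToBoolean truth table: false for undefined, null, false, Smi 0, +0/-0/NaN heap numbers, the empty string and undetectable objects; true for everything else. For the two non-trivial scalar cases, a plain reference in ordinary C++ (not v8 internals):

#include <cmath>
#include <string>

bool ToBooleanNumber(double value) {
  // +0, -0 and NaN are the only false numbers.
  return value != 0.0 && !std::isnan(value);
}

bool ToBooleanString(const std::string& value) {
  // False iff empty, which is why the stub can return the length itself.
  return !value.empty();
}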
@@ -1036,7 +1036,11 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
return new LBranch(UseRegisterAtStart(v));
LInstruction* branch = new LBranch(UseRegister(v));
// When we handle all cases, we never deopt, so we don't need to assign the
// environment then.
bool all_cases_handled = instr->expected_input_types().IsAll();
return all_cases_handled ? branch : AssignEnvironment(branch);
}
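
This mirrors a general rule in this compiler: an instruction carries an environment (the frame state needed to rebuild the unoptimized frame) only if some path through its generated code can call DeoptimizeIf. A compressed sketch of the decision, with assumed simplified types:

struct HBranchLike { bool handles_all_input_types; };
struct LBranchLike { bool needs_environment; };

LBranchLike Plan(const HBranchLike& h) {
  // With complete type feedback every input kind is handled inline and
  // the branch can never bail out, so no environment is recorded.
  return LBranchLike{!h.handles_all_input_types};
}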