Improve performance of keyed loads/stores that have a HeapNumber index.

Some GWT-compiled code produces array accesses with a heap number (e.g. -0)
as the index. Until now such an access fell back to the generic IC.

For example:

a[-0] === a[0] or

a[0.25 * 4] === a[1]

This change detects heap numbers that are representable as a smi
and converts them, so the fast monomorphic keyed ICs can still be used.
Optimized code already handles keyed access with a double key efficiently.

As a result the frame rate on the reported benchmark improves by roughly 2x.

BUG=v8:1388,v8:1295
Review URL: https://chromiumcodereview.appspot.com/9837109

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11282 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
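For reference, below is a minimal standalone sketch (plain C++, not the V8 helpers; the
function name and the 31-bit smi range of the 32-bit ports are assumptions) of the
conversion that both the new stub check and the TryConvertKey helper in the diff perform:
truncate the double toward zero, require the conversion to be exact, and require the
result to fit in the smi range.

#include <cstdint>

// Returns true and stores the converted value if |key| is a heap-number index
// that can be treated as a smi; a sketch only, assuming a 31-bit smi payload.
bool KeyToSmiValue(double key, int32_t* out) {
  // Reject anything outside the smi range [-2^30, 2^30 - 1] up front; NaN also
  // fails here because every comparison with NaN is false.
  if (!(key >= -1073741824.0 && key <= 1073741823.0)) return false;
  int32_t truncated = static_cast<int32_t>(key);  // rounds toward zero
  // Require an exact conversion: 0.25 * 4 == 1.0 passes, 1.5 fails, and -0
  // passes because -0 compares equal to 0 and therefore maps to the smi 0.
  if (static_cast<double>(truncated) != key) return false;
  *out = truncated;
  return true;
}

With this conversion, a[-0] and a[0.25 * 4] hit the same fast monomorphic stubs as
a[0] and a[1]; keys such as 1.5 or NaN still take the existing slow paths.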
@@ -3368,6 +3368,44 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
 }
 
 
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+                                Register key,
+                                Register scratch0,
+                                Register scratch1,
+                                DwVfpRegister double_scratch0,
+                                Label* fail) {
+  if (CpuFeatures::IsSupported(VFP3)) {
+    CpuFeatures::Scope scope(VFP3);
+    Label key_ok;
+    // Check for smi or a smi inside a heap number. We convert the heap
+    // number and check if the conversion is exact and fits into the smi
+    // range.
+    __ JumpIfSmi(key, &key_ok);
+    __ CheckMap(key,
+                scratch0,
+                Heap::kHeapNumberMapRootIndex,
+                fail,
+                DONT_DO_SMI_CHECK);
+    __ sub(ip, key, Operand(kHeapObjectTag));
+    __ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
+    __ EmitVFPTruncate(kRoundToZero,
+                       double_scratch0.low(),
+                       double_scratch0,
+                       scratch0,
+                       scratch1,
+                       kCheckForInexactConversion);
+    __ b(ne, fail);
+    __ vmov(scratch0, double_scratch0.low());
+    __ TrySmiTag(scratch0, fail, scratch1);
+    __ mov(key, scratch0);
+    __ bind(&key_ok);
+  } else {
+    // Check that the key is a smi.
+    __ JumpIfNotSmi(key, fail);
+  }
+}
+
+
 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
@@ -3384,8 +3422,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
 
   __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   // r3: elements array
@@ -3715,8 +3753,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
 
   __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
@@ -4041,8 +4079,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(r0, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic);
 
   // Get the elements array.
   __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
@@ -4093,8 +4131,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
 
   // Get the elements array.
   __ ldr(elements_reg,
@@ -4169,8 +4207,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
 
   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
@@ -4336,7 +4374,9 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
 
   __ ldr(elements_reg,
          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
@@ -3301,6 +3301,39 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
 }
 
 
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+                                Register key,
+                                Register scratch,
+                                XMMRegister xmm_scratch0,
+                                XMMRegister xmm_scratch1,
+                                Label* fail) {
+  // Check that key is a smi and if SSE2 is available a heap number
+  // containing a smi and branch if the check fails.
+  if (CpuFeatures::IsSupported(SSE2)) {
+    CpuFeatures::Scope use_sse2(SSE2);
+    Label key_ok;
+    __ JumpIfSmi(key, &key_ok);
+    __ cmp(FieldOperand(key, HeapObject::kMapOffset),
+           Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
+    __ j(not_equal, fail);
+    __ movdbl(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
+    __ cvttsd2si(scratch, Operand(xmm_scratch0));
+    __ cvtsi2sd(xmm_scratch1, scratch);
+    __ ucomisd(xmm_scratch1, xmm_scratch0);
+    __ j(not_equal, fail);
+    __ j(parity_even, fail);  // NaN.
+    // Check if the key fits in the smi range.
+    __ cmp(scratch, 0xc0000000);
+    __ j(sign, fail);
+    __ SmiTag(scratch);
+    __ mov(key, scratch);
+    __ bind(&key_ok);
+  } else {
+    __ JumpIfNotSmi(key, fail);
+  }
+}
+
+
 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
@@ -3314,8 +3347,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(eax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, eax, ecx, xmm0, xmm1, &miss_force_generic);
 
   // Check that the index is in range.
   __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
@@ -3367,14 +3400,14 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     // it to a HeapNumber.
     Label box_int;
     if (elements_kind == EXTERNAL_INT_ELEMENTS) {
-      __ cmp(ecx, 0xC0000000);
+      __ cmp(ecx, 0xc0000000);
       __ j(sign, &box_int);
     } else {
       ASSERT_EQ(EXTERNAL_UNSIGNED_INT_ELEMENTS, elements_kind);
       // The test is different for unsigned int values. Since we need
       // the value to be in the range of a positive smi, we can't
      // handle either of the top two bits being set in the value.
-      __ test(ecx, Immediate(0xC0000000));
+      __ test(ecx, Immediate(0xc0000000));
       __ j(not_zero, &box_int);
     }
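The 0xc0000000 masks above are the usual 31-bit smi range tests. A hedged sketch of
what they check, assuming a 31-bit smi payload as on the 32-bit ports (helper names
are illustrative, not V8 API):

#include <cstdint>

// Signed case: v fits in a smi iff -2^30 <= v <= 2^30 - 1. Subtracting
// 0xc0000000 (equivalently, adding 0x40000000) sets the sign bit exactly when
// v is out of that range, which is what "cmp ecx, 0xc0000000; j(sign, ...)" tests.
bool Int32FitsInSmi(int32_t v) {
  return (static_cast<uint32_t>(v) + 0x40000000u) < 0x80000000u;
}

// Unsigned case: a non-negative smi has at most 30 value bits, so the value
// fits only if neither of the top two bits is set -- hence
// "test ecx, 0xc0000000; j(not_zero, &box_int)".
bool Uint32FitsInSmi(uint32_t v) {
  return (v & 0xc0000000u) == 0;
}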
@@ -3459,7 +3492,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
   // ----------- S t a t e -------------
-  //  -- eax    : key
+  //  -- eax    : value
+  //  -- ecx    : key
   //  -- edx    : receiver
   //  -- esp[0] : return address
   // -----------------------------------
@@ -3468,8 +3502,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(ecx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
 
   // Check that the index is in range.
   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
@@ -3664,8 +3698,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(eax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, eax, ecx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
@@ -3702,8 +3736,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(eax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, eax, ecx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
@@ -3771,8 +3805,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(ecx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
 
   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
     __ JumpIfNotSmi(eax, &transition_elements_kind);
@@ -3926,8 +3960,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(ecx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
@@ -1053,18 +1053,33 @@ Handle<Code> KeyedLoadIC::ComputePolymorphicStub(
 }
 
 
+static Handle<Object> TryConvertKey(Handle<Object> key, Isolate* isolate) {
+  // This helper implements a few common fast cases for converting
+  // non-smi keys of keyed loads/stores to a smi or a string.
+  if (key->IsHeapNumber()) {
+    double value = Handle<HeapNumber>::cast(key)->value();
+    if (isnan(value)) {
+      key = isolate->factory()->nan_symbol();
+    } else {
+      int int_value = FastD2I(value);
+      if (value == int_value && Smi::IsValid(int_value)) {
+        key = Handle<Smi>(Smi::FromInt(int_value));
+      }
+    }
+  } else if (key->IsUndefined()) {
+    key = isolate->factory()->undefined_symbol();
+  }
+  return key;
+}
+
+
 MaybeObject* KeyedLoadIC::Load(State state,
                                Handle<Object> object,
                                Handle<Object> key,
                                bool force_generic_stub) {
-  // Check for values that can be converted into a symbol.
-  // TODO(1295): Remove this code.
-  if (key->IsHeapNumber() &&
-      isnan(Handle<HeapNumber>::cast(key)->value())) {
-    key = isolate()->factory()->nan_symbol();
-  } else if (key->IsUndefined()) {
-    key = isolate()->factory()->undefined_symbol();
-  }
+  // Check for values that can be converted into a symbol directly or
+  // is representable as a smi.
+  key = TryConvertKey(key, isolate());
 
   if (key->IsSymbol()) {
     Handle<String> name = Handle<String>::cast(key);
@@ -1761,6 +1776,10 @@ MaybeObject* KeyedStoreIC::Store(State state,
                                  Handle<Object> key,
                                  Handle<Object> value,
                                  bool force_generic) {
+  // Check for values that can be converted into a symbol directly or
+  // is representable as a smi.
+  key = TryConvertKey(key, isolate());
+
   if (key->IsSymbol()) {
     Handle<String> name = Handle<String>::cast(key);
@@ -3112,6 +3112,32 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
   __ jmp(miss_ic, RelocInfo::CODE_TARGET);
 }
 
 
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+                                Register key,
+                                Register scratch,
+                                XMMRegister xmm_scratch0,
+                                XMMRegister xmm_scratch1,
+                                Label* fail) {
+  // Check that key is a smi or a heap number containing a smi and branch
+  // if the check fails.
+  Label key_ok;
+  __ JumpIfSmi(key, &key_ok);
+  __ CheckMap(key,
+              masm->isolate()->factory()->heap_number_map(),
+              fail,
+              DONT_DO_SMI_CHECK);
+  __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
+  __ cvttsd2si(scratch, xmm_scratch0);
+  __ cvtlsi2sd(xmm_scratch1, scratch);
+  __ ucomisd(xmm_scratch1, xmm_scratch0);
+  __ j(not_equal, fail);
+  __ j(parity_even, fail);  // NaN.
+  __ Integer32ToSmi(key, scratch);
+  __ bind(&key_ok);
+}
+
+
 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
@@ -3125,8 +3151,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
 
   // Check that the index is in range.
   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3260,8 +3286,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rcx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
 
   // Check that the index is in range.
   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3442,8 +3468,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3484,8 +3510,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3540,8 +3566,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rcx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
 
   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
     __ JumpIfNotSmi(rax, &transition_elements_kind);
@@ -3682,8 +3708,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rcx, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
 
   // Get the elements array.
   __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));