Second attempt at introducing a premonomorphic state in the call target caches.

This time we don't go through the premonomorphic state for
the Array call target caches, to avoid losing information from
allocation sites that are only used once but where the
resulting array is used heavily.

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/169683003

Patch from Kasper Lund <kasperl@chromium.org>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19457 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
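The states involved are easiest to see outside the per-architecture assembly. Below is a minimal, self-contained C++ model of the transitions that the rewritten GenerateRecordCallTarget stubs in this patch implement; the type and function names are illustrative only, and the real code stores heap sentinels (the hole, null, undefined) directly in the feedback vector slot.

#include <cassert>

// Possible contents of a call target feedback slot, modelled as an enum.
enum Slot {
  kUninitialized,              // the-hole sentinel
  kPremonomorphic,             // null sentinel (new in this patch)
  kMonomorphicFunction,        // a cached JSFunction
  kMonomorphicAllocationSite,  // Array call sites only
  kMegamorphic                 // undefined sentinel
};

// One observed call of a non-Array target.
Slot RecordGenericCall(Slot state, bool same_function_as_cached) {
  if (state == kMegamorphic) return state;
  if (state == kMonomorphicFunction && same_function_as_cached) return state;
  switch (state) {
    case kUninitialized:  return kPremonomorphic;       // first call: wait
    case kPremonomorphic: return kMonomorphicFunction;  // second call: cache it
    default:              return kMegamorphic;          // monomorphic miss
  }
}

// One observed call of the Array function. There is no premonomorphic step,
// so even a call site that runs only once gets an AllocationSite.
Slot RecordArrayCall(Slot state) {
  if (state == kMegamorphic || state == kMonomorphicAllocationSite) return state;
  if (state == kUninitialized || state == kPremonomorphic) {
    return kMonomorphicAllocationSite;
  }
  return kMegamorphic;  // some other function was cached in this slot
}

int main() {
  // A non-Array target needs two calls before the function is cached.
  Slot s = kUninitialized;
  s = RecordGenericCall(s, false);
  assert(s == kPremonomorphic);
  s = RecordGenericCall(s, false);
  assert(s == kMonomorphicFunction);
  // An Array call site is cached as an AllocationSite on the first call.
  assert(RecordArrayCall(kUninitialized) == kMonomorphicAllocationSite);
  return 0;
}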
@@ -3265,12 +3265,17 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// x1 : the function to call
// x2 : feedback vector
// x3 : slot in feedback vector (smi)
Label initialize, done, miss, megamorphic, not_array_function;
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->undefined_value());
Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex;
// Load the cache state.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
@@ -3280,43 +3285,44 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// function without changing the state.
__ Cmp(x4, x1);
__ B(eq, &done);
__ JumpIfRoot(x4, kMegamorphicRootIndex, &done);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in x4.
__ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset));
__ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss);
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(x5);
__ Cmp(x1, x5);
__ B(eq, &check_array);
// Non-array cache: Check the cache state.
__ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_non_array);
__ JumpIfNotRoot(x4, kUninitializedRootIndex, &megamorphic);
// Make sure the function is the Array() function
__ LoadArrayFunction(x4);
__ Cmp(x1, x4);
__ B(ne, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
__ LoadRoot(x10, kPremonomorphicRootIndex);
__ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
__ B(&done);
__ Bind(&miss);
// Array cache: Check the cache state to see if we're in a monomorphic
// state where the state object is an AllocationSite object.
__ Bind(&check_array);
__ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset));
__ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, &done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ JumpIfRoot(x4, kUninitializedRootIndex, &initialize_array);
__ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_array);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ Bind(&megamorphic);
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
__ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
__ LoadRoot(x10, kMegamorphicRootIndex);
__ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
__ B(&done);
// An uninitialized cache is patched with the function, or with a sentinel
// indicating the ElementsKind if the function is the Array constructor.
__ Bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(x4);
__ Cmp(x1, x4);
__ B(ne, &not_array_function);
// The target function is the Array constructor. Create an AllocationSite
// if we don't already have one and store it in the slot.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ Bind(&initialize_array);
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateAllocationSiteStub create_stub;
@@ -3332,9 +3338,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ B(&done);
__ Bind(&not_array_function);
// An uninitialized cache is patched with the function.
// Non-array cache: Premonomorphic -> monomorphic.
__ Bind(&initialize_non_array);
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
// TODO(all): Does the value need to be left in x4? If not, FieldMemOperand
// could be used to avoid this add.
@@ -3347,7 +3352,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ Pop(x1, x2, x4);
// TODO(all): Are x4, x2 and x1 outputs? This isn't clear.
__ Bind(&done);
}
......
@@ -3012,12 +3012,17 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// r1 : the function to call
// r2 : Feedback vector
// r3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->undefined_value());
Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex;
// Load the cache state into r4.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
@@ -3027,45 +3032,50 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// function without changing the state.
__ cmp(r4, r1);
__ b(eq, &done);
__ CompareRoot(r4, kMegamorphicRootIndex);
__ b(eq, &done);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in r4.
__ ldr(r5, FieldMemOperand(r4, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(r5);
__ cmp(r1, r5);
__ b(eq, &check_array);
// Make sure the function is the Array() function
__ LoadArrayFunction(r4);
__ cmp(r1, r4);
// Non-array cache: Check the cache state.
__ CompareRoot(r4, kPremonomorphicRootIndex);
__ b(eq, &initialize_non_array);
__ CompareRoot(r4, kUninitializedRootIndex);
__ b(ne, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, kPremonomorphicRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
__ bind(&miss);
// Array cache: Check the cache state to see if we're in a monomorphic
// state where the state object is an AllocationSite object.
__ bind(&check_array);
__ ldr(r5, FieldMemOperand(r4, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(eq, &done);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(eq, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ CompareRoot(r4, kUninitializedRootIndex);
__ b(eq, &initialize_array);
__ CompareRoot(r4, kPremonomorphicRootIndex);
__ b(eq, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ bind(&megamorphic);
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ LoadRoot(ip, kMegamorphicRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
// An uninitialized cache is patched with the function, or with a sentinel
// indicating the ElementsKind if the function is the Array constructor.
__ bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(r4);
__ cmp(r1, r4);
__ b(ne, &not_array_function);
// The target function is the Array constructor. Create an AllocationSite
// if we don't already have one and store it in the slot.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -3081,8 +3091,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ b(&done);
__ bind(&not_array_function);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ str(r1, MemOperand(r4, 0));
......
@@ -2330,7 +2330,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// edx : slot in feedback vector (Smi)
// edi : the function to call
Isolate* isolate = masm->isolate();
Label initialize, done, miss, megamorphic, not_array_function;
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
// Load the cache state into ecx.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
@@ -2343,48 +2343,53 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
__ j(equal, &done, Label::kFar);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in ecx.
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(not_equal, &miss);
// Load the global or builtins object from the current context
// Load the global or builtins object from the current context and check
// if we're dealing with the Array function or not.
__ LoadGlobalContext(ecx);
// Make sure the function is the Array() function
__ cmp(edi, Operand(ecx,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
__ j(equal, &check_array);
// Non-array cache: Reload the cache state and check it.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
__ cmp(ecx, Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ j(equal, &initialize_non_array);
__ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
__ j(not_equal, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ jmp(&done, Label::kFar);
__ bind(&miss);
// Array cache: Reload the cache state and check to see if we're in a
// monomorphic state where the state object is an AllocationSite object.
__ bind(&check_array);
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(equal, &done, Label::kFar);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ j(equal, &initialize_array);
__ cmp(ecx, Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ j(equal, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ bind(&megamorphic);
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
__ jmp(&done, Label::kFar);
// An uninitialized cache is patched with the function, or with a sentinel
// indicating the ElementsKind if the function is the Array constructor.
__ bind(&initialize);
__ LoadGlobalContext(ecx);
// Make sure the function is the Array() function
__ cmp(edi, Operand(ecx,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
__ j(not_equal, &not_array_function);
// The target function is the Array constructor. Create an AllocationSite
// if we don't already have one and store it in the slot.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2406,11 +2411,11 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ jmp(&done);
__ bind(&not_array_function);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
edi);
// We won't need edx or ebx anymore; just save edi.
__ push(edi);
__ push(ebx);
__ push(edx);
......
@@ -6543,6 +6543,11 @@ Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
}
Handle<Object> TypeFeedbackInfo::PremonomorphicSentinel(Isolate* isolate) {
return isolate->factory()->null_value();
}
Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
return isolate->factory()->undefined_value();
}
......
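For reference, the three sentinels defined above map onto feedback states as follows. The helper below is purely illustrative and not part of the patch (the stubs compare against the corresponding heap roots directly); all three sentinels are immortal immovable objects, which is why they can be stored into the feedback vector without a write barrier.

// Illustrative only: classify a call target feedback value using the
// sentinels defined in objects-inl.cc above.
enum CallTargetCacheState {
  kCacheUninitialized,   // the-hole sentinel
  kCachePremonomorphic,  // null sentinel
  kCacheMonomorphic,     // a JSFunction, or an AllocationSite for Array sites
  kCacheMegamorphic      // undefined sentinel
};

static CallTargetCacheState ClassifyCallTargetFeedback(Isolate* isolate,
                                                       Object* value) {
  if (value == isolate->heap()->the_hole_value()) return kCacheUninitialized;
  if (value == isolate->heap()->null_value()) return kCachePremonomorphic;
  if (value == isolate->heap()->undefined_value()) return kCacheMegamorphic;
  return kCacheMonomorphic;
}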
@@ -8174,6 +8174,9 @@ class TypeFeedbackInfo: public Struct {
// The object that indicates an uninitialized cache.
static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
// The object that indicates a cache in pre-monomorphic state.
static inline Handle<Object> PremonomorphicSentinel(Isolate* isolate);
// The object that indicates a megamorphic state.
static inline Handle<Object> MegamorphicSentinel(Isolate* isolate);
......
@@ -14774,6 +14774,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConstructor) {
Handle<AllocationSite> site;
if (!type_info.is_null() &&
*type_info != isolate->heap()->null_value() &&
*type_info != isolate->heap()->undefined_value()) {
site = Handle<AllocationSite>::cast(type_info);
ASSERT(!site->SitePointsToLiteral());
......
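Restating the updated guard in Runtime_ArrayConstructor above: with the premonomorphic sentinel in play, the feedback value must be checked against both null (premonomorphic) and undefined (megamorphic) before it can be treated as an AllocationSite. A hypothetical predicate, purely for illustration:

static bool IsAllocationSiteFeedback(Isolate* isolate,
                                     Handle<Object> type_info) {
  // Anything other than the two sentinels (and an empty handle) is an
  // AllocationSite recorded by GenerateRecordCallTarget.
  return !type_info.is_null() &&
         *type_info != isolate->heap()->null_value() &&      // premonomorphic
         *type_info != isolate->heap()->undefined_value();   // megamorphic
}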
@@ -2169,8 +2169,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// rdx : slot in feedback vector (Smi)
// rdi : the function to call
Isolate* isolate = masm->isolate();
Label initialize, done, miss, megamorphic, not_array_function,
done_no_smi_convert;
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
// Load the cache state into rcx.
__ SmiToInteger32(rdx, rdx);
@@ -2184,44 +2183,49 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
__ j(equal, &done);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in rcx.
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
__ j(equal, &check_array);
// Non-array cache: Reload the cache state and check it.
__ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
__ Cmp(rcx, TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ j(equal, &initialize_non_array);
__ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
__ j(not_equal, &megamorphic);
__ jmp(&done);
__ bind(&miss);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ jmp(&done, Label::kFar);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
// Array cache: Reload the cache state and check to see if we're in a
// monomorphic state where the state object is an AllocationSite object.
__ bind(&check_array);
__ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ j(equal, &done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ j(equal, &initialize_array);
__ Cmp(rcx, TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ j(equal, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ bind(&megamorphic);
__ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
TypeFeedbackInfo::MegamorphicSentinel(isolate));
__ jmp(&done);
// An uninitialized cache is patched with the function, or with a sentinel
// indicating the ElementsKind if the function is the Array constructor.
__ bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
__ j(not_equal, &not_array_function);
// The target function is the Array constructor. Create an AllocationSite
// if we don't already have one and store it in the slot.
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2242,13 +2246,13 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ pop(rax);
__ SmiToInteger32(rax, rax);
}
Label done_no_smi_convert;
__ jmp(&done_no_smi_convert);
__ bind(&not_array_function);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
rdi);
// We won't need rdx or rbx anymore; just save rdi.
__ push(rdi);
__ push(rbx);
__ push(rdx);
......
@@ -2848,7 +2848,9 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
// originating from two different native contexts.
CcTest::global()->Set(v8_str("fun1"), fun1);
CcTest::global()->Set(v8_str("fun2"), fun2);
CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
CompileRun("function f(a, b) { a(); b(); }"
"f(fun1, fun2);" // Run twice to skip premonomorphic state.
"f(fun1, fun2)");
Handle<JSFunction> f =
v8::Utils::OpenHandle(
......