Commit b5588f48 authored by mvstanton's avatar mvstanton Committed by Commit bot

Remove --pretenure-call-new

There isn't a plan to turn it on soon, so we'll take it out in favor of cleaner code.

BUG=

Review URL: https://codereview.chromium.org/1202173002

Cr-Commit-Position: refs/heads/master@{#30767}
parent 2c54dbda
......@@ -318,8 +318,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
......@@ -329,9 +328,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- sp[...]: constructor arguments
// -----------------------------------
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
Isolate* isolate = masm->isolate();
// Enter a construct frame.
......@@ -406,9 +402,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r2: initial map
Label rt_call_reload_new_target;
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
if (create_memento) {
__ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
}
__ Allocate(r3, r4, r5, r6, &rt_call_reload_new_target, SIZE_IN_WORDS);
......@@ -416,7 +409,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// initial map and properties and elements are set to empty fixed array.
// r1: constructor function
// r2: initial map
// r3: object size (including memento if create_memento)
// r3: object size
// r4: JSObject (not tagged)
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ mov(r5, r4);
......@@ -430,7 +423,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Fill all the in-object properties with the appropriate filler.
// r1: constructor function
// r2: initial map
// r3: object size (in words, including memento if create_memento)
// r3: object size
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
......@@ -469,25 +462,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
__ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
__ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2)); // End of object.
__ InitializeFieldsWithFiller(r5, r0, r6);
// Fill in memento fields.
// r5: points to the allocated but uninitialized memento.
__ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
// Load the AllocationSite
__ ldr(r6, MemOperand(sp, 3 * kPointerSize));
__ AssertUndefinedOrAllocationSite(r6, r0);
DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
} else {
__ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
__ InitializeFieldsWithFiller(r5, r0, r6);
}
__ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
__ InitializeFieldsWithFiller(r5, r0, r6);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -506,47 +482,16 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r1: constructor function
// r3: original constructor
__ bind(&rt_call);
if (create_memento) {
// Get the cell or allocation site.
__ ldr(r2, MemOperand(sp, 3 * kPointerSize));
__ push(r2); // argument 1: allocation site
}
__ push(r1); // argument 2/1: constructor function
__ push(r3); // argument 3/2: original constructor
if (create_memento) {
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
__ CallRuntime(Runtime::kNewObject, 2);
}
__ CallRuntime(Runtime::kNewObject, 2);
__ mov(r4, r0);
// Runtime_NewObjectWithAllocationSite increments allocation count.
// Skip the increment.
Label count_incremented;
if (create_memento) {
__ jmp(&count_incremented);
}
// Receiver for constructor call allocated.
// r4: JSObject
__ bind(&allocated);
if (create_memento) {
__ ldr(r2, MemOperand(sp, 3 * kPointerSize));
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ cmp(r2, r5);
__ b(eq, &count_incremented);
// r2 is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ ldr(r3, FieldMemOperand(r2,
AllocationSite::kPretenureCreateCountOffset));
__ add(r3, r3, Operand(Smi::FromInt(1)));
__ str(r3, FieldMemOperand(r2,
AllocationSite::kPretenureCreateCountOffset));
__ bind(&count_incremented);
}
// Restore the parameters.
__ pop(r3);
__ pop(r1);
......@@ -650,12 +595,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -2316,27 +2316,25 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ b(eq, &done);
__ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
__ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
__ b(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
__ b(ne, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &megamorphic);
__ jmp(&done);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &megamorphic);
__ jmp(&done);
__ bind(&miss);
......@@ -2355,24 +2353,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function
__ bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &not_array_function);
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ b(&done);
__ bind(&not_array_function);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &not_array_function);
CreateWeakCellStub create_stub(masm->isolate());
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ b(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
......@@ -2488,21 +2483,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into r2.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by r3 + 1.
__ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into r2, or undefined.
__ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
__ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(eq, &feedback_register_initialized);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into r2, or undefined.
__ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
__ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(eq, &feedback_register_initialized);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(r2, r5);
}
......
......@@ -315,8 +315,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
......@@ -327,8 +326,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
Isolate* isolate = masm->isolate();
......@@ -409,15 +406,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
Register obj_size = x3;
Register new_obj = x4;
__ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
if (create_memento) {
__ Add(x7, obj_size,
Operand(AllocationMemento::kSize / kPointerSize));
__ Allocate(x7, new_obj, x10, x11, &rt_call_reload_new_target,
SIZE_IN_WORDS);
} else {
__ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
SIZE_IN_WORDS);
}
__ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
......@@ -487,25 +477,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
// Fill the pre-allocated fields with undef.
__ FillFields(first_prop, prop_fields, filler);
__ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
__ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
__ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
// Load the AllocationSite
__ Peek(x14, 3 * kXRegSize);
__ AssertUndefinedOrAllocationSite(x14, x10);
DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
__ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
first_prop = NoReg;
} else {
// Fill all of the property fields with undef.
__ FillFields(first_prop, prop_fields, filler);
first_prop = NoReg;
prop_fields = NoReg;
}
// Fill all of the property fields with undef.
__ FillFields(first_prop, prop_fields, filler);
first_prop = NoReg;
prop_fields = NoReg;
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -523,40 +499,14 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// x1: constructor function
// x3: original constructor
__ Bind(&rt_call);
Label count_incremented;
if (create_memento) {
// Get the cell or allocation site.
__ Peek(x4, 3 * kXRegSize);
__ Push(x4, constructor, original_constructor); // arguments 1-3
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
__ Mov(x4, x0);
// If we ended up using the runtime, and we want a memento, then the
// runtime call made it for us, and we shouldn't do create count
// increment.
__ B(&count_incremented);
} else {
__ Push(constructor, original_constructor); // arguments 1-2
__ CallRuntime(Runtime::kNewObject, 2);
__ Mov(x4, x0);
}
__ Push(constructor, original_constructor); // arguments 1-2
__ CallRuntime(Runtime::kNewObject, 2);
__ Mov(x4, x0);
// Receiver for constructor call allocated.
// x4: JSObject
__ Bind(&allocated);
if (create_memento) {
__ Peek(x10, 3 * kXRegSize);
__ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
// r2 is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ Ldr(x5, FieldMemOperand(x10,
AllocationSite::kPretenureCreateCountOffset));
__ Add(x5, x5, Operand(Smi::FromInt(1)));
__ Str(x5, FieldMemOperand(x10,
AllocationSite::kPretenureCreateCountOffset));
__ bind(&count_incremented);
}
// Restore the parameters.
__ Pop(original_constructor);
__ Pop(constructor);
......@@ -662,12 +612,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -2681,26 +2681,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
__ B(eq, &done);
__ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset));
__ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
__ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
__ B(ne, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(feedback_value, &initialize);
__ B(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &megamorphic);
__ B(&done);
}
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &megamorphic);
__ B(&done);
__ Bind(&miss);
......@@ -2720,27 +2718,23 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
// indicate the ElementsKind if function is the Array constructor.
__ Bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &not_array_function);
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
feedback_vector, index, orig_construct,
is_super);
__ B(&done);
__ Bind(&not_array_function);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &not_array_function);
CreateWeakCellStub create_stub(masm->isolate());
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
feedback_vector, index, orig_construct, is_super);
__ B(&done);
__ Bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
feedback_vector, index, orig_construct, is_super);
__ Bind(&done);
}
......@@ -2862,21 +2856,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
IsSuperConstructorCall());
__ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into x2.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by x3 + 1.
__ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into x2, or undefined.
__ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
__ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
__ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
&feedback_register_initialized);
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
// Put the AllocationSite from the feedback vector into x2, or undefined.
__ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
__ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
__ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
&feedback_register_initialized);
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(x2, x5);
}
......
......@@ -1982,7 +1982,7 @@ class CallNew final : public Expression {
// Type feedback information.
virtual FeedbackVectorRequirements ComputeFeedbackRequirements(
Isolate* isolate, const ICSlotCache* cache) override {
return FeedbackVectorRequirements(FLAG_pretenuring_call_new ? 2 : 1, 0);
return FeedbackVectorRequirements(1, 0);
}
void SetFirstFeedbackSlot(FeedbackVectorSlot slot) override {
callnew_feedback_slot_ = slot;
......@@ -1992,10 +1992,6 @@ class CallNew final : public Expression {
DCHECK(!callnew_feedback_slot_.IsInvalid());
return callnew_feedback_slot_;
}
FeedbackVectorSlot AllocationSiteFeedbackSlot() {
DCHECK(FLAG_pretenuring_call_new);
return CallNewFeedbackSlot().next();
}
bool IsMonomorphic() override { return is_monomorphic_; }
Handle<JSFunction> target() const { return target_; }
......
......@@ -253,9 +253,6 @@ DEFINE_IMPLICATION(harmony_destructuring, harmony_default_parameters)
// Flags for experimental implementation features.
DEFINE_BOOL(compiled_keyed_generic_loads, false,
"use optimizing compiler to generate keyed generic load stubs")
// TODO(hpayer): We will remove this flag as soon as we have pretenuring
// support for specific allocation sites.
DEFINE_BOOL(pretenuring_call_new, false, "pretenure call new")
DEFINE_BOOL(allocation_site_pretenuring, true,
"pretenure with allocation sites")
DEFINE_BOOL(trace_pretenuring, false,
......
......@@ -3191,12 +3191,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
......@@ -3237,15 +3231,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
......
......@@ -2898,12 +2898,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
......@@ -2944,15 +2938,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ Peek(x1, arg_count * kXRegSize);
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
......
......@@ -3080,12 +3080,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
......@@ -3126,15 +3120,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ mov(edi, Operand(esp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
......
......@@ -3183,12 +3183,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
......@@ -3229,15 +3223,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
......
......@@ -3185,12 +3185,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ ld(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
......@@ -3231,15 +3225,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ ld(a1, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ li(a2, FeedbackVector());
__ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
......
......@@ -3108,12 +3108,6 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code, but not in the snapshot.
if (FLAG_pretenuring_call_new) {
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
}
__ Move(rbx, FeedbackVector());
__ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
......@@ -3154,15 +3148,6 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ movp(rdi, Operand(rsp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
if (FLAG_pretenuring_call_new) {
UNREACHABLE();
/* TODO(dslomov): support pretenuring.
EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
expr->CallNewFeedbackSlot().ToInt() + 1);
*/
}
__ Move(rbx, FeedbackVector());
__ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
......
......@@ -9908,16 +9908,6 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
// Allocate an instance of the implicit receiver object.
HValue* size_in_bytes = Add<HConstant>(instance_size);
HAllocationMode allocation_mode;
if (FLAG_pretenuring_call_new) {
if (FLAG_allocation_site_pretenuring) {
// Try to use pretenuring feedback.
Handle<AllocationSite> allocation_site = expr->allocation_site();
allocation_mode = HAllocationMode(allocation_site);
// Take a dependency on allocation site.
top_info()->dependencies()->AssumeTenuringDecision(allocation_site);
}
}
HAllocate* receiver = BuildAllocate(
size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
receiver->set_known_initial_map(initial_map);
......
......@@ -100,8 +100,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
......@@ -109,9 +108,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- edx: original constructor
// -----------------------------------
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
......@@ -192,9 +188,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// eax: initial map
__ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
__ shl(edi, kPointerSizeLog2);
if (create_memento) {
__ add(edi, Immediate(AllocationMemento::kSize));
}
__ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
......@@ -203,7 +196,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Allocated the JSObject, now initialize the fields.
// eax: initial map
// ebx: JSObject
// edi: start of next object (including memento if create_memento)
// edi: start of next object
__ mov(Operand(ebx, JSObject::kMapOffset), eax);
__ mov(ecx, factory->empty_fixed_array());
__ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
......@@ -211,7 +204,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Set extra fields in the newly allocated object.
// eax: initial map
// ebx: JSObject
// edi: start of next object (including memento if create_memento)
// edi: start of next object
// esi: slack tracking counter (non-API function case)
__ mov(edx, factory->undefined_value());
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
......@@ -244,22 +237,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
__ lea(esi, Operand(edi, -AllocationMemento::kSize));
__ InitializeFieldsWithFiller(ecx, esi, edx);
// Fill in memento fields if necessary.
// esi: points to the allocated but uninitialized memento.
__ mov(Operand(esi, AllocationMemento::kMapOffset),
factory->allocation_memento_map());
// Get the cell or undefined.
__ mov(edx, Operand(esp, 3 * kPointerSize));
__ AssertUndefinedOrAllocationSite(edx);
__ mov(Operand(esi, AllocationMemento::kAllocationSiteOffset),
edx);
} else {
__ InitializeFieldsWithFiller(ecx, edi, edx);
}
__ InitializeFieldsWithFiller(ecx, edi, edx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -275,12 +253,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// edx: original constructor
__ bind(&rt_call);
int offset = kPointerSize;
if (create_memento) {
// Get the cell or allocation site.
__ mov(edi, Operand(esp, kPointerSize * 3));
__ push(edi); // argument 1: allocation site
offset += kPointerSize;
}
// Must restore esi (context) and edi (constructor) before calling
// runtime.
......@@ -288,35 +260,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ mov(edi, Operand(esp, offset));
__ push(edi); // argument 2/1: constructor function
__ push(edx); // argument 3/2: original constructor
if (create_memento) {
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
__ CallRuntime(Runtime::kNewObject, 2);
}
__ CallRuntime(Runtime::kNewObject, 2);
__ mov(ebx, eax); // store result in ebx
// Runtime_NewObjectWithAllocationSite increments allocation count.
// Skip the increment.
Label count_incremented;
if (create_memento) {
__ jmp(&count_incremented);
}
// New object allocated.
// ebx: newly allocated object
__ bind(&allocated);
if (create_memento) {
__ mov(ecx, Operand(esp, 3 * kPointerSize));
__ cmp(ecx, masm->isolate()->factory()->undefined_value());
__ j(equal, &count_incremented);
// ecx is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ add(FieldOperand(ecx, AllocationSite::kPretenureCreateCountOffset),
Immediate(Smi::FromInt(1)));
__ bind(&count_incremented);
}
// Restore the parameters.
__ pop(edx); // new.target
__ pop(edi); // Constructor function.
......@@ -405,12 +355,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -1960,27 +1960,25 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
Heap::kWeakCellMapRootIndex);
__ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
__ j(not_equal, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
__ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &megamorphic);
__ jmp(&done, Label::kFar);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &megamorphic);
__ jmp(&done, Label::kFar);
__ bind(&miss);
......@@ -1999,24 +1997,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &not_array_function);
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ jmp(&done);
__ bind(&not_array_function);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &not_array_function);
CreateWeakCellStub create_stub(isolate);
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ jmp(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(isolate);
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
......@@ -2133,24 +2128,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into ebx.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by edx + 1.
__ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into ebx, or undefined.
__ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map =
isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
__ j(equal, &feedback_register_initialized);
__ mov(ebx, isolate()->factory()->undefined_value());
__ bind(&feedback_register_initialized);
}
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into ebx, or undefined.
__ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map =
isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
__ j(equal, &feedback_register_initialized);
__ mov(ebx, isolate()->factory()->undefined_value());
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(ebx);
}
......
......@@ -328,8 +328,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
......@@ -339,9 +338,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- sp[...]: constructor arguments
// -----------------------------------
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
Isolate* isolate = masm->isolate();
// Enter a construct frame.
......@@ -408,9 +404,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a2: initial map
Label rt_call_reload_new_target;
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
if (create_memento) {
__ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
}
__ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);
......@@ -418,7 +411,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// initial map and properties and elements are set to empty fixed array.
// a1: constructor function
// a2: initial map
// a3: object size (including memento if create_memento)
// a3: object size
// t4: JSObject (not tagged)
__ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
__ mov(t5, t4);
......@@ -433,7 +426,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Fill all the in-object properties with appropriate filler.
// a1: constructor function
// a2: initial map
// a3: object size (in words, including memento if create_memento)
// a3: object size (in words)
// t4: JSObject (not tagged)
// t5: First in-object property of JSObject (not tagged)
// t2: slack tracking counter (non-API function case)
......@@ -473,29 +466,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
__ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
__ sll(a0, a0, kPointerSizeLog2);
__ Addu(a0, t4, Operand(a0)); // End of object.
__ InitializeFieldsWithFiller(t5, a0, t7);
// Fill in memento fields.
// t5: points to the allocated but uninitialized memento.
__ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
__ sw(t7, MemOperand(t5));
__ Addu(t5, t5, kPointerSize);
// Load the AllocationSite.
__ lw(t7, MemOperand(sp, 3 * kPointerSize));
__ AssertUndefinedOrAllocationSite(a2, t0);
DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
__ sw(t7, MemOperand(t5));
__ Addu(t5, t5, kPointerSize);
} else {
__ sll(at, a3, kPointerSizeLog2);
__ Addu(a0, t4, Operand(at)); // End of object.
__ InitializeFieldsWithFiller(t5, a0, t7);
}
__ sll(at, a3, kPointerSizeLog2);
__ Addu(a0, t4, Operand(at)); // End of object.
__ InitializeFieldsWithFiller(t5, a0, t7);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -514,45 +487,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a1: constructor function
// a3: original constructor
__ bind(&rt_call);
if (create_memento) {
// Get the cell or allocation site.
__ lw(a2, MemOperand(sp, 3 * kPointerSize));
__ push(a2); // argument 1: allocation site
}
__ Push(a1, a3); // arguments 2-3 / 1-2
if (create_memento) {
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
__ CallRuntime(Runtime::kNewObject, 2);
}
__ CallRuntime(Runtime::kNewObject, 2);
__ mov(t4, v0);
// Runtime_NewObjectWithAllocationSite increments allocation count.
// Skip the increment.
Label count_incremented;
if (create_memento) {
__ jmp(&count_incremented);
}
// Receiver for constructor call allocated.
// t4: JSObject
__ bind(&allocated);
if (create_memento) {
__ lw(a2, MemOperand(sp, 3 * kPointerSize));
__ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
__ Branch(&count_incremented, eq, a2, Operand(t5));
// a2 is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ lw(a3, FieldMemOperand(a2,
AllocationSite::kPretenureCreateCountOffset));
__ Addu(a3, a3, Operand(Smi::FromInt(1)));
__ sw(a3, FieldMemOperand(a2,
AllocationSite::kPretenureCreateCountOffset));
__ bind(&count_incremented);
}
// Restore the parameters.
__ Pop(a3); // new.target
__ Pop(a1);
......@@ -651,12 +594,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -2446,27 +2446,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ Branch(&done, eq, t2, Operand(at));
__ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kWeakCellMapRootIndex);
__ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
feedback_map, Operand(at));
__ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, feedback_map, Operand(at));
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, feedback_map, Operand(at));
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
__ Branch(&megamorphic, ne, a1, Operand(t2));
__ jmp(&done);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
__ Branch(&megamorphic, ne, a1, Operand(t2));
__ jmp(&done);
__ bind(&miss);
......@@ -2485,23 +2482,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function.
__ bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function.
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
__ Branch(&not_array_function, ne, a1, Operand(t2));
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ Branch(&done);
__ bind(&not_array_function);
}
CreateWeakCellStub create_stub(masm->isolate());
// Make sure the function is the Array() function.
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t2);
__ Branch(&not_array_function, ne, a1, Operand(t2));
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ Branch(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
......@@ -2619,21 +2613,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t1, a2, at);
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into a2.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by a3 + 1.
__ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into a2, or undefined.
__ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
__ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&feedback_register_initialized, eq, t1, Operand(at));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into a2, or undefined.
__ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
__ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&feedback_register_initialized, eq, t1, Operand(at));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(a2, t1);
}
......
......@@ -326,8 +326,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
......@@ -337,9 +336,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- sp[...]: constructor arguments
// -----------------------------------
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
Isolate* isolate = masm->isolate();
// Enter a construct frame.
......@@ -407,9 +403,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a2: initial map
Label rt_call_reload_new_target;
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
if (create_memento) {
__ Daddu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
}
__ Allocate(a3, t0, t1, t2, &rt_call_reload_new_target, SIZE_IN_WORDS);
......@@ -417,7 +410,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// initial map and properties and elements are set to empty fixed array.
// a1: constructor function
// a2: initial map
// a3: object size (including memento if create_memento)
// a3: object size
// t0: JSObject (not tagged)
__ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
__ mov(t1, t0);
......@@ -432,7 +425,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Fill all the in-object properties with appropriate filler.
// a1: constructor function
// a2: initial map
// a3: object size (in words, including memento if create_memento)
// a3: object size (in words)
// t0: JSObject (not tagged)
// t1: First in-object property of JSObject (not tagged)
// a6: slack tracking counter (non-API function case)
......@@ -472,29 +465,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
__ Dsubu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
__ dsll(a0, a0, kPointerSizeLog2);
__ Daddu(a0, t0, Operand(a0)); // End of object.
__ InitializeFieldsWithFiller(t1, a0, t3);
// Fill in memento fields.
// t1: points to the allocated but uninitialized memento.
__ LoadRoot(t3, Heap::kAllocationMementoMapRootIndex);
DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
__ sd(t3, MemOperand(t1));
__ Daddu(t1, t1, kPointerSize);
// Load the AllocationSite.
__ ld(t3, MemOperand(sp, 3 * kPointerSize));
__ AssertUndefinedOrAllocationSite(t3, a0);
DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
__ sd(t3, MemOperand(t1));
__ Daddu(t1, t1, kPointerSize);
} else {
__ dsll(at, a3, kPointerSizeLog2);
__ Daddu(a0, t0, Operand(at)); // End of object.
__ InitializeFieldsWithFiller(t1, a0, t3);
}
__ dsll(at, a3, kPointerSizeLog2);
__ Daddu(a0, t0, Operand(at)); // End of object.
__ InitializeFieldsWithFiller(t1, a0, t3);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -513,45 +486,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a1: constructor function
// a3: original constructor
__ bind(&rt_call);
if (create_memento) {
// Get the cell or allocation site.
__ ld(a2, MemOperand(sp, 3 * kPointerSize));
__ push(a2); // argument 1: allocation site
}
__ Push(a1, a3); // arguments 2-3 / 1-2
if (create_memento) {
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
__ CallRuntime(Runtime::kNewObject, 2);
}
__ CallRuntime(Runtime::kNewObject, 2);
__ mov(t0, v0);
// Runtime_NewObjectWithAllocationSite increments allocation count.
// Skip the increment.
Label count_incremented;
if (create_memento) {
__ jmp(&count_incremented);
}
// Receiver for constructor call allocated.
// t0: JSObject
__ bind(&allocated);
if (create_memento) {
__ ld(a2, MemOperand(sp, 3 * kPointerSize));
__ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
__ Branch(&count_incremented, eq, a2, Operand(t1));
// a2 is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ ld(a3, FieldMemOperand(a2,
AllocationSite::kPretenureCreateCountOffset));
__ Daddu(a3, a3, Operand(Smi::FromInt(1)));
__ sd(a3, FieldMemOperand(a2,
AllocationSite::kPretenureCreateCountOffset));
__ bind(&count_incremented);
}
// Restore the parameters.
__ Pop(a3); // new.target
__ Pop(a1);
......@@ -649,12 +592,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -2480,27 +2480,24 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ Branch(&done, eq, a5, Operand(at));
__ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kWeakCellMapRootIndex);
__ Branch(FLAG_pretenuring_call_new ? &miss : &check_allocation_site, ne,
feedback_map, Operand(at));
__ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(weak_value, &initialize);
__ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, feedback_map, Operand(at));
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, feedback_map, Operand(at));
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&megamorphic, ne, a1, Operand(a5));
__ jmp(&done);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&megamorphic, ne, a1, Operand(a5));
__ jmp(&done);
__ bind(&miss);
......@@ -2519,23 +2516,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function.
__ bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function.
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&not_array_function, ne, a1, Operand(a5));
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ Branch(&done);
// Make sure the function is the Array() function.
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&not_array_function, ne, a1, Operand(a5));
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ Branch(&done);
__ bind(&not_array_function);
}
__ bind(&not_array_function);
CreateWeakCellStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
......@@ -2655,21 +2650,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ dsrl(at, a3, 32 - kPointerSizeLog2);
__ Daddu(a5, a2, at);
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into a2.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by a3 + 1.
__ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into a2, or undefined.
__ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
__ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&feedback_register_initialized, eq, a5, Operand(at));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into a2, or undefined.
__ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
__ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&feedback_register_initialized, eq, a5, Operand(at));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(a2, a5);
}
......
......@@ -1749,8 +1749,7 @@ bool AllocationSite::SitePointsToLiteral() {
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
ElementsKind boilerplate_elements_kind) {
if (FLAG_pretenuring_call_new ||
IsFastSmiElementsKind(boilerplate_elements_kind)) {
if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
return TRACK_ALLOCATION_SITE;
}
......@@ -1760,9 +1759,8 @@ AllocationSiteMode AllocationSite::GetMode(
AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
ElementsKind to) {
if (FLAG_pretenuring_call_new ||
(IsFastSmiElementsKind(from) &&
IsMoreGeneralElementsKindTransition(from, to))) {
if (IsFastSmiElementsKind(from) &&
IsMoreGeneralElementsKindTransition(from, to)) {
return TRACK_ALLOCATION_SITE;
}
......
......@@ -1117,22 +1117,6 @@ RUNTIME_FUNCTION(Runtime_NewObject) {
}
RUNTIME_FUNCTION(Runtime_NewObjectWithAllocationSite) {
HandleScope scope(isolate);
DCHECK(args.length() == 3);
CONVERT_ARG_HANDLE_CHECKED(Object, original_constructor, 2);
CONVERT_ARG_HANDLE_CHECKED(Object, constructor, 1);
CONVERT_ARG_HANDLE_CHECKED(Object, feedback, 0);
Handle<AllocationSite> site;
if (feedback->IsAllocationSite()) {
// The feedback can be an AllocationSite or undefined.
site = Handle<AllocationSite>::cast(feedback);
}
return Runtime_NewObjectHelper(isolate, constructor, original_constructor,
site);
}
RUNTIME_FUNCTION(Runtime_FinalizeInstanceSize) {
HandleScope scope(isolate);
DCHECK(args.length() == 1);
......
......@@ -450,7 +450,6 @@ namespace internal {
F(ToFastProperties, 1, 1) \
F(AllocateHeapNumber, 0, 1) \
F(NewObject, 2, 1) \
F(NewObjectWithAllocationSite, 3, 1) \
F(FinalizeInstanceSize, 1, 1) \
F(GlobalProxy, 1, 1) \
F(LookupAccessor, 3, 1) \
......
......@@ -162,9 +162,7 @@ bool TypeFeedbackOracle::CallIsMonomorphic(FeedbackVectorICSlot slot) {
bool TypeFeedbackOracle::CallNewIsMonomorphic(FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
return FLAG_pretenuring_call_new
? info->IsJSFunction()
: info->IsAllocationSite() || info->IsJSFunction();
return info->IsAllocationSite() || info->IsJSFunction();
}
......@@ -224,7 +222,7 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(
Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(
FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
if (info->IsJSFunction()) {
return Handle<JSFunction>::cast(info);
}
......@@ -246,7 +244,7 @@ Handle<AllocationSite> TypeFeedbackOracle::GetCallAllocationSite(
Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(
FeedbackVectorSlot slot) {
Handle<Object> info = GetInfo(slot);
if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
if (info->IsAllocationSite()) {
return Handle<AllocationSite>::cast(info);
}
return Handle<AllocationSite>::null();
......
......@@ -555,8 +555,7 @@ void AstTyper::VisitCall(Call* expr) {
void AstTyper::VisitCallNew(CallNew* expr) {
// Collect type feedback.
FeedbackVectorSlot allocation_site_feedback_slot =
FLAG_pretenuring_call_new ? expr->AllocationSiteFeedbackSlot()
: expr->CallNewFeedbackSlot();
expr->CallNewFeedbackSlot();
expr->set_allocation_site(
oracle()->GetCallNewAllocationSite(allocation_site_feedback_slot));
bool monomorphic =
......
......@@ -98,8 +98,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_memento) {
bool is_api_function) {
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rdi: constructor function
......@@ -107,9 +106,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- rdx: original constructor
// -----------------------------------
// Should never create mementos for api functions.
DCHECK(!is_api_function || !create_memento);
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
......@@ -189,9 +185,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Now allocate the JSObject on the heap.
__ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
__ shlp(rdi, Immediate(kPointerSizeLog2));
if (create_memento) {
__ addp(rdi, Immediate(AllocationMemento::kSize));
}
// rdi: size of new object
__ Allocate(rdi,
rbx,
......@@ -199,11 +192,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
no_reg,
&rt_call,
NO_ALLOCATION_FLAGS);
Factory* factory = masm->isolate()->factory();
// Allocated the JSObject, now initialize the fields.
// rax: initial map
// rbx: JSObject (not HeapObject tagged - the actual address).
// rdi: start of next object (including memento if create_memento)
// rdi: start of next object
__ movp(Operand(rbx, JSObject::kMapOffset), rax);
__ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
__ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
......@@ -211,7 +203,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Set extra fields in the newly allocated object.
// rax: initial map
// rbx: JSObject
// rdi: start of next object (including memento if create_memento)
// rdi: start of next object
// rsi: slack tracking counter (non-API function case)
__ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
......@@ -243,21 +235,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ bind(&no_inobject_slack_tracking);
}
if (create_memento) {
__ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
// Fill in memento fields if necessary.
// rsi: points to the allocated but uninitialized memento.
__ Move(Operand(rsi, AllocationMemento::kMapOffset),
factory->allocation_memento_map());
// Get the cell or undefined.
__ movp(rdx, Operand(rsp, 3 * kPointerSize));
__ AssertUndefinedOrAllocationSite(rdx);
__ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
} else {
__ InitializeFieldsWithFiller(rcx, rdi, rdx);
}
__ InitializeFieldsWithFiller(rcx, rdi, rdx);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
......@@ -273,48 +252,19 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// rdx: original constructor
__ bind(&rt_call);
int offset = kPointerSize;
if (create_memento) {
// Get the cell or allocation site.
__ movp(rdi, Operand(rsp, kPointerSize * 3));
__ Push(rdi); // argument 1: allocation site
offset += kPointerSize;
}
// Must restore rsi (context) and rdi (constructor) before calling runtime.
__ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ movp(rdi, Operand(rsp, offset));
__ Push(rdi); // argument 2/1: constructor function
__ Push(rdx); // argument 3/2: original constructor
if (create_memento) {
__ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
__ CallRuntime(Runtime::kNewObject, 2);
}
__ CallRuntime(Runtime::kNewObject, 2);
__ movp(rbx, rax); // store result in rbx
// Runtime_NewObjectWithAllocationSite increments allocation count.
// Skip the increment.
Label count_incremented;
if (create_memento) {
__ jmp(&count_incremented);
}
// New object allocated.
// rbx: newly allocated object
__ bind(&allocated);
if (create_memento) {
__ movp(rcx, Operand(rsp, 3 * kPointerSize));
__ Cmp(rcx, masm->isolate()->factory()->undefined_value());
__ j(equal, &count_incremented);
// rcx is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ SmiAddConstant(
FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
Smi::FromInt(1));
__ bind(&count_incremented);
}
// Restore the parameters.
__ Pop(rdx);
__ Pop(rdi);
......@@ -403,12 +353,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false);
Generate_JSConstructStubHelper(masm, true);
}
......
......@@ -1818,28 +1818,26 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
Heap::kWeakCellMapRootIndex);
__ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
__ j(not_equal, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
__ j(equal, &initialize);
__ jmp(&megamorphic);
if (!FLAG_pretenuring_call_new) {
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
__ bind(&check_allocation_site);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorph
// sentinel, then we have in the slot either some other function or an
// AllocationSite.
__ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
__ cmpp(rdi, r11);
__ j(not_equal, &megamorphic);
__ jmp(&done);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
__ cmpp(rdi, r11);
__ j(not_equal, &megamorphic);
__ jmp(&done);
__ bind(&miss);
......@@ -1858,23 +1856,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
if (!FLAG_pretenuring_call_new) {
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
__ cmpp(rdi, r11);
__ j(not_equal, &not_array_function);
CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ jmp(&done_no_smi_convert);
__ bind(&not_array_function);
}
// Make sure the function is the Array() function
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
__ cmpp(rdi, r11);
__ j(not_equal, &not_array_function);
CreateWeakCellStub create_stub(isolate);
CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
__ jmp(&done_no_smi_convert);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(isolate);
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ jmp(&done_no_smi_convert);
__ bind(&done);
__ Integer32ToSmi(rdx, rdx);
......@@ -1998,22 +1993,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ SmiToInteger32(rdx, rdx);
if (FLAG_pretenuring_call_new) {
// Put the AllocationSite from the feedback vector into ebx.
// By adding kPointerSize we encode that we know the AllocationSite
// entry is at the feedback vector slot given by rdx + 1.
__ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize + kPointerSize));
} else {
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into rbx, or undefined.
__ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
__ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(equal, &feedback_register_initialized);
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
Label feedback_register_initialized;
// Put the AllocationSite from the feedback vector into rbx, or undefined.
__ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
__ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
__ j(equal, &feedback_register_initialized);
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(rbx);
}
......
......@@ -3312,106 +3312,6 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
}
// Make sure pretenuring feedback is gathered for constructed objects as well
// as for literals.
TEST(OptimizedPretenuringConstructorCalls) {
if (!i::FLAG_pretenuring_call_new) {
// FLAG_pretenuring_call_new needs to be synced with the snapshot.
return;
}
// Flags must be set before the VM is initialized.
i::FLAG_allow_natives_syntax = true;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
// Skip configurations where the pretenuring heuristics would not fire
// deterministically (no Crankshaft, always-opt, global GC, compaction).
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
// Grow new space until maximum capacity reached.
while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
CcTest::heap()->new_space()->Grow();
}
i::ScopedVector<char> source(1024);
// Call new is doing slack tracking for the first
// JSFunction::kGenerousAllocationCount allocations, and we can't find
// mementos during that time.
i::SNPrintF(
source,
"var number_elements = %d;"
"var elements = new Array(number_elements);"
"function foo() {"
" this.a = 3;"
" this.b = {};"
"}"
"function f() {"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = new foo();"
" }"
" return elements[number_elements - 1];"
"};"
"f(); gc();"
"f(); f();"
"%%OptimizeFunctionOnNextCall(f);"
"f();",
AllocationSite::kPretenureMinimumCreated +
JSFunction::kGenerousAllocationCount);
v8::Local<v8::Value> res = CompileRun(source.start());
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The last constructed object must have been pretenured into old space.
CHECK(CcTest::heap()->InOldSpace(*o));
}
// Same scenario as OptimizedPretenuringConstructorCalls but with a minimal
// one-field constructor: pretenuring feedback from 'new' calls should still
// move the allocations to old space.
TEST(OptimizedPretenuringCallNew) {
if (!i::FLAG_pretenuring_call_new) {
// FLAG_pretenuring_call_new needs to be synced with the snapshot.
return;
}
// Flags must be set before the VM is initialized.
i::FLAG_allow_natives_syntax = true;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
// Skip configurations where the pretenuring heuristics would not fire
// deterministically (no Crankshaft, always-opt, global GC, compaction).
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
// Grow new space until maximum capacity reached.
while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
CcTest::heap()->new_space()->Grow();
}
i::ScopedVector<char> source(1024);
// Call new is doing slack tracking for the first
// JSFunction::kGenerousAllocationCount allocations, and we can't find
// mementos during that time.
i::SNPrintF(
source,
"var number_elements = %d;"
"var elements = new Array(number_elements);"
"function g() { this.a = 0; }"
"function f() {"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = new g();"
" }"
" return elements[number_elements - 1];"
"};"
"f(); gc();"
"f(); f();"
"%%OptimizeFunctionOnNextCall(f);"
"f();",
AllocationSite::kPretenureMinimumCreated +
JSFunction::kGenerousAllocationCount);
v8::Local<v8::Value> res = CompileRun(source.start());
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The last constructed object must have been pretenured into old space.
CHECK(CcTest::heap()->InOldSpace(*o));
}
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
......
......@@ -95,45 +95,3 @@ TEST(BadMementoAfterTopForceScavenge) {
// Force GC to test the poisoned memento handling
CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
}
// Verifies that objects created via 'new' carry an AllocationMemento right
// behind them and that the owning AllocationSite's create count matches the
// number of constructor calls.
TEST(PretenuringCallNew) {
CcTest::InitializeVM();
if (!i::FLAG_allocation_site_pretenuring) return;
if (!i::FLAG_pretenuring_call_new) return;
if (i::FLAG_always_opt) return;
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
// Number of 'new f()' calls baked into the script below; compared against
// the site's pretenure_create_count at the end.
int call_count = 10;
i::ScopedVector<char> test_buf(1024);
const char* program =
"function f() {"
" this.a = 3;"
" this.b = {};"
" return this;"
"};"
"var a;"
"for(var i = 0; i < %d; i++) {"
" a = new f();"
"}"
"a;";
i::SNPrintF(test_buf, program, call_count);
v8::Local<v8::Value> res = CompileRun(test_buf.start());
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The object of class f should have a memento secreted behind it.
// The memento sits immediately after the object's in-object fields, hence
// the address computed from the map's instance size (plus heap tag).
Address memento_address = o->address() + o->map()->instance_size();
AllocationMemento* memento =
reinterpret_cast<AllocationMemento*>(memento_address + kHeapObjectTag);
CHECK_EQ(memento->map(), heap->allocation_memento_map());
// Furthermore, how many mementos did we create? The count should match
// call_count. Note, that mementos are allocated during the inobject slack
// tracking phase.
AllocationSite* site = memento->GetAllocationSite();
CHECK_EQ(call_count, site->pretenure_create_count()->value());
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment