Commit 4fdf57ac authored by lrn@chromium.org

X64: Faster push/pop implementation.

Also snuck in an intended optimization for fast api call preparation and a few indentation fixes.

Review URL: http://codereview.chromium.org/1689010

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4579 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 54353967
@@ -1179,7 +1179,7 @@ Object* CallStubCompiler::CompileArrayPushCall(Object* object,
       __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
       STATIC_ASSERT(kSmiTagSize == 1);
       STATIC_ASSERT(kSmiTag == 0);
-      __ add(Operand(eax), Immediate(argc << 1));
+      __ add(Operand(eax), Immediate(Smi::FromInt(argc)));
 
       // Get the element's length into ecx.
       __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
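Note: the replaced immediate is bit-for-bit identical. With kSmiTag == 0 and kSmiTagSize == 1 (guaranteed by the STATIC_ASSERTs above), Smi::FromInt(argc) encodes argc as argc << 1, so the new form only makes the intent explicit. A minimal sketch of that 32-bit smi encoding, using illustrative stand-ins rather than V8's actual helpers:

```cpp
// Sketch of ia32 smi tagging; SmiFromInt is an illustrative stand-in
// for Smi::FromInt, assuming kSmiTag == 0 and kSmiTagSize == 1.
#include <cassert>
#include <cstdint>

const int kSmiTag = 0;
const int kSmiTagSize = 1;

int32_t SmiFromInt(int32_t value) {
  // The payload lives in the upper 31 bits; the low tag bit stays 0.
  return (value << kSmiTagSize) | kSmiTag;
}

int main() {
  for (int argc = 0; argc < 8; argc++) {
    // Immediate(Smi::FromInt(argc)) == Immediate(argc << 1) above.
    assert(SmiFromInt(argc) == (argc << 1));
  }
  return 0;
}
```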
@@ -1232,7 +1232,7 @@ Object* CallStubCompiler::CompileArrayPushCall(Object* object,
       __ j(not_equal, &call_builtin);
       __ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize));
       __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
-      __ j(greater, &call_builtin);
+      __ j(above, &call_builtin);
 
       // We fit and could grow elements.
       __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
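Note: `greater` tests the signed flags while `above` tests the unsigned ones. Allocation top and limit are raw addresses, so the comparison must be unsigned or it inverts once an address crosses the 0x80000000 boundary; the same reasoning applies to the j(less) to j(below) change in RecordWriteHelper below. A hedged illustration with made-up addresses:

```cpp
// Demonstrates why address comparisons need unsigned condition codes.
#include <cstdint>
#include <cstdio>

int main() {
  // Made-up allocation pointers on either side of the sign boundary.
  uint32_t allocation_top = 0x80000010u;    // would-be new allocation top
  uint32_t allocation_limit = 0x7ffffff0u;  // space limit

  // 'above' tests the unsigned flags: correctly reports top > limit.
  bool above = allocation_top > allocation_limit;

  // 'greater' tests the signed flags: 0x80000010 is negative as
  // int32_t, so the signed comparison wrongly reports top < limit.
  bool greater = static_cast<int32_t>(allocation_top) >
                 static_cast<int32_t>(allocation_limit);

  printf("above=%d greater=%d\n", above, greater);  // above=1 greater=0
  return 0;
}
```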
@@ -1298,7 +1298,7 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
     return Heap::undefined_value();
   }
 
-  Label miss, empty_array, call_builtin;
+  Label miss, return_undefined, call_builtin;
 
   // Get the receiver from the stack.
  const int argc = arguments().immediate();
@@ -1307,7 +1307,6 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
   // Check that the receiver isn't a smi.
   __ test(edx, Immediate(kSmiTagMask));
   __ j(zero, &miss);
-
   CheckPrototypes(JSObject::cast(object), edx,
                   holder, ebx,
                   eax, name, &miss);
@@ -1323,7 +1322,7 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
   // Get the array's length into ecx and calculate new length.
   __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
   __ sub(Operand(ecx), Immediate(Smi::FromInt(1)));
-  __ j(negative, &empty_array);
+  __ j(negative, &return_undefined);
 
   // Get the last element.
   STATIC_ASSERT(kSmiTagSize == 1);
@@ -1344,12 +1343,11 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
                 Immediate(Factory::the_hole_value()));
   __ ret((argc + 1) * kPointerSize);
 
-  __ bind(&empty_array);
+  __ bind(&return_undefined);
   __ mov(eax, Immediate(Factory::undefined_value()));
   __ ret((argc + 1) * kPointerSize);
 
   __ bind(&call_builtin);
   __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
                                argc + 1,
                                1);
......
@@ -8516,6 +8516,7 @@ static int NegativeComparisonResult(Condition cc) {
   return (cc == greater || cc == greater_equal) ? LESS : GREATER;
 }
 
 void CompareStub::Generate(MacroAssembler* masm) {
+  Label call_builtin, done;
   // The compare stub returns a positive, negative, or zero 64-bit integer
......
@@ -101,7 +101,9 @@ void MacroAssembler::RecordWriteHelper(Register object,
   // If the bit offset lies beyond the normal remembered set range, it is in
   // the extra remembered set area of a large object.
   cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
-  j(less, &fast);
+  j(below, &fast);
 
   // We have a large object containing pointers. It must be a FixedArray.
   // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
   // extra remembered set after the large object.
@@ -132,22 +134,6 @@ void MacroAssembler::RecordWriteHelper(Register object,
 }
 
-void MacroAssembler::InNewSpace(Register object,
-                                Register scratch,
-                                Condition cc,
-                                Label* branch) {
-  ASSERT(cc == equal || cc == not_equal);
-  if (!scratch.is(object)) {
-    movq(scratch, object);
-  }
-  ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
-  and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
-  movq(kScratchRegister, ExternalReference::new_space_start());
-  cmpq(scratch, kScratchRegister);
-  j(cc, branch);
-}
-
 // Set the remembered set bit for [object+offset].
 // object is the object being stored into, value is the object being stored.
 // If offset is zero, then the smi_index register contains the array index into
@@ -213,11 +199,11 @@ void MacroAssembler::RecordWriteNonSmi(Register object,
   // We make sure that an offset is inside the right limits whether it is
   // tagged or untagged.
   if ((offset > 0) && (offset < Page::kMaxHeapObjectSize - kHeapObjectTag)) {
-    // Compute the bit offset in the remembered set, leave it in 'value'.
+    // Compute the bit offset in the remembered set, leave it in 'scratch'.
     lea(scratch, Operand(object, offset));
     ASSERT(is_int32(Page::kPageAlignmentMask));
     and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
-    shr(scratch, Immediate(kObjectAlignmentBits));
+    shr(scratch, Immediate(kPointerSizeLog2));
 
     // Compute the page address from the heap object pointer, leave it in
     // 'object' (immediate value is sign extended).
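Note: the remembered set keeps one bit per pointer-sized slot in the page, so shifting by kPointerSizeLog2 states the slot-index computation directly. A rough model of the three instructions above, with assumed stand-in constants (the real values come from V8's Page and pointer-size definitions):

```cpp
// Rough model of: lea scratch, [object + offset];
//                 and scratch, kPageAlignmentMask;
//                 shr scratch, kPointerSizeLog2;
// Constants are assumed stand-ins, not V8's actual x64 values.
#include <cstdint>
#include <cstdio>

const int kPointerSizeLog2 = 3;                       // 8-byte slots on x64
const uintptr_t kPageAlignmentMask = (1u << 13) - 1;  // assumed page mask

int main() {
  uintptr_t object = 0x7f0000102040;  // illustrative heap address
  int offset = 0x18;                  // field offset being written

  uintptr_t in_page = (object + offset) & kPageAlignmentMask;
  uintptr_t slot_index = in_page >> kPointerSizeLog2;  // bit to set

  printf("remembered-set bit index: %llu\n",
         static_cast<unsigned long long>(slot_index));
  return 0;
}
```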
@@ -236,10 +222,10 @@ void MacroAssembler::RecordWriteNonSmi(Register object,
     // array access: calculate the destination address in the same manner as
     // KeyedStoreIC::GenerateGeneric.
     SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
-    lea(dst, Operand(object,
+    lea(dst, FieldOperand(object,
                      index.reg,
                      index.scale,
-                     FixedArray::kHeaderSize - kHeapObjectTag));
+                     FixedArray::kHeaderSize));
   }
 
   // If we are already generating a shared stub, not inlining the
   // record write code isn't going to save us any memory.
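Note: FieldOperand is V8's tagged-pointer addressing helper; it folds the -kHeapObjectTag adjustment into the displacement, so the rewritten lea computes exactly the same address with less noise. A small model of that equivalence, with illustrative helper names (kHeapObjectTag == 1 for tagged heap pointers):

```cpp
// Models FieldOperand(obj, disp) == Operand(obj, disp - kHeapObjectTag)
// for tagged pointers; the helpers are illustrative, not V8's classes.
#include <cassert>
#include <cstdint>

const int kHeapObjectTag = 1;

uintptr_t OperandAddr(uintptr_t base, intptr_t disp) {
  return base + disp;
}

uintptr_t FieldOperandAddr(uintptr_t tagged_base, intptr_t disp) {
  return tagged_base + disp - kHeapObjectTag;  // untag while addressing
}

int main() {
  const int kFixedArrayHeaderSize = 8;  // assumed header size
  uintptr_t tagged = 0x2000 + kHeapObjectTag;  // a tagged object pointer
  assert(FieldOperandAddr(tagged, kFixedArrayHeaderSize) ==
         OperandAddr(tagged, kFixedArrayHeaderSize - kHeapObjectTag));
  return 0;
}
```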
@@ -263,6 +249,41 @@ void MacroAssembler::RecordWriteNonSmi(Register object,
 }
 
+void MacroAssembler::InNewSpace(Register object,
+                                Register scratch,
+                                Condition cc,
+                                Label* branch) {
+  if (Serializer::enabled()) {
+    // Can't do arithmetic on external references if it might get serialized.
+    // The mask isn't really an address. We load it as an external reference in
+    // case the size of the new space is different between the snapshot maker
+    // and the running system.
+    if (scratch.is(object)) {
+      movq(kScratchRegister, ExternalReference::new_space_mask());
+      and_(scratch, kScratchRegister);
+    } else {
+      movq(scratch, ExternalReference::new_space_mask());
+      and_(scratch, object);
+    }
+    movq(kScratchRegister, ExternalReference::new_space_start());
+    cmpq(scratch, kScratchRegister);
+    j(cc, branch);
+  } else {
+    ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
+    intptr_t new_space_start =
+        reinterpret_cast<intptr_t>(Heap::NewSpaceStart());
+    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
+    if (scratch.is(object)) {
+      addq(scratch, kScratchRegister);
+    } else {
+      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
+    }
+    and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
+    j(cc, branch);
+  }
+}
+
 void MacroAssembler::Assert(Condition cc, const char* msg) {
   if (FLAG_debug_code) Check(cc, msg);
 }
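Note: the non-serializer path avoids a separate compare. It adds the negated new-space start to the object pointer, then masks; the result is zero exactly when the object lies inside new space, so the flags set by and_ feed j(cc, ...) directly. A hedged sketch with assumed layout constants (V8 reads the real start and mask from Heap at code-generation time):

```cpp
// Sketch of the non-serializer InNewSpace check: (object - start) & mask
// is zero exactly when 'object' points into new space. Start and size
// here are assumed values for illustration only.
#include <cstdint>
#include <cstdio>

const uintptr_t kNewSpaceStart = 0x20000000;  // assumed, aligned to size
const uintptr_t kNewSpaceSize = 0x00800000;   // assumed power of two
const uintptr_t kNewSpaceMask = ~(kNewSpaceSize - 1);

bool InNewSpace(uintptr_t object) {
  // movq kScratchRegister, -start; addq/lea; and_ mask; j(zero/not_zero).
  uintptr_t scratch = object + (0 - kNewSpaceStart);
  return (scratch & kNewSpaceMask) == 0;
}

int main() {
  printf("inside=%d outside=%d\n",
         InNewSpace(kNewSpaceStart + 0x100),
         InNewSpace(kNewSpaceStart + kNewSpaceSize + 0x100));
  return 0;
}
```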
@@ -2161,7 +2182,7 @@ Register MacroAssembler::CheckMaps(JSObject* object,
   int depth = 0;
 
   if (save_at_depth == depth) {
-    movq(Operand(rsp, kPointerSize), reg);
+    movq(Operand(rsp, kPointerSize), object_reg);
   }
 
   // Check the maps in the prototype chain.
......
@@ -102,7 +102,6 @@ class MacroAssembler: public Assembler {
                   Register value,
                   Register scratch);
 
-
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // ---------------------------------------------------------------------------
   // Debugger Support
......
@@ -58,6 +58,29 @@
     assertEquals(undefined, a.pop(1, 2, 3), "9th pop");
     assertEquals(0, a.length, "length 9th pop");
   }
+
+  // Check that pop works on inherited properties.
+  for (var i = 0; i < 10; i++) {  // Ensure ICs are stabilized.
+    Array.prototype[1] = 1;
+    Array.prototype[3] = 3;
+    Array.prototype[5] = 5;
+    Array.prototype[7] = 7;
+    Array.prototype[9] = 9;
+    a = [0,1,2,,4,,6,7,8,,];
+    assertEquals(10, a.length, "inherit-initial-length");
+    for (var j = 9; j >= 0; j--) {
+      assertEquals(j + 1, a.length, "inherit-pre-length-" + j);
+      assertTrue(j in a, "has property " + j);
+      var own = a.hasOwnProperty(j);
+      var inherited = Array.prototype.hasOwnProperty(j);
+      assertEquals(j, a.pop(), "inherit-pop");
+      assertEquals(j, a.length, "inherit-post-length");
+      assertFalse(a.hasOwnProperty(j), "inherit-deleted-own-" + j);
+      assertEquals(inherited, Array.prototype.hasOwnProperty(j),
+                   "inherit-not-deleted-inherited-" + j);
+    }
+    Array.prototype.length = 0;  // Clean-up.
+  }
 })();
 
 // Test the case of not JSArray receiver.
......