Commit f3033032 authored by verwaest@chromium.org

Reland (and fix) "Add hydrogen support for ArrayPop, and remove the handwritten call stubs."

BUG=
R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/144913003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18749 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 1474d051
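For context, a minimal sketch of the pattern this change targets (not part of the commit; adapted from the mjsunit test added at the bottom of this diff, and assuming d8 with --allow-natives-syntax): a monomorphic o.pop() call on a fast-elements array, which the optimizing compiler can now inline directly instead of dispatching through the hand-written CompileArrayPopCall stubs removed below.

var o = [6, 7, 8, 9];
function f(b) {
  // After warm-up, Crankshaft inlines the pop() as hydrogen IR
  // (see the kArrayPop case added to TryInlineBuiltinMethodCall below).
  return o.pop() + b;
}
f(1);                            // 10; collects type feedback
f(1);                            // 9
%OptimizeFunctionOnNextCall(f);
f(1);                            // 8, via the inlined fast path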
@@ -1571,79 +1571,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
Register receiver = r0;
Register scratch = r1;
Register elements = r3;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CheckMap(elements,
scratch,
Heap::kFixedArrayMapRootIndex,
&call_builtin,
DONT_DO_SMI_CHECK);
// Get the array's length into r4 and calculate new length.
__ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
__ b(lt, &return_undefined);
// Get the last element.
__ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
__ ldr(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ cmp(scratch, r6);
__ b(eq, &call_builtin);
// Set the array's length.
__ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
__ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
const int argc = arguments().immediate();
__ Drop(argc + 1);
__ mov(r0, scratch);
__ Ret();
__ bind(&return_undefined);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ Drop(argc + 1);
__ Ret();
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,
...
@@ -7613,6 +7613,53 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
return true;
}
break;
case kArrayPop: {
if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
return false;
}
if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
ElementsKind elements_kind = receiver_map->elements_kind();
if (!IsFastElementsKind(elements_kind)) return false;
AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
Drop(expr->arguments()->length());
HValue* result;
HValue* checked_object;
HValue* reduced_length;
HValue* receiver = Pop();
{ NoObservableSideEffectsScope scope(this);
checked_object = AddCheckMap(receiver, receiver_map);
HValue* elements = AddLoadElements(checked_object);
// Ensure that we aren't popping from a copy-on-write array.
if (IsFastSmiOrObjectElementsKind(elements_kind)) {
Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(), top_info());
}
HValue* length = Add<HLoadNamedField>(
checked_object, HObjectAccess::ForArrayLength(elements_kind));
reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
HValue* bounds_check = Add<HBoundsCheck>(
graph()->GetConstant0(), length);
result = AddElementAccess(elements, reduced_length, NULL,
bounds_check, elements_kind, false);
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
? Add<HConstant>(factory->the_hole_value())
: Add<HConstant>(nan_double);
if (IsFastSmiOrObjectElementsKind(elements_kind)) {
elements_kind = FAST_HOLEY_ELEMENTS;
}
AddElementAccess(
elements, reduced_length, hole, bounds_check, elements_kind, true);
}
Add<HStoreNamedField>(
checked_object, HObjectAccess::ForArrayLength(elements_kind),
reduced_length);
ast_context()->ReturnValue(result);
Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
return true;
}
default:
// Not yet supported for inlining.
break;
...
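A hedged sketch of how the inlined sequence above behaves (not part of the commit; assumes d8 with --allow-natives-syntax). The builder loads the elements array and the length, verifies with a bounds check that the array is non-empty, reads the element at length - 1, overwrites that slot with the hole (the hole NaN for double elements; for smi/object elements the elements map is first checked against fixed_array_map to rule out copy-on-write backing stores, and the hole store uses FAST_HOLEY_ELEMENTS), and finally writes back the decremented length. For instance, a pop on a FAST_DOUBLE_ELEMENTS array exercises the hole-NaN branch:

function popDouble(a) {
  // Inlined once 'a' has been seen as a fast double-elements JSArray.
  return a.pop();
}
popDouble([1.5, 2.5, 3.5]);      // warm up with double elements
popDouble([4.5, 5.5, 6.5]);
%OptimizeFunctionOnNextCall(popDouble);
popDouble([7.5, 8.5, 9.5]);      // 9.5; the vacated slot becomes the hole NaN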
@@ -1666,76 +1666,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Immediate(factory()->fixed_array_map()));
__ j(not_equal, &call_builtin);
// Get the array's length into ecx and calculate new length.
__ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
__ sub(ecx, Immediate(Smi::FromInt(1)));
__ j(negative, &return_undefined);
// Get the last element.
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ mov(eax, FieldOperand(ebx,
ecx, times_half_pointer_size,
FixedArray::kHeaderSize));
__ cmp(eax, Immediate(factory()->the_hole_value()));
__ j(equal, &call_builtin);
// Set the array's length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
// Fill with the hole.
__ mov(FieldOperand(ebx,
ecx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(factory()->the_hole_value()));
const int argc = arguments().immediate();
__ ret((argc + 1) * kPointerSize);
__ bind(&return_undefined);
__ mov(eax, Immediate(factory()->undefined_value()));
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()),
argc + 1,
1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,
...
@@ -1556,78 +1556,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
Register receiver = a0;
Register scratch = a1;
Register elements = a3;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CheckMap(elements,
scratch,
Heap::kFixedArrayMapRootIndex,
&call_builtin,
DONT_DO_SMI_CHECK);
// Get the array's length into t0 and calculate new length.
__ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ Subu(t0, t0, Operand(Smi::FromInt(1)));
__ Branch(&return_undefined, lt, t0, Operand(zero_reg));
// Get the last element.
__ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(elements, elements, t1);
__ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Branch(&call_builtin, eq, scratch, Operand(t2));
// Set the array's length.
__ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
__ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
const int argc = arguments().immediate();
__ mov(v0, scratch);
__ DropAndRet(argc + 1);
__ bind(&return_undefined);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
__ DropAndRet(argc + 1);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,
...
@@ -888,8 +888,7 @@ class KeyedStoreStubCompiler: public StoreStubCompiler {
// Subset of FUNCTIONS_WITH_ID_LIST with custom constant/global call
// IC stubs.
#define CUSTOM_CALL_IC_GENERATORS(V) \
V(ArrayPush) \
V(ArrayPop)
class CallStubCompiler: public StubCompiler {
...
@@ -1593,77 +1593,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ movp(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
Heap::kFixedArrayMapRootIndex);
__ j(not_equal, &call_builtin);
// Get the array's length into rcx and calculate new length.
__ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
__ subl(rcx, Immediate(1));
__ j(negative, &return_undefined);
// Get the last element.
__ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
__ movp(rax, FieldOperand(rbx,
rcx, times_pointer_size,
FixedArray::kHeaderSize));
// Check if element is already the hole.
__ cmpq(rax, r9);
// If so, call slow-case to also check prototypes for value.
__ j(equal, &call_builtin);
// Set the array's length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
// Fill with the hole and return original value.
__ movp(FieldOperand(rbx,
rcx, times_pointer_size,
FixedArray::kHeaderSize),
r9);
const int argc = arguments().immediate();
__ ret((argc + 1) * kPointerSize);
__ bind(&return_undefined);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()),
argc + 1,
1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,
...
// Copyright 2014 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
var o = [6,7,8,9];
function f(b) {
var v = o.pop() + b;
return v;
}
assertEquals(10, f(1));
assertEquals(9, f(1));
assertEquals(8, f(1));
%OptimizeFunctionOnNextCall(f);
assertEquals("61", f("1"));