Commit 767ce788 authored by bmeurer, committed by Commit Bot

[turbofan] Introduce new JSCallWithArrayLike operator.

Add a new JSCallWithArrayLike operator that is backed by the
CallWithArrayLike builtin, and use that operator for both
Function.prototype.apply and Reflect.apply inlining. Also unify
the handling of JSCallWithArrayLike and JSCallWithSpread in
the JSCallReducer to reduce the copy&paste overhead.

Drive-by fix: Add extensive test coverage for Reflect.apply and
Function.prototype.apply in optimized code, especially for corner
cases that were previously untested.

BUG=v8:4587,v8:5269
R=petermarshall@chromium.org

Review-Url: https://codereview.chromium.org/2950773002
Cr-Commit-Position: refs/heads/master@{#46041}
parent d00b37fb
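
For illustration, a minimal JavaScript sketch (the identifiers below are made up and not part of the commit) of the two call patterns that can now be lowered to the single JSCallWithArrayLike operator when TurboFan optimizes them:

// Illustrative sketch only; target, viaApply and viaReflectApply are
// hypothetical names, not identifiers from this commit.
function target(a, b) { return a + b; }

function viaApply(args) {
  // Function.prototype.apply with an array-like argumentsList.
  return target.apply(null, args);
}

function viaReflectApply(args) {
  // Reflect.apply with an array-like argumentsList.
  return Reflect.apply(target, null, args);
}

viaApply([1, 2]);         // 3
viaReflectApply([1, 2]);  // 3
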
......@@ -1930,13 +1930,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- sp[0] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(r1, &receiver_not_callable);
__ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r4, Operand(1 << Map::kIsCallable));
__ b(eq, &receiver_not_callable);
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -1954,13 +1950,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
__ mov(r0, Operand(0));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
__ str(r1, MemOperand(sp, 0));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -2038,24 +2027,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- sp[0] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(r1, &target_not_callable);
__ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r4, Operand(1 << Map::kIsCallable));
__ b(eq, &target_not_callable);
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
__ str(r1, MemOperand(sp, 0));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -1994,13 +1994,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- jssp[0] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(receiver, &receiver_not_callable);
__ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
&receiver_not_callable);
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -2020,13 +2016,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
DCHECK(receiver.Is(x1));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ Bind(&receiver_not_callable);
{
__ Poke(receiver, 0);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -2125,23 +2114,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- jssp[0] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(target, &target_not_callable);
__ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
__ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ Bind(&target_not_callable);
{
__ Poke(target, 0);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -107,6 +107,21 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithArrayLike(
if_holey_array(this, Label::kDeferred),
if_runtime(this, Label::kDeferred);
// Perform appropriate checks on {target} (and {new_target} first).
if (new_target == nullptr) {
// Check that {target} is Callable.
Label if_target_callable(this),
if_target_not_callable(this, Label::kDeferred);
GotoIf(TaggedIsSmi(target), &if_target_not_callable);
Branch(IsCallable(target), &if_target_callable, &if_target_not_callable);
BIND(&if_target_not_callable);
{
CallRuntime(Runtime::kThrowApplyNonFunction, context, target);
Unreachable();
}
BIND(&if_target_callable);
}
GotoIf(TaggedIsSmi(arguments_list), &if_runtime);
Node* arguments_list_map = LoadMap(arguments_list);
Node* native_context = LoadNativeContext(context);
......
......@@ -1697,13 +1697,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- esp[4] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
__ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
__ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsCallable));
__ j(zero, &receiver_not_callable, Label::kNear);
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -1722,13 +1718,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
__ Set(eax, 0);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
__ mov(Operand(esp, kPointerSize), edi);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -1817,24 +1806,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- esp[4] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(edi, &target_not_callable, Label::kNear);
__ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
__ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsCallable));
__ j(zero, &target_not_callable, Label::kNear);
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
__ mov(Operand(esp, kPointerSize), edi);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -1927,13 +1927,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- sp[0] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(a1, &receiver_not_callable);
__ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
__ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
__ And(t0, t0, Operand(1 << Map::kIsCallable));
__ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -1951,13 +1947,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
__ mov(a0, zero_reg);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
__ sw(a1, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -2044,24 +2033,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- sp[0] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(a1, &target_not_callable);
__ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
__ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
__ And(t0, t0, Operand(1 << Map::kIsCallable));
__ Branch(&target_not_callable, eq, t0, Operand(zero_reg));
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
__ sw(a1, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -1934,13 +1934,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- sp[0] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(receiver, &receiver_not_callable);
__ Ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsCallable));
__ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -1959,13 +1955,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
DCHECK(receiver.is(a1));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
__ Sd(receiver, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -2057,24 +2046,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- sp[0] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(target, &target_not_callable);
__ Ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
__ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
__ And(a4, a4, Operand(1 << Map::kIsCallable));
__ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
__ Sd(target, MemOperand(sp));
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -1669,13 +1669,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- rsp[8] : thisArg
// -----------------------------------
// 2. Make sure the receiver is actually callable.
Label receiver_not_callable;
__ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
__ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsCallable));
__ j(zero, &receiver_not_callable, Label::kNear);
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
......@@ -1695,14 +1691,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
__ Set(rax, 0);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// 4c. The receiver is not callable, throw an appropriate TypeError.
__ bind(&receiver_not_callable);
{
StackArgumentsAccessor args(rsp, 0);
__ movp(args.GetReceiverOperand(), rdi);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
// static
......@@ -1798,25 +1786,13 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- rsp[8] : thisArgument
// -----------------------------------
// 2. Make sure the target is actually callable.
Label target_not_callable;
__ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
__ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsCallable));
__ j(zero, &target_not_callable, Label::kNear);
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3a. Apply the target to the given argumentsList.
// 3. Apply the target to the given argumentsList.
__ Jump(masm->isolate()->builtins()->CallWithArrayLike(),
RelocInfo::CODE_TARGET);
// 3b. The target is not callable, throw an appropriate TypeError.
__ bind(&target_not_callable);
{
StackArgumentsAccessor args(rsp, 0);
__ movp(args.GetReceiverOperand(), rdi);
__ TailCallRuntime(Runtime::kThrowApplyNonFunction);
}
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
......
......@@ -286,6 +286,12 @@ Callable CodeFactory::Call(Isolate* isolate, ConvertReceiverMode mode,
CallTrampolineDescriptor(isolate));
}
// static
Callable CodeFactory::CallWithArrayLike(Isolate* isolate) {
return Callable(isolate->builtins()->CallWithArrayLike(),
CallWithArrayLikeDescriptor(isolate));
}
// static
Callable CodeFactory::CallWithSpread(Isolate* isolate) {
return Callable(isolate->builtins()->CallWithSpread(),
......
......@@ -87,6 +87,7 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable Call(Isolate* isolate,
ConvertReceiverMode mode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
static Callable CallWithArrayLike(Isolate* isolate);
static Callable CallWithSpread(Isolate* isolate);
static Callable CallFunction(
Isolate* isolate, ConvertReceiverMode mode = ConvertReceiverMode::kAny,
......
......@@ -18,6 +18,7 @@ class Factory;
namespace compiler {
// Forward declarations.
class CallFrequency;
class CommonOperatorBuilder;
class JSGraph;
class JSOperatorBuilder;
......@@ -50,12 +51,15 @@ class JSCallReducer final : public AdvancedReducer {
Reduction ReduceObjectGetPrototypeOf(Node* node);
Reduction ReduceObjectPrototypeGetProto(Node* node);
Reduction ReduceObjectPrototypeIsPrototypeOf(Node* node);
Reduction ReduceReflectApply(Node* node);
Reduction ReduceReflectGetPrototypeOf(Node* node);
Reduction ReduceArrayForEach(Handle<JSFunction> function, Node* node);
Reduction ReduceSpreadCall(Node* node, int arity);
Reduction ReduceCallOrConstructWithArrayLikeOrSpread(
Node* node, int arity, CallFrequency const& frequency);
Reduction ReduceJSConstruct(Node* node);
Reduction ReduceJSConstructWithSpread(Node* node);
Reduction ReduceJSCall(Node* node);
Reduction ReduceJSCallWithArrayLike(Node* node);
Reduction ReduceJSCallWithSpread(Node* node);
Reduction ReduceReturnReceiver(Node* node);
......
......@@ -652,6 +652,20 @@ void JSGenericLowering::LowerJSCall(Node* node) {
NodeProperties::ChangeOp(node, common()->Call(desc));
}
void JSGenericLowering::LowerJSCallWithArrayLike(Node* node) {
Callable callable = CodeFactory::CallWithArrayLike(isolate());
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), zone(), callable.descriptor(), 1, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
// Shuffle the inputs to match the CallWithArrayLike builtin's calling
// convention: {target} and {arguments_list} are passed in registers,
// while {receiver} becomes the single stack parameter (the "1" passed
// to GetStubCallDescriptor above).
Node* receiver = node->InputAt(1);
Node* arguments_list = node->InputAt(2);
node->InsertInput(zone(), 0, stub_code);
node->ReplaceInput(3, receiver);
node->ReplaceInput(2, arguments_list);
NodeProperties::ChangeOp(node, common()->Call(desc));
}
void JSGenericLowering::LowerJSCallWithSpread(Node* node) {
SpreadWithArityParameter const& p = SpreadWithArityParameterOf(node->op());
int const arg_count = static_cast<int>(p.arity() - 2);
......
......@@ -22,6 +22,11 @@ std::ostream& operator<<(std::ostream& os, CallFrequency f) {
return os << f.value();
}
CallFrequency CallFrequencyOf(Operator const* op) {
DCHECK_EQ(IrOpcode::kJSCallWithArrayLike, op->opcode());
return OpParameter<CallFrequency>(op);
}
VectorSlotPair::VectorSlotPair() {}
......@@ -821,6 +826,14 @@ const Operator* JSOperatorBuilder::Call(size_t arity, CallFrequency frequency,
parameters); // parameter
}
const Operator* JSOperatorBuilder::CallWithArrayLike(CallFrequency frequency) {
return new (zone()) Operator1<CallFrequency>( // --
IrOpcode::kJSCallWithArrayLike, Operator::kNoProperties, // opcode
"JSCallWithArrayLike", // name
3, 1, 1, 1, 1, 2, // counts
frequency); // parameter
}
const Operator* JSOperatorBuilder::CallWithSpread(uint32_t arity) {
SpreadWithArityParameter parameters(arity);
return new (zone()) Operator1<SpreadWithArityParameter>( // --
......
......@@ -57,6 +57,8 @@ class CallFrequency final {
std::ostream& operator<<(std::ostream&, CallFrequency);
CallFrequency CallFrequencyOf(Operator const* op) WARN_UNUSED_RESULT;
// Defines a pair of {FeedbackVector} and {FeedbackSlot}, which
// is used to access the type feedback for a certain {Node}.
class V8_EXPORT_PRIVATE VectorSlotPair {
......@@ -731,6 +733,7 @@ class V8_EXPORT_PRIVATE JSOperatorBuilder final
VectorSlotPair const& feedback = VectorSlotPair(),
ConvertReceiverMode convert_mode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
const Operator* CallWithArrayLike(CallFrequency frequency);
const Operator* CallWithSpread(uint32_t arity);
const Operator* CallRuntime(Runtime::FunctionId id);
const Operator* CallRuntime(Runtime::FunctionId id, size_t arity);
......
......@@ -166,6 +166,7 @@
V(JSConstructWithSpread) \
V(JSCallForwardVarargs) \
V(JSCall) \
V(JSCallWithArrayLike) \
V(JSCallWithSpread) \
V(JSCallRuntime) \
V(JSConvertReceiver) \
......
......@@ -103,6 +103,7 @@ bool OperatorProperties::HasFrameStateInput(const Operator* op) {
case IrOpcode::kJSConstructWithSpread:
case IrOpcode::kJSCallForwardVarargs:
case IrOpcode::kJSCall:
case IrOpcode::kJSCallWithArrayLike:
case IrOpcode::kJSCallWithSpread:
// Misc operations
......
......@@ -1610,6 +1610,10 @@ Type* Typer::Visitor::TypeJSCall(Node* node) {
return TypeUnaryOp(node, JSCallTyper);
}
Type* Typer::Visitor::TypeJSCallWithArrayLike(Node* node) {
return TypeUnaryOp(node, JSCallTyper);
}
Type* Typer::Visitor::TypeJSCallWithSpread(Node* node) {
return TypeUnaryOp(node, JSCallTyper);
}
......
......@@ -721,6 +721,7 @@ void Verifier::Visitor::Check(Node* node) {
break;
case IrOpcode::kJSCallForwardVarargs:
case IrOpcode::kJSCall:
case IrOpcode::kJSCallWithArrayLike:
case IrOpcode::kJSCallWithSpread:
case IrOpcode::kJSCallRuntime:
// Type can be anything.
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
// Test Function.prototype.apply with null/undefined argumentsList
(function() {
"use strict";
function bar() { return this; }
function foo() { return bar.apply(this, null); }
assertEquals(42, foo.call(42));
assertEquals(42, foo.call(42));
%OptimizeFunctionOnNextCall(foo);
assertEquals(42, foo.call(42));
})();
(function() {
"use strict";
function bar() { return this; }
function foo() { return bar.apply(this, undefined); }
assertEquals(42, foo.call(42));
assertEquals(42, foo.call(42));
%OptimizeFunctionOnNextCall(foo);
assertEquals(42, foo.call(42));
})();
// Test Function.prototype.apply within try/catch.
(function() {
"use strict";
function foo(bar) {
try {
return Function.prototype.apply.call(bar, bar, arguments);
} catch (e) {
return 1;
}
}
assertEquals(1, foo());
assertEquals(1, foo());
%OptimizeFunctionOnNextCall(foo);
assertEquals(1, foo());
})();
(function() {
"use strict";
function foo(bar) {
try {
return Function.prototype.apply.call(bar, bar, bar);
} catch (e) {
return 1;
}
}
assertEquals(1, foo());
assertEquals(1, foo());
%OptimizeFunctionOnNextCall(foo);
assertEquals(1, foo());
})();
// Test Function.prototype.apply with wrong number of arguments.
(function() {
"use strict";
function bar() { return this; }
function foo() { return bar.apply(); }
assertEquals(undefined, foo());
assertEquals(undefined, foo());
%OptimizeFunctionOnNextCall(foo);
assertEquals(undefined, foo());
})();
(function() {
"use strict";
function bar() { return this; }
function foo() { return bar.apply(this); }
assertEquals(42, foo.call(42));
assertEquals(42, foo.call(42));
%OptimizeFunctionOnNextCall(foo);
assertEquals(42, foo.call(42));
})();
(function() {
"use strict";
function bar() { return this; }
function foo() { return bar.apply(this, arguments, this); }
assertEquals(42, foo.call(42));
assertEquals(42, foo.call(42));
%OptimizeFunctionOnNextCall(foo);
assertEquals(42, foo.call(42));
})();
// Test proper order of callable check and array-like iteration
// in Function.prototype.apply.
(function() {
var dummy_length_counter = 0;
var dummy = { get length() { ++dummy_length_counter; return 0; } };
function foo() {
return Function.prototype.apply.call(undefined, this, dummy);
}
assertThrows(foo, TypeError);
assertThrows(foo, TypeError);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo, TypeError);
assertEquals(0, dummy_length_counter);
})();
(function() {
var dummy_length_counter = 0;
var dummy = { get length() { ++dummy_length_counter; return 0; } };
function foo() {
return Function.prototype.apply.call(null, this, dummy);
}
assertThrows(foo, TypeError);
assertThrows(foo, TypeError);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo, TypeError);
assertEquals(0, dummy_length_counter);
})();
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
// Test Reflect.apply with wrong number of arguments.
(function() {
"use strict";
function bar() { return this; }
function foo() { return Reflect.apply(bar); }
assertThrows(foo);
assertThrows(foo);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo);
})();
(function() {
"use strict";
function bar() { return this; }
function foo() { return Reflect.apply(bar, this); }
assertThrows(foo);
assertThrows(foo);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo);
})();
(function() {
"use strict";
function bar() { return this; }
function foo() { return Reflect.apply(bar, this, arguments, this); }
assertEquals(42, foo.call(42));
assertEquals(42, foo.call(42));
%OptimizeFunctionOnNextCall(foo);
assertEquals(42, foo.call(42));
})();
// Test Reflect.apply within try/catch.
(function() {
"use strict";
function foo(bar) {
try {
return Reflect.apply(bar, bar, arguments);
} catch (e) {
return 1;
}
}
assertEquals(1, foo());
assertEquals(1, foo());
%OptimizeFunctionOnNextCall(foo);
assertEquals(1, foo());
})();
(function() {
"use strict";
function foo(bar) {
try {
return Reflect.apply(bar, bar, bar);
} catch (e) {
return 1;
}
}
assertEquals(1, foo());
assertEquals(1, foo());
%OptimizeFunctionOnNextCall(foo);
assertEquals(1, foo());
})();
// Test proper order of callable check and array-like iteration
// in Reflect.apply.
(function() {
var dummy_length_counter = 0;
var dummy = { get length() { ++dummy_length_counter; return 0; } };
function foo() {
return Reflect.apply(undefined, this, dummy);
}
assertThrows(foo, TypeError);
assertThrows(foo, TypeError);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo, TypeError);
assertEquals(0, dummy_length_counter);
})();
(function() {
var dummy_length_counter = 0;
var dummy = { get length() { ++dummy_length_counter; return 0; } };
function foo() {
return Reflect.apply(null, this, dummy);
}
assertThrows(foo, TypeError);
assertThrows(foo, TypeError);
%OptimizeFunctionOnNextCall(foo);
assertThrows(foo, TypeError);
assertEquals(0, dummy_length_counter);
})();
......@@ -28,7 +28,7 @@ test(function() {
// kApplyNonFunction
test(function() {
Function.prototype.apply.call(1, []);
Reflect.apply(1, []);
}, "Function.prototype.apply was called on 1, which is a number " +
"and not a function", TypeError);
......