Commit 1fa4285e authored by danno's avatar danno Committed by Commit bot

[turbofan] Enable tail calls for %_CallFunction.

This involves:
- Enabling the tail call optimization reducer in all cases.
- Adding an additional flag to CallFunctionParameters to mark call sites
  at which tail calls are allowed.
- Only set the tail-call flag for %_CallFunction.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1216933011

Cr-Commit-Position: refs/heads/master@{#29436}
parent 1297a51e
......@@ -304,10 +304,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
}
}
......@@ -1053,8 +1049,12 @@ void CodeGenerator::AssembleReturn() {
__ LeaveFrame(StackFrame::MANUAL);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count != 0) {
__ Drop(pop_count);
}
__ Ret();
}
} else {
......
......@@ -1166,9 +1166,7 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
......@@ -1177,8 +1175,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
// heuristics in the register allocator for where to emit constants.
InitializeCallBuffer(node, &buffer, true, false);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
......@@ -351,10 +351,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ Mov(jssp, fp);
__ Pop(fp, lr);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
}
}
......@@ -1184,8 +1180,12 @@ void CodeGenerator::AssembleReturn() {
__ Pop(fp, lr);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count != 0) {
__ Drop(pop_count);
}
__ Ret();
}
} else {
......
......@@ -1470,9 +1470,7 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
......@@ -1481,8 +1479,6 @@ void InstructionSelector::VisitTailCall(Node* node) {
// heuristics in the register allocator for where to emit constants.
InitializeCallBuffer(node, &buffer, true, false);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
......@@ -290,13 +290,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ mov(esp, ebp);
__ pop(ebp);
int32_t bytes_to_pop =
descriptor->IsJSFunctionCall()
? static_cast<int32_t>(descriptor->JSParameterCount() *
kPointerSize)
: 0;
__ pop(Operand(esp, bytes_to_pop));
__ add(esp, Immediate(bytes_to_pop));
}
}
......@@ -1348,8 +1341,14 @@ void CodeGenerator::AssembleReturn() {
__ pop(ebp); // Pop caller's frame pointer.
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Ret(pop_count * kPointerSize, ebx);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count == 0) {
__ ret(0);
} else {
__ Ret(pop_count * kPointerSize, ebx);
}
}
} else {
__ ret(0);
......
......@@ -906,16 +906,13 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, true);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
......@@ -529,6 +529,9 @@ void JSGenericLowering::LowerJSCallFunction(Node* node) {
CallFunctionStub stub(isolate(), arg_count, p.flags());
CallInterfaceDescriptor d = stub.GetCallInterfaceDescriptor();
CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);
if (p.AllowTailCalls()) {
flags |= CallDescriptor::kSupportsTailCalls;
}
CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), zone(), d, static_cast<int>(p.arity() - 1), flags);
Node* stub_code = jsgraph()->HeapConstant(stub.GetCode());
......
......@@ -94,6 +94,8 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
return ReduceGetCallerJSFunction(node);
case Runtime::kInlineThrowNotDateError:
return ReduceThrowNotDateError(node);
case Runtime::kInlineCallFunction:
return ReduceCallFunction(node);
default:
break;
}
......@@ -513,6 +515,21 @@ Reduction JSIntrinsicLowering::ReduceThrowNotDateError(Node* node) {
}
// Lowers the %_CallFunction intrinsic to a JSCallFunction node that is
// marked as eligible for tail-call optimization (ALLOW_TAIL_CALLS).
// The intrinsic's inputs arrive as (receiver, ...args..., function); the
// JSCallFunction operator expects the callee first.
Reduction JSIntrinsicLowering::ReduceCallFunction(Node* node) {
  CallRuntimeParameters params = OpParameter<CallRuntimeParameters>(node->op());
  // arity counts the receiver, the arguments and the trailing function.
  // Presumably arity >= 1 — the '--arity' loop below would wrap around on
  // zero; TODO confirm callers guarantee a non-empty input list.
  size_t arity = params.arity();
  node->set_op(javascript()->CallFunction(arity, NO_CALL_FUNCTION_FLAGS, STRICT,
                                          VectorSlotPair(), ALLOW_TAIL_CALLS));
  // Rotate the inputs right by one: the function (last input) moves to
  // input 0, and receiver/arguments each shift up one slot.
  Node* function = node->InputAt(static_cast<int>(arity - 1));
  while (--arity != 0) {
    node->ReplaceInput(static_cast<int>(arity),
                       node->InputAt(static_cast<int>(arity - 1)));
  }
  node->ReplaceInput(0, function);
  return Changed(node);
}
Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
Node* b) {
node->set_op(op);
......@@ -549,6 +566,10 @@ CommonOperatorBuilder* JSIntrinsicLowering::common() const {
return jsgraph()->common();
}
// Accessor for the JS operator builder.  Routed through the jsgraph()
// accessor for consistency with the sibling common() and machine()
// accessors (behavior is identical: jsgraph() simply returns jsgraph_).
JSOperatorBuilder* JSIntrinsicLowering::javascript() const {
  return jsgraph()->javascript();
}
MachineOperatorBuilder* JSIntrinsicLowering::machine() const {
return jsgraph()->machine();
......
......@@ -15,6 +15,7 @@ namespace compiler {
// Forward declarations.
class CommonOperatorBuilder;
class JSOperatorBuilder;
class JSGraph;
class MachineOperatorBuilder;
......@@ -56,6 +57,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
Reduction ReduceGetTypeFeedbackVector(Node* node);
Reduction ReduceGetCallerJSFunction(Node* node);
Reduction ReduceThrowNotDateError(Node* node);
Reduction ReduceCallFunction(Node* node);
Reduction Change(Node* node, const Operator* op);
Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
......@@ -65,6 +67,7 @@ class JSIntrinsicLowering final : public AdvancedReducer {
Graph* graph() const;
JSGraph* jsgraph() const { return jsgraph_; }
CommonOperatorBuilder* common() const;
JSOperatorBuilder* javascript() const;
MachineOperatorBuilder* machine() const;
DeoptimizationMode mode() const { return mode_; }
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
......
......@@ -30,7 +30,11 @@ size_t hash_value(VectorSlotPair const& p) {
std::ostream& operator<<(std::ostream& os, CallFunctionParameters const& p) {
return os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
os << p.arity() << ", " << p.flags() << ", " << p.language_mode();
if (p.AllowTailCalls()) {
os << ", ALLOW_TAIL_CALLS";
}
return os;
}
......@@ -470,10 +474,13 @@ CACHED_OP_LIST_WITH_LANGUAGE_MODE(CACHED_WITH_LANGUAGE_MODE)
#undef CACHED_WITH_LANGUAGE_MODE
const Operator* JSOperatorBuilder::CallFunction(
size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
VectorSlotPair const& feedback) {
CallFunctionParameters parameters(arity, flags, language_mode, feedback);
const Operator* JSOperatorBuilder::CallFunction(size_t arity,
CallFunctionFlags flags,
LanguageMode language_mode,
VectorSlotPair const& feedback,
TailCallMode tail_call_mode) {
CallFunctionParameters parameters(arity, flags, language_mode, feedback,
tail_call_mode);
return new (zone()) Operator1<CallFunctionParameters>( // --
IrOpcode::kJSCallFunction, Operator::kNoProperties, // opcode
"JSCallFunction", // name
......
......@@ -45,6 +45,7 @@ bool operator!=(VectorSlotPair const&, VectorSlotPair const&);
size_t hash_value(VectorSlotPair const&);
enum TailCallMode { NO_TAIL_CALLS, ALLOW_TAIL_CALLS };
// Defines the arity and the call flags for a JavaScript function call. This is
// used as a parameter by JSCallFunction operators.
......@@ -52,10 +53,12 @@ class CallFunctionParameters final {
public:
CallFunctionParameters(size_t arity, CallFunctionFlags flags,
LanguageMode language_mode,
VectorSlotPair const& feedback)
VectorSlotPair const& feedback,
TailCallMode tail_call_mode)
: bit_field_(ArityField::encode(arity) | FlagsField::encode(flags) |
LanguageModeField::encode(language_mode)),
feedback_(feedback) {}
feedback_(feedback),
tail_call_mode_(tail_call_mode) {}
size_t arity() const { return ArityField::decode(bit_field_); }
CallFunctionFlags flags() const { return FlagsField::decode(bit_field_); }
......@@ -72,6 +75,8 @@ class CallFunctionParameters final {
return !(*this == that);
}
bool AllowTailCalls() const { return tail_call_mode_ == ALLOW_TAIL_CALLS; }
private:
friend size_t hash_value(CallFunctionParameters const& p) {
return base::hash_combine(p.bit_field_, p.feedback_);
......@@ -83,6 +88,7 @@ class CallFunctionParameters final {
const uint32_t bit_field_;
const VectorSlotPair feedback_;
bool tail_call_mode_;
};
size_t hash_value(CallFunctionParameters const&);
......@@ -415,7 +421,8 @@ class JSOperatorBuilder final : public ZoneObject {
const Operator* CallFunction(
size_t arity, CallFunctionFlags flags, LanguageMode language_mode,
VectorSlotPair const& feedback = VectorSlotPair());
VectorSlotPair const& feedback = VectorSlotPair(),
TailCallMode tail_call_mode = NO_TAIL_CALLS);
const Operator* CallRuntime(Runtime::FunctionId id, size_t arity);
const Operator* CallConstruct(int arguments);
......
......@@ -4,6 +4,7 @@
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node.h"
#include "src/compiler/pipeline.h"
......@@ -48,6 +49,63 @@ bool CallDescriptor::HasSameReturnLocationsAs(
}
// Returns true if the call described by |node| may be emitted as a tail
// call from a function with this (incoming) descriptor.  Tail calling is
// currently allowed if return locations match and all parameters are either
// in registers or on the stack but match exactly in number and content.
bool CallDescriptor::CanTailCall(const Node* node) const {
  CallDescriptor const* other = OpParameter<CallDescriptor const*>(node);
  if (!HasSameReturnLocationsAs(other)) return false;
  size_t current_input = 0;
  size_t other_input = 0;
  size_t stack_parameter = 0;
  while (true) {
    if (other_input >= other->InputCount()) {
      // Callee inputs exhausted: any remaining caller inputs must live in
      // registers.  Note '<' (not '<='): InputCount() is one past the last
      // valid index for GetInputLocation().
      while (current_input < InputCount()) {
        if (!GetInputLocation(current_input).is_register()) {
          return false;
        }
        ++current_input;
      }
      return true;
    }
    if (current_input >= InputCount()) {
      // Caller inputs exhausted: any remaining callee inputs must live in
      // registers.
      while (other_input < other->InputCount()) {
        if (!other->GetInputLocation(other_input).is_register()) {
          return false;
        }
        ++other_input;
      }
      return true;
    }
    // Register inputs on either side never block the tail call; skip them.
    if (GetInputLocation(current_input).is_register()) {
      ++current_input;
      continue;
    }
    if (other->GetInputLocation(other_input).is_register()) {
      ++other_input;
      continue;
    }
    // Both sides are stack inputs: they must occupy the same slot, and the
    // value passed must be the caller's own stack parameter forwarded in its
    // original position, so the stack is already laid out for the callee.
    if (GetInputLocation(current_input) !=
        other->GetInputLocation(other_input)) {
      return false;
    }
    Node* input = node->InputAt(static_cast<int>(other_input));
    if (input->opcode() != IrOpcode::kParameter) {
      return false;
    }
    size_t param_index = ParameterIndexOf(input->op());
    if (param_index != stack_parameter) {
      return false;
    }
    ++stack_parameter;
    ++current_input;
    ++other_input;
  }
  UNREACHABLE();
  return false;
}
CallDescriptor* Linkage::ComputeIncoming(Zone* zone, CompilationInfo* info) {
if (info->code_stub() != NULL) {
// Use the code stub interface descriptor.
......
......@@ -20,6 +20,7 @@ class CallInterfaceDescriptor;
namespace compiler {
class Node;
class OsrHelper;
// Describes the location for a parameter or a return value to a call.
......@@ -167,6 +168,8 @@ class CallDescriptor final : public ZoneObject {
bool HasSameReturnLocationsAs(const CallDescriptor* other) const;
bool CanTailCall(const Node* call) const;
private:
friend class Linkage;
......
......@@ -399,10 +399,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
}
}
......@@ -1157,8 +1153,14 @@ void CodeGenerator::AssembleReturn() {
__ Pop(ra, fp);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ DropAndRet(pop_count);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {
__ Ret();
}
}
} else {
__ Ret();
......
......@@ -589,16 +589,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
......@@ -399,10 +399,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ LeaveFrame(StackFrame::MANUAL);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Drop(pop_count);
}
}
......@@ -1230,8 +1226,14 @@ void CodeGenerator::AssembleReturn() {
__ Pop(ra, fp);
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ DropAndRet(pop_count);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {
__ Ret();
}
}
} else {
__ Ret();
......
......@@ -738,16 +738,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, false);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
......@@ -703,8 +703,7 @@ struct GenericLoweringPhase {
AddReducer(data, &graph_reducer, &common_reducer);
AddReducer(data, &graph_reducer, &generic_lowering);
AddReducer(data, &graph_reducer, &select_lowering);
// TODO(turbofan): TCO is currently limited to stubs.
if (data->info()->IsStub()) AddReducer(data, &graph_reducer, &tco);
AddReducer(data, &graph_reducer, &tco);
graph_reducer.ReduceGraph();
}
};
......
......@@ -538,13 +538,6 @@ void CodeGenerator::AssembleDeconstructActivationRecord() {
if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
__ movq(rsp, rbp);
__ popq(rbp);
int32_t bytes_to_pop =
descriptor->IsJSFunctionCall()
? static_cast<int32_t>(descriptor->JSParameterCount() *
kPointerSize)
: 0;
__ popq(Operand(rsp, bytes_to_pop));
__ addq(rsp, Immediate(bytes_to_pop));
}
}
......@@ -1578,11 +1571,17 @@ void CodeGenerator::AssembleReturn() {
__ popq(rbp); // Pop caller's frame pointer.
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ Ret(pop_count * kPointerSize, rbx);
: (info()->IsStub()
? info()->code_stub()->GetStackParameterCount()
: 0);
if (pop_count == 0) {
__ Ret();
} else {
__ Ret(pop_count * kPointerSize, rbx);
}
}
} else {
__ ret(0);
__ Ret();
}
}
......
......@@ -1112,16 +1112,12 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);
// TODO(turbofan): Relax restriction for stack parameters.
if (descriptor->UsesOnlyRegisters() &&
descriptor->HasSameReturnLocationsAs(
linkage()->GetIncomingDescriptor())) {
if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(node, &buffer, true, true);
DCHECK_EQ(0u, buffer.pushed_nodes.size());
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --nostress-opt --turbo

// Distinct sentinel objects used to verify that the receiver and parameters
// arrive unchanged through the (tail-)called functions.
var p0 = new Object();
var p1 = new Object();
var p2 = new Object();

// Ensure 1 parameter passed straight-through is handled correctly.
// 100000 self-calls would overflow the stack unless %_CallFunction is
// emitted as a tail call.
var count1 = 100000;
tailee1 = function() {
  "use strict";
  if (count1-- == 0) {
    return this;
  }
  // %_CallFunction(receiver, ...args, target) — receiver forwarded as-is.
  return %_CallFunction(this, tailee1);
};

%OptimizeFunctionOnNextCall(tailee1);
assertEquals(p0, tailee1.call(p0));

// Ensure 2 parameters passed straight-through trigger a tail call are handled
// correctly and don't cause a stack overflow.
var count2 = 100000;
tailee2 = function(px) {
  "use strict";
  assertEquals(p2, px);
  assertEquals(p1, this);
  // The |0 coercions keep the counter a small integer — presumably to avoid
  // number-representation deopts in the optimized code; confirm.
  count2 = ((count2 | 0) - 1) | 0;
  if ((count2 | 0) === 0) {
    return this;
  }
  return %_CallFunction(this, px, tailee2);
};

%OptimizeFunctionOnNextCall(tailee2);
assertEquals(p1, tailee2.call(p1, p2));

// Ensure swapped 2 parameters don't trigger a tail call (parameter swizzling
// for the tail call isn't supported yet).
var count3 = 100000;
tailee3 = function(px) {
  "use strict";
  if (count3-- == 0) {
    return this;
  }
  // Receiver and parameter are swapped, so no tail call is emitted and the
  // deep recursion is expected to throw (stack overflow).
  return %_CallFunction(px, this, tailee3);
};

%OptimizeFunctionOnNextCall(tailee3);
assertThrows(function() { tailee3.call(p1, p2); });

// Ensure too many parameters defeats the tail call optimization (currently
// unsupported).
var count4 = 1000000;
tailee4 = function(px) {
  "use strict";
  if (count4-- == 0) {
    return this;
  }
  // Extra 'undefined' argument: arity mismatch defeats the tail call.
  return %_CallFunction(this, px, undefined, tailee4);
};

%OptimizeFunctionOnNextCall(tailee4);
assertThrows(function() { tailee4.call(p1, p2); });

// Ensure too few parameters defeats the tail call optimization (currently
// unsupported).
var count5 = 1000000;
tailee5 = function(px) {
  "use strict";
  if (count5-- == 0) {
    return this;
  }
  // 'px' is dropped from the onward call: arity mismatch defeats the tail
  // call.
  return %_CallFunction(this, tailee5);
};

%OptimizeFunctionOnNextCall(tailee5);
assertThrows(function() { tailee5.call(p1, p2); });
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment