Commit e519e6fa authored by ishell's avatar ishell Committed by Commit bot

[es6] Further fixes for tail calls.

1) Update profiling counters in Full codegen.
2) Call Runtime::kTraceTailCall when tracing is on

test/mjsunit/es6/tail-call-simple.js is disabled for now, because Turbofan does not fully support TCO yet.

BUG=v8:4698
LOG=N

Review URL: https://codereview.chromium.org/1670133002

Cr-Commit-Position: refs/heads/master@{#33886}
parent e82588f6
...@@ -423,6 +423,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -423,6 +423,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// Updates the profiling counter as if control had jumped backwards from this
// point to the function entry, and calls the InterruptCheck builtin when the
// counter underflows. Shared by the normal return sequence
// (is_tail_call == false) and the tail-call site (is_tail_call == true).
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Approximate the backwards-jump distance by the code emitted so far.
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// Counter still non-negative ("pl" condition): skip the interrupt check.
__ b(pl, &ok);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(r0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(r0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -436,24 +460,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -436,24 +460,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(r0); __ push(r0);
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ b(pl, &ok);
__ push(r0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(r0);
EmitProfilingCounterReset();
__ bind(&ok);
// Make sure that the constant pool is not emitted inside of the return // Make sure that the constant pool is not emitted inside of the return
// sequence. // sequence.
...@@ -2754,6 +2761,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2754,6 +2761,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// ARM64 variant: updates the profiling counter as if control had jumped
// backwards from this point to the function entry, and calls the
// InterruptCheck builtin when the counter underflows. Shared by the normal
// return sequence (is_tail_call == false) and tail-call sites.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Code size so far, rounded to the nearest kCodeSizeMultiplier unit
// (the + kCodeSizeMultiplier / 2 term rounds instead of truncating).
int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// Counter still non-negative ("pl" condition): skip the interrupt check.
__ B(pl, &ok);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ Push(x0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ Pop(x0);
}
EmitProfilingCounterReset();
__ Bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -430,24 +454,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -430,24 +454,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
DCHECK(x0.Is(result_register())); DCHECK(x0.Is(result_register()));
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ B(pl, &ok);
__ Push(x0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ Pop(x0);
EmitProfilingCounterReset();
__ Bind(&ok);
SetReturnPosition(literal()); SetReturnPosition(literal());
const Register& current_sp = __ StackPointer(); const Register& current_sp = __ StackPointer();
...@@ -2556,7 +2563,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2556,7 +2563,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -505,6 +505,7 @@ class FullCodeGenerator: public AstVisitor { ...@@ -505,6 +505,7 @@ class FullCodeGenerator: public AstVisitor {
// Platform-specific return sequence // Platform-specific return sequence
void EmitReturnSequence(); void EmitReturnSequence();
void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);
// Platform-specific code sequences for calls // Platform-specific code sequences for calls
void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny); void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
......
...@@ -386,6 +386,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -386,6 +386,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// ia32 variant: updates the profiling counter as if control had jumped
// backwards from this point to the function entry, and calls the
// InterruptCheck builtin when the counter underflows. Shared by the normal
// return sequence (is_tail_call == false) and tail-call sites.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Approximate the backwards-jump distance by the code emitted so far.
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// Counter still positive: skip the interrupt check.
__ j(positive, &ok, Label::kNear);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(eax);
}
__ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(eax);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -398,24 +422,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -398,24 +422,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(eax); __ push(eax);
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
__ push(eax);
__ call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(eax);
EmitProfilingCounterReset();
__ bind(&ok);
SetReturnPosition(literal()); SetReturnPosition(literal());
__ leave(); __ leave();
...@@ -2639,6 +2646,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2639,6 +2646,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// MIPS variant: updates the profiling counter as if control had jumped
// backwards from this point to the function entry, and calls the
// InterruptCheck builtin when the counter underflows. Shared by the normal
// return sequence (is_tail_call == false) and tail-call sites.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Approximate the backwards-jump distance by the code emitted so far.
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// a3 presumably holds the decremented counter (set by
// EmitProfilingCounterDecrement) — skip the interrupt check while >= 0.
__ Branch(&ok, ge, a3, Operand(zero_reg));
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(v0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(v0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0); __ push(v0);
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
__ push(v0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(v0);
EmitProfilingCounterReset();
__ bind(&ok);
// Make sure that the constant pool is not emitted inside of the return // Make sure that the constant pool is not emitted inside of the return
// sequence. // sequence.
...@@ -2741,6 +2748,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2741,6 +2748,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
// Record source position of the IC call. // Record source position of the IC call.
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// MIPS64 variant: updates the profiling counter as if control had jumped
// backwards from this point to the function entry, and calls the
// InterruptCheck builtin when the counter underflows. Shared by the normal
// return sequence (is_tail_call == false) and tail-call sites.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Approximate the backwards-jump distance by the code emitted so far.
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// a3 presumably holds the decremented counter (set by
// EmitProfilingCounterDecrement) — skip the interrupt check while >= 0.
__ Branch(&ok, ge, a3, Operand(zero_reg));
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ push(v0);
}
__ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ pop(v0);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0); __ push(v0);
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ Branch(&ok, ge, a3, Operand(zero_reg));
__ push(v0);
__ Call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ pop(v0);
EmitProfilingCounterReset();
__ bind(&ok);
// Make sure that the constant pool is not emitted inside of the return // Make sure that the constant pool is not emitted inside of the return
// sequence. // sequence.
...@@ -2747,6 +2754,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2747,6 +2754,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
// Record source position of the IC call. // Record source position of the IC call.
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -389,6 +389,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, ...@@ -389,6 +389,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
} }
// x64 variant: updates the profiling counter as if control had jumped
// backwards from this point to the function entry, and calls the
// InterruptCheck builtin when the counter underflows. Shared by the normal
// return sequence (is_tail_call == false) and tail-call sites.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
bool is_tail_call) {
// Pretend that the exit is a backwards jump to the entry.
int weight = 1;
if (info_->ShouldSelfOptimize()) {
// Spread the interrupt budget over the configured self-optimization count.
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
// Approximate the backwards-jump distance by the code emitted so far.
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
// Counter still positive: skip the interrupt check.
__ j(positive, &ok, Label::kNear);
// Don't need to save result register if we are going to do a tail call.
if (!is_tail_call) {
__ Push(rax);
}
__ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
if (!is_tail_call) {
__ Pop(rax);
}
EmitProfilingCounterReset();
__ bind(&ok);
}
void FullCodeGenerator::EmitReturnSequence() { void FullCodeGenerator::EmitReturnSequence() {
Comment cmnt(masm_, "[ Return sequence"); Comment cmnt(masm_, "[ Return sequence");
...@@ -400,24 +424,7 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -400,24 +424,7 @@ void FullCodeGenerator::EmitReturnSequence() {
__ Push(rax); __ Push(rax);
__ CallRuntime(Runtime::kTraceExit); __ CallRuntime(Runtime::kTraceExit);
} }
// Pretend that the exit is a backwards jump to the entry. EmitProfilingCounterHandlingForReturnSequence(false);
int weight = 1;
if (info_->ShouldSelfOptimize()) {
weight = FLAG_interrupt_budget / FLAG_self_opt_count;
} else {
int distance = masm_->pc_offset();
weight = Min(kMaxBackEdgeWeight,
Max(1, distance / kCodeSizeMultiplier));
}
EmitProfilingCounterDecrement(weight);
Label ok;
__ j(positive, &ok, Label::kNear);
__ Push(rax);
__ call(isolate()->builtins()->InterruptCheck(),
RelocInfo::CODE_TARGET);
__ Pop(rax);
EmitProfilingCounterReset();
__ bind(&ok);
SetReturnPosition(literal()); SetReturnPosition(literal());
__ leave(); __ leave();
...@@ -2625,6 +2632,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { ...@@ -2625,6 +2632,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
SetCallPosition(expr); SetCallPosition(expr);
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceTailCall);
}
// Update profiling counters before the tail call since we will
// not return to this function.
EmitProfilingCounterHandlingForReturnSequence(true);
}
Handle<Code> ic = Handle<Code> ic =
CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
.code(); .code();
......
...@@ -408,53 +408,54 @@ RUNTIME_FUNCTION(Runtime_DisassembleFunction) { ...@@ -408,53 +408,54 @@ RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
return isolate->heap()->undefined_value(); return isolate->heap()->undefined_value();
} }
namespace {
static int StackSize(Isolate* isolate) { int StackSize(Isolate* isolate) {
int n = 0; int n = 0;
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++; for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
return n; return n;
} }
void PrintIndentation(Isolate* isolate) {
static void PrintTransition(Isolate* isolate, Object* result) { const int nmax = 80;
// indentation int n = StackSize(isolate);
{ if (n <= nmax) {
const int nmax = 80; PrintF("%4d:%*s", n, n, "");
int n = StackSize(isolate);
if (n <= nmax)
PrintF("%4d:%*s", n, n, "");
else
PrintF("%4d:%*s", n, nmax, "...");
}
if (result == NULL) {
JavaScriptFrame::PrintTop(isolate, stdout, true, false);
PrintF(" {\n");
} else { } else {
// function result PrintF("%4d:%*s", n, nmax, "...");
PrintF("} -> ");
result->ShortPrint();
PrintF("\n");
} }
} }
} // namespace
RUNTIME_FUNCTION(Runtime_TraceEnter) { RUNTIME_FUNCTION(Runtime_TraceEnter) {
SealHandleScope shs(isolate); SealHandleScope shs(isolate);
DCHECK(args.length() == 0); DCHECK_EQ(0, args.length());
PrintTransition(isolate, NULL); PrintIndentation(isolate);
JavaScriptFrame::PrintTop(isolate, stdout, true, false);
PrintF(" {\n");
return isolate->heap()->undefined_value(); return isolate->heap()->undefined_value();
} }
RUNTIME_FUNCTION(Runtime_TraceExit) { RUNTIME_FUNCTION(Runtime_TraceExit) {
SealHandleScope shs(isolate); SealHandleScope shs(isolate);
DCHECK(args.length() == 1); DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0); CONVERT_ARG_CHECKED(Object, obj, 0);
PrintTransition(isolate, obj); PrintIndentation(isolate);
PrintF("} -> ");
obj->ShortPrint();
PrintF("\n");
return obj; // return TOS return obj; // return TOS
} }
// %TraceTailCall(): called by full codegen (under --trace) just before a
// tail call replaces the current frame. Takes no arguments; prints the
// current JS stack-depth indentation followed by a marker line, mirroring
// the "}" exit line that Runtime_TraceExit would otherwise have printed.
RUNTIME_FUNCTION(Runtime_TraceTailCall) {
SealHandleScope shs(isolate);
DCHECK_EQ(0, args.length());
PrintIndentation(isolate);
PrintF("} -> tail call ->\n");
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(Runtime_HaveSameMap) { RUNTIME_FUNCTION(Runtime_HaveSameMap) {
SealHandleScope shs(isolate); SealHandleScope shs(isolate);
......
...@@ -919,7 +919,6 @@ namespace internal { ...@@ -919,7 +919,6 @@ namespace internal {
F(SymbolRegistry, 0, 1) \ F(SymbolRegistry, 0, 1) \
F(SymbolIsPrivate, 1, 1) F(SymbolIsPrivate, 1, 1)
#define FOR_EACH_INTRINSIC_TEST(F) \ #define FOR_EACH_INTRINSIC_TEST(F) \
F(DeoptimizeFunction, 1, 1) \ F(DeoptimizeFunction, 1, 1) \
F(DeoptimizeNow, 0, 1) \ F(DeoptimizeNow, 0, 1) \
...@@ -947,6 +946,7 @@ namespace internal { ...@@ -947,6 +946,7 @@ namespace internal {
F(DisassembleFunction, 1, 1) \ F(DisassembleFunction, 1, 1) \
F(TraceEnter, 0, 1) \ F(TraceEnter, 0, 1) \
F(TraceExit, 1, 1) \ F(TraceExit, 1, 1) \
F(TraceTailCall, 0, 1) \
F(HaveSameMap, 2, 1) \ F(HaveSameMap, 2, 1) \
F(InNewSpace, 1, 1) \ F(InNewSpace, 1, 1) \
F(HasFastSmiElements, 1, 1) \ F(HasFastSmiElements, 1, 1) \
...@@ -968,7 +968,6 @@ namespace internal { ...@@ -968,7 +968,6 @@ namespace internal {
F(HasFixedFloat64Elements, 1, 1) \ F(HasFixedFloat64Elements, 1, 1) \
F(HasFixedUint8ClampedElements, 1, 1) F(HasFixedUint8ClampedElements, 1, 1)
#define FOR_EACH_INTRINSIC_TYPEDARRAY(F) \ #define FOR_EACH_INTRINSIC_TYPEDARRAY(F) \
F(ArrayBufferGetByteLength, 1, 1) \ F(ArrayBufferGetByteLength, 1, 1) \
F(ArrayBufferSliceImpl, 4, 1) \ F(ArrayBufferSliceImpl, 4, 1) \
......
...@@ -15,6 +15,8 @@ ...@@ -15,6 +15,8 @@
return f(n - 1); return f(n - 1);
} }
assertThrows(()=>{ f(1e6) }); assertThrows(()=>{ f(1e6) });
%OptimizeFunctionOnNextCall(f);
assertThrows(()=>{ f(1e6) });
})(); })();
...@@ -30,6 +32,8 @@ ...@@ -30,6 +32,8 @@
return f(n - 1); return f(n - 1);
} }
assertEquals("foo", f(1e6)); assertEquals("foo", f(1e6));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
})(); })();
...@@ -49,6 +53,9 @@ ...@@ -49,6 +53,9 @@
} }
assertEquals("foo", f(1e6)); assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1)); assertEquals("bar", f(1e6 + 1));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
})(); })();
...@@ -61,9 +68,14 @@ ...@@ -61,9 +68,14 @@
if (n <= 0) { if (n <= 0) {
return "foo"; return "foo";
} }
return f(n - 1); return f_bound(n - 1);
}
var f_bound = f0.bind({});
function f(n) {
return f_bound(n);
} }
var f = f0.bind({}); assertEquals("foo", f(1e6));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6)); assertEquals("foo", f(1e6));
})(); })();
...@@ -74,17 +86,22 @@ ...@@ -74,17 +86,22 @@
if (n <= 0) { if (n <= 0) {
return "foo"; return "foo";
} }
return g(n - 1); return g_bound(n - 1);
} }
function g0(n){ function g0(n){
if (n <= 0) { if (n <= 0) {
return "bar"; return "bar";
} }
return f(n - 1); return f_bound(n - 1);
} }
var f = f0.bind({}); var f_bound = f0.bind({});
var g = g0.bind({}); var g_bound = g0.bind({});
function f(n) {
return f_bound(n);
}
assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1));
%OptimizeFunctionOnNextCall(f);
assertEquals("foo", f(1e6)); assertEquals("foo", f(1e6));
assertEquals("bar", f(1e6 + 1)); assertEquals("bar", f(1e6 + 1));
})(); })();
...@@ -47,7 +47,8 @@ f(null); ...@@ -47,7 +47,8 @@ f(null);
eval('f(null)'); eval('f(null)');
// Check called from strict builtin functions. // Check called from strict builtin functions.
[null, null].sort(f); // [null, null].sort(f); // Does not work because sort tail calls.
[null].forEach(f, null);
// Check called from sloppy builtin functions. // Check called from sloppy builtin functions.
"abel".replace(/b/g, function h() { "abel".replace(/b/g, function h() {
......
...@@ -43,6 +43,9 @@ ...@@ -43,6 +43,9 @@
# This test non-deterministically runs out of memory on Windows ia32. # This test non-deterministically runs out of memory on Windows ia32.
'regress/regress-crbug-160010': [SKIP], 'regress/regress-crbug-160010': [SKIP],
# Issue 4698: not fully supported by Turbofan yet
'es6/tail-call-simple': [SKIP],
# Issue 3389: deopt_every_n_garbage_collections is unsafe # Issue 3389: deopt_every_n_garbage_collections is unsafe
'regress/regress-2653': [SKIP], 'regress/regress-2653': [SKIP],
......
...@@ -1149,7 +1149,9 @@ function CheckArgumentsPillDescriptor(func, name) { ...@@ -1149,7 +1149,9 @@ function CheckArgumentsPillDescriptor(func, name) {
function strict() { function strict() {
"use strict"; "use strict";
return return_my_caller(); // Returning result via local variable to avoid tail call optimization.
var res = return_my_caller();
return res;
} }
assertSame(null, strict()); assertSame(null, strict());
...@@ -1163,7 +1165,9 @@ function CheckArgumentsPillDescriptor(func, name) { ...@@ -1163,7 +1165,9 @@ function CheckArgumentsPillDescriptor(func, name) {
(function TestNonStrictFunctionCallerPill() { (function TestNonStrictFunctionCallerPill() {
function strict(n) { function strict(n) {
"use strict"; "use strict";
return non_strict(n); // Returning result via local variable to avoid tail call optimization.
var res = non_strict(n);
return res;
} }
function recurse(n, then) { function recurse(n, then) {
...@@ -1191,7 +1195,9 @@ function CheckArgumentsPillDescriptor(func, name) { ...@@ -1191,7 +1195,9 @@ function CheckArgumentsPillDescriptor(func, name) {
(function TestNonStrictFunctionCallerDescriptorPill() { (function TestNonStrictFunctionCallerDescriptorPill() {
function strict(n) { function strict(n) {
"use strict"; "use strict";
return non_strict(n); // Returning result via local variable to avoid tail call optimization.
var res = non_strict(n);
return res;
} }
function recurse(n, then) { function recurse(n, then) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment