Commit 95aedaa0 authored by mbrandy, committed by Commit bot

PPC: [es6] Tail calls support.

Port 6131ab1e

Original commit message:
    This CL implements PrepareForTailCall() mentioned in the ES6 spec for full codegen, Crankshaft and TurboFan.
    When the debugger is active, tail calls are disabled.

    Tail calling can be enabled by the --harmony-tailcalls flag.

R=ishell@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:4698
LOG=Y

Review URL: https://codereview.chromium.org/1635823003

Cr-Commit-Position: refs/heads/master@{#33524}
parent 8f0fd8c0
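
For orientation before the diff: the sketch below is a stand-alone illustration (not V8 code) of the control flow the commit message describes. The enum values mirror the TailCallMode::kAllow/kDisallow used throughout the diff; the debug flag and function bodies are simplified stand-ins. Each call site records whether the call is in tail position, the mode is threaded down to the Call/CallFunction builtins, and the caller's frame is only dropped when the mode allows it and the debugger is inactive.

```cpp
// Illustration only -- not V8 source. TailCallMode::kAllow/kDisallow are the
// real enum values used in the diff; everything else is a stand-in.
#include <cstdio>

enum class TailCallMode { kDisallow, kAllow };

// Stand-in for the debug_is_active flag that PrepareForTailCall checks.
bool debug_is_active = false;

// Stand-in for the Call/CallFunction builtins: the caller's frame is dropped
// only for tail-call sites, and only while no debugger is attached.
void CallFunction(TailCallMode tail_call_mode) {
  if (tail_call_mode == TailCallMode::kAllow && !debug_is_active) {
    std::puts("PrepareForTailCall: drop the caller's frame");
  }
  std::puts("invoke the callee");
}

int main() {
  CallFunction(TailCallMode::kDisallow);  // ordinary call site
  CallFunction(TailCallMode::kAllow);     // call in tail position
  debug_is_active = true;
  CallFunction(TailCallMode::kAllow);     // debugger active: plain call
}
```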
@@ -3927,31 +3927,35 @@ void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
 void LCodeGen::DoCallFunction(LCallFunction* instr) {
+  HCallFunction* hinstr = instr->hydrogen();
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->function()).is(r4));
   DCHECK(ToRegister(instr->result()).is(r3));
   int arity = instr->arity();
-  ConvertReceiverMode mode = instr->hydrogen()->convert_mode();
-  if (instr->hydrogen()->HasVectorAndSlot()) {
+  ConvertReceiverMode mode = hinstr->convert_mode();
+  TailCallMode tail_call_mode = hinstr->tail_call_mode();
+  if (hinstr->HasVectorAndSlot()) {
     Register slot_register = ToRegister(instr->temp_slot());
     Register vector_register = ToRegister(instr->temp_vector());
     DCHECK(slot_register.is(r6));
     DCHECK(vector_register.is(r5));
     AllowDeferredHandleDereference vector_structure_check;
-    Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-    int index = vector->GetIndex(instr->hydrogen()->slot());
+    Handle<TypeFeedbackVector> vector = hinstr->feedback_vector();
+    int index = vector->GetIndex(hinstr->slot());
     __ Move(vector_register, vector);
     __ LoadSmiLiteral(slot_register, Smi::FromInt(index));
-    Handle<Code> ic =
-        CodeFactory::CallICInOptimizedCode(isolate(), arity, mode).code();
+    Handle<Code> ic = CodeFactory::CallICInOptimizedCode(isolate(), arity, mode,
+                                                         tail_call_mode)
+                          .code();
     CallCode(ic, RelocInfo::CODE_TARGET, instr);
   } else {
     __ mov(r3, Operand(arity));
-    CallCode(isolate()->builtins()->Call(mode), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->Call(mode, tail_call_mode),
+             RelocInfo::CODE_TARGET, instr);
   }
 }
@@ -2855,7 +2855,9 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   SetCallPosition(expr);
-  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
+  Handle<Code> ic =
+      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
+          .code();
   __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
   // Don't assign a type feedback id to the IC, since type feedback is provided
@@ -1982,10 +1982,134 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
   }
 }
+namespace {
+// Drops top JavaScript frame and an arguments adaptor frame below it (if
+// present) preserving all the arguments prepared for current call.
+// Does nothing if debugger is currently active.
+// ES6 14.6.3. PrepareForTailCall
+//
+// Stack structure for the function g() tail calling f():
+//
+// ------- Caller frame: -------
+// |  ...
+// |  g()'s arg M
+// |  ...
+// |  g()'s arg 1
+// |  g()'s receiver arg
+// |  g()'s caller pc
+// ------- g()'s frame: -------
+// |  g()'s caller fp      <- fp
+// |  g()'s context
+// |  function pointer: g
+// |  -------------------------
+// |  ...
+// |  ...
+// |  f()'s arg N
+// |  ...
+// |  f()'s arg 1
+// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
+// ----------------------
+//
+void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
+                        Register scratch1, Register scratch2,
+                        Register scratch3) {
+  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
+  Comment cmnt(masm, "[ PrepareForTailCall");
+  // Prepare for tail call only if the debugger is not active.
+  Label done;
+  ExternalReference debug_is_active =
+      ExternalReference::debug_is_active_address(masm->isolate());
+  __ mov(scratch1, Operand(debug_is_active));
+  __ lbz(scratch1, MemOperand(scratch1));
+  __ cmpi(scratch1, Operand::Zero());
+  __ bne(&done);
+  // Check if next frame is an arguments adaptor frame.
+  Label no_arguments_adaptor, formal_parameter_count_loaded;
+  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ LoadP(scratch3,
+           MemOperand(scratch2, StandardFrameConstants::kContextOffset));
+  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
+  __ bne(&no_arguments_adaptor);
+  // Drop arguments adaptor frame and load arguments count.
+  __ mr(fp, scratch2);
+  __ LoadP(scratch1,
+           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ SmiUntag(scratch1);
+  __ b(&formal_parameter_count_loaded);
+  __ bind(&no_arguments_adaptor);
+  // Load caller's formal parameter count
+  __ LoadP(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ LoadP(scratch1,
+           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadWordArith(
+      scratch1, FieldMemOperand(
+                    scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
+#if !V8_TARGET_ARCH_PPC64
+  __ SmiUntag(scratch1);
+#endif
+  __ bind(&formal_parameter_count_loaded);
+  // Calculate the end of destination area where we will put the arguments
+  // after we drop current frame. We add kPointerSize to count the receiver
+  // argument which is not included into formal parameters count.
+  Register dst_reg = scratch2;
+  __ ShiftLeftImm(dst_reg, scratch1, Operand(kPointerSizeLog2));
+  __ add(dst_reg, fp, dst_reg);
+  __ addi(dst_reg, dst_reg,
+          Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
+  Register src_reg = scratch1;
+  __ ShiftLeftImm(src_reg, args_reg, Operand(kPointerSizeLog2));
+  __ add(src_reg, sp, src_reg);
+  // Count receiver argument as well (not included in args_reg).
+  __ addi(src_reg, src_reg, Operand(kPointerSize));
+  if (FLAG_debug_code) {
+    __ cmpl(src_reg, dst_reg);
+    __ Check(lt, kStackAccessBelowStackPointer);
+  }
+  // Restore caller's frame pointer and return address now as they will be
+  // overwritten by the copying loop.
+  if (FLAG_enable_embedded_constant_pool) {
+    __ LoadP(kConstantPoolRegister,
+             MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
+  }
+  __ LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
+  __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ mtlr(r0);
+  // Now copy callee arguments to the caller frame going backwards to avoid
+  // callee arguments corruption (source and destination areas could overlap).
+  // Both src_reg and dst_reg are pointing to the word after the one to copy,
+  // so they must be pre-decremented in the loop.
+  Register tmp_reg = scratch3;
+  Label loop;
+  __ addi(tmp_reg, args_reg, Operand(1));  // +1 for receiver
+  __ mtctr(tmp_reg);
+  __ bind(&loop);
+  __ LoadPU(tmp_reg, MemOperand(src_reg, -kPointerSize));
+  __ StorePU(tmp_reg, MemOperand(dst_reg, -kPointerSize));
+  __ bdnz(&loop);
+  // Leave current frame.
+  __ mr(sp, dst_reg);
+  __ bind(&done);
+}
+}  // namespace
 // static
 void Builtins::Generate_CallFunction(MacroAssembler* masm,
-                                     ConvertReceiverMode mode) {
+                                     ConvertReceiverMode mode,
+                                     TailCallMode tail_call_mode) {
   // ----------- S t a t e -------------
   //  -- r3 : the number of arguments (not including the receiver)
   //  -- r4 : the function to call (checked to be a JSFunction)
@@ -2070,6 +2194,10 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   //  -- cp : the function context.
   // -----------------------------------
+  if (tail_call_mode == TailCallMode::kAllow) {
+    PrepareForTailCall(masm, r3, r6, r7, r8);
+  }
   __ LoadWordArith(
       r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
 #if !V8_TARGET_ARCH_PPC64
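
The copy loop at the end of PrepareForTailCall above is the delicate part: the destination slots sit at higher addresses than the freshly pushed arguments, the two areas may overlap, and the caller's fp and return address must be restored before the loop clobbers them. The following self-contained sketch models just the argument move with array indices (an illustration under made-up frame sizes; only the backward, pre-decrementing copy and the final sp adjustment mirror the stub).

```cpp
// Minimal model (not V8 code) of the frame drop in PrepareForTailCall.
// Indices stand in for addresses: index 0 is the current sp (lowest address),
// higher indices are closer to the caller.
#include <cstdio>

int main() {
  // g() was called with 1 argument and is about to tail call f() with 5
  // arguments. Words 0..5 are f()'s receiver + arguments (already pushed),
  // words 6..8 stand in for g()'s frame, words 9..10 are g()'s old
  // receiver + argument, i.e. the area being reclaimed.
  const char* stack[11] = {"f recv", "f a1", "f a2", "f a3", "f a4", "f a5",
                           "g frame", "g frame", "g frame", "g recv", "g a1"};
  const int kWords = 6;  // receiver + 5 arguments to relocate
  int src = kWords;      // one past f()'s argument area (like src_reg)
  int dst = 11;          // one past the destination area (like dst_reg)

  // Copy backwards with pre-decrement, mirroring the LoadPU/StorePU loop.
  // Source [0,6) and destination [5,11) overlap at word 5; a forward copy
  // would overwrite "f a5" with "f recv" before reading it.
  for (int i = 0; i < kWords; i++) {
    stack[--dst] = stack[--src];
  }

  // dst is now the new stack top, as in "__ mr(sp, dst_reg)"; everything
  // below it has been dropped.
  for (int i = dst; i < 11; i++) std::printf("%s\n", stack[i]);
  // Prints: f recv, f a1, f a2, f a3, f a4, f a5
}
```

The stub itself derives the word count from args_reg plus one for the receiver and drives the loop with mtctr/bdnz, but the ordering argument is the same.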
@@ -2174,13 +2302,18 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
 // static
-void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
+void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
+                                              TailCallMode tail_call_mode) {
   // ----------- S t a t e -------------
   //  -- r3 : the number of arguments (not including the receiver)
   //  -- r4 : the function to call (checked to be a JSBoundFunction)
   // -----------------------------------
   __ AssertBoundFunction(r4);
+  if (tail_call_mode == TailCallMode::kAllow) {
+    PrepareForTailCall(masm, r3, r6, r7, r8);
+  }
   // Patch the receiver to [[BoundThis]].
   __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
   __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
@@ -2201,7 +2334,8 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
 // static
-void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
+void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
+                             TailCallMode tail_call_mode) {
   // ----------- S t a t e -------------
   //  -- r3 : the number of arguments (not including the receiver)
   //  -- r4 : the target to call (can be any Object).
@@ -2211,14 +2345,19 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
   __ JumpIfSmi(r4, &non_callable);
   __ bind(&non_smi);
   __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
-  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
+  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
   __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
-  __ Jump(masm->isolate()->builtins()->CallBoundFunction(),
+  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
   __ cmpi(r8, Operand(JS_PROXY_TYPE));
   __ bne(&non_function);
+  // 0. Prepare for tail call if necessary.
+  if (tail_call_mode == TailCallMode::kAllow) {
+    PrepareForTailCall(masm, r3, r6, r7, r8);
+  }
   // 1. Runtime fallback for Proxy [[Call]].
   __ Push(r4);
   // Increase the arguments size to include the pushed function and the
@@ -2241,7 +2380,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
   // Let the "call_as_function_delegate" take care of the rest.
   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
   __ Jump(masm->isolate()->builtins()->CallFunction(
-              ConvertReceiverMode::kNotNullOrUndefined),
+              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
           RelocInfo::CODE_TARGET);
   // 3. Call to something that is not callable.
@@ -2644,7 +2644,8 @@ void CallICStub::Generate(MacroAssembler* masm) {
   __ bind(&call_function);
   __ mov(r3, Operand(argc));
-  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
+                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);
   __ bind(&extra_checks_or_miss);
@@ -2682,7 +2683,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
   __ bind(&call);
   __ mov(r3, Operand(argc));
-  __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);
   __ bind(&uninitialized);