Commit 89737c5d authored by Junliang Yan, committed by Commit Bot

PPC/s390: [turbofan] Ensure instruction start is in fixed register.

Port c462ddc8

Original Commit Message:

    This makes sure that {JSFunction} invocations always load the code start
    address into the fixed {kJavaScriptCallCodeStartRegister} register. This
    allows us to perform PC-relative operations more effectively. For now this
    only applies to code with {kCallJSFunction} linkage.

R=mstarzinger@chromium.org, joransiu@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: If346a3cbaea820b1fcec38c5105605496961a888
Reviewed-on: https://chromium-review.googlesource.com/938721
Reviewed-by: Joran Siu <joransiu@ca.ibm.com>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#51608}
parent 910f45fa
...@@ -150,13 +150,6 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) { ...@@ -150,13 +150,6 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
__ TailCallStub(&stub); __ TailCallStub(&stub);
} }
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
__ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(ip);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm, static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) { Runtime::FunctionId function_id) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
...@@ -179,8 +172,9 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm, ...@@ -179,8 +172,9 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
__ Pop(r3, r4, r6); __ Pop(r3, r4, r6);
__ SmiUntag(r3); __ SmiUntag(r3);
} }
__ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ JumpToJSEntry(ip); __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r5);
} }
namespace { namespace {
...@@ -563,9 +557,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { ...@@ -563,9 +557,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// undefined because generator functions are non-constructable. // undefined because generator functions are non-constructable.
__ mr(r6, r4); __ mr(r6, r4);
__ mr(r4, r7); __ mr(r4, r7);
__ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeOffset)); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
__ JumpToJSEntry(ip); __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r5);
} }
__ bind(&prepare_step_in_if_stepping); __ bind(&prepare_step_in_if_stepping);
...@@ -827,10 +822,11 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm, ...@@ -827,10 +822,11 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
// register. // register.
ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure, ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
scratch2, scratch3, feedback_vector); scratch2, scratch3, feedback_vector);
__ addi(optimized_code_entry, optimized_code_entry, static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ addi(r5, optimized_code_entry,
Operand(Code::kHeaderSize - kHeapObjectTag)); Operand(Code::kHeaderSize - kHeapObjectTag));
__ mr(ip, optimized_code_entry); __ mr(ip, r5);
__ Jump(optimized_code_entry); __ Jump(r5);
// Optimized code slot contains deoptimized code, evict it and re-enter the // Optimized code slot contains deoptimized code, evict it and re-enter the
// closure's code. // closure's code.
...@@ -1329,7 +1325,11 @@ void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) { ...@@ -1329,7 +1325,11 @@ void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) {
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r7, r9, r8); MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r7, r9, r8);
// Otherwise, tail call the SFI code. // Otherwise, tail call the SFI code.
GenerateTailCallToSharedCode(masm); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
__ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r5);
} }
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) { void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
...@@ -1542,9 +1542,10 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { ...@@ -1542,9 +1542,10 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
} }
// On failure, tail call back to regular js by re-calling the function // On failure, tail call back to regular js by re-calling the function
// which has been reset to the compile lazy builtin. // which has been reset to the compile lazy builtin.
__ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeOffset)); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
__ JumpToJSEntry(ip); __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r5);
} }
namespace { namespace {
...@@ -2461,8 +2462,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2461,8 +2462,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label invoke, dont_adapt_arguments, stack_overflow; Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few; Label enough, too_few;
__ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeOffset));
__ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
__ cmp(r3, r5); __ cmp(r3, r5);
__ blt(&too_few); __ blt(&too_few);
__ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
...@@ -2478,7 +2477,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2478,7 +2477,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: function // r4: function
// r5: expected number of arguments // r5: expected number of arguments
// r6: new target (passed through to callee) // r6: new target (passed through to callee)
// ip: code entry to call
__ SmiToPtrArrayOffset(r3, r3); __ SmiToPtrArrayOffset(r3, r3);
__ add(r3, r3, fp); __ add(r3, r3, fp);
// adjust for return address and receiver // adjust for return address and receiver
...@@ -2492,7 +2490,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2492,7 +2490,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r5: expected number of arguments // r5: expected number of arguments
// r6: new target (passed through to callee) // r6: new target (passed through to callee)
// r7: copy end address // r7: copy end address
// ip: code entry to call
Label copy; Label copy;
__ bind(&copy); __ bind(&copy);
...@@ -2516,7 +2513,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2516,7 +2513,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: function // r4: function
// r5: expected number of arguments // r5: expected number of arguments
// r6: new target (passed through to callee) // r6: new target (passed through to callee)
// ip: code entry to call
__ SmiToPtrArrayOffset(r3, r3); __ SmiToPtrArrayOffset(r3, r3);
__ add(r3, r3, fp); __ add(r3, r3, fp);
...@@ -2525,7 +2521,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2525,7 +2521,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: function // r4: function
// r5: expected number of arguments // r5: expected number of arguments
// r6: new target (passed through to callee) // r6: new target (passed through to callee)
// ip: code entry to call
Label copy; Label copy;
__ bind(&copy); __ bind(&copy);
// Adjust load for return address and receiver. // Adjust load for return address and receiver.
...@@ -2539,7 +2534,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2539,7 +2534,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: function // r4: function
// r5: expected number of arguments // r5: expected number of arguments
// r6: new target (passed through to callee) // r6: new target (passed through to callee)
// ip: code entry to call
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2)); __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
__ sub(r7, fp, r7); __ sub(r7, fp, r7);
...@@ -2561,7 +2555,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2561,7 +2555,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r3 : expected number of arguments // r3 : expected number of arguments
// r4 : function (passed through to callee) // r4 : function (passed through to callee)
// r6 : new target (passed through to callee) // r6 : new target (passed through to callee)
__ CallJSEntry(ip); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
__ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ CallJSEntry(r5);
// Store offset of return address for deoptimizer. // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
...@@ -2574,7 +2571,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2574,7 +2571,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Dont adapt arguments. // Dont adapt arguments.
// ------------------------------------------- // -------------------------------------------
__ bind(&dont_adapt_arguments); __ bind(&dont_adapt_arguments);
__ JumpToJSEntry(ip); static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
__ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
__ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r5);
__ bind(&stack_overflow); __ bind(&stack_overflow);
{ {
......
...@@ -150,13 +150,6 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) { ...@@ -150,13 +150,6 @@ void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
__ TailCallStub(&stub); __ TailCallStub(&stub);
} }
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
__ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(ip);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm, static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) { Runtime::FunctionId function_id) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
...@@ -179,8 +172,9 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm, ...@@ -179,8 +172,9 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
__ Pop(r2, r3, r5); __ Pop(r2, r3, r5);
__ SmiUntag(r2); __ SmiUntag(r2);
} }
__ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ JumpToJSEntry(ip); __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r4);
} }
namespace { namespace {
...@@ -558,9 +552,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { ...@@ -558,9 +552,10 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// undefined because generator functions are non-constructable. // undefined because generator functions are non-constructable.
__ LoadRR(r5, r3); __ LoadRR(r5, r3);
__ LoadRR(r3, r6); __ LoadRR(r3, r6);
__ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeOffset)); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
__ JumpToJSEntry(ip); __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r4);
} }
__ bind(&prepare_step_in_if_stepping); __ bind(&prepare_step_in_if_stepping);
...@@ -830,9 +825,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm, ...@@ -830,9 +825,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
// register. // register.
ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure, ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
scratch2, scratch3, feedback_vector); scratch2, scratch3, feedback_vector);
__ AddP(optimized_code_entry, optimized_code_entry, static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ AddP(r4, optimized_code_entry,
Operand(Code::kHeaderSize - kHeapObjectTag)); Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(optimized_code_entry); __ Jump(r4);
// Optimized code slot contains deoptimized code, evict it and re-enter the // Optimized code slot contains deoptimized code, evict it and re-enter the
// closure's code. // closure's code.
...@@ -1326,7 +1322,11 @@ void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) { ...@@ -1326,7 +1322,11 @@ void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) {
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r6, r8, r7); MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r6, r8, r7);
// Otherwise, tail call the SFI code. // Otherwise, tail call the SFI code.
GenerateTailCallToSharedCode(masm); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
__ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r4);
} }
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) { void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
...@@ -1538,9 +1538,10 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { ...@@ -1538,9 +1538,10 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
} }
// On failure, tail call back to regular js by re-calling the function // On failure, tail call back to regular js by re-calling the function
// which has been reset to the compile lazy builtin. // which has been reset to the compile lazy builtin.
__ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeOffset)); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
__ JumpToJSEntry(ip); __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r4);
} }
namespace { namespace {
...@@ -2460,8 +2461,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2460,8 +2461,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label invoke, dont_adapt_arguments, stack_overflow; Label invoke, dont_adapt_arguments, stack_overflow;
Label enough, too_few; Label enough, too_few;
__ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeOffset));
__ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
__ CmpP(r2, r4); __ CmpP(r2, r4);
__ blt(&too_few); __ blt(&too_few);
__ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
...@@ -2477,7 +2476,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2477,7 +2476,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r3: function // r3: function
// r4: expected number of arguments // r4: expected number of arguments
// r5: new target (passed through to callee) // r5: new target (passed through to callee)
// ip: code entry to call
__ SmiToPtrArrayOffset(r2, r2); __ SmiToPtrArrayOffset(r2, r2);
__ AddP(r2, fp); __ AddP(r2, fp);
// adjust for return address and receiver // adjust for return address and receiver
...@@ -2491,7 +2489,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2491,7 +2489,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r4: expected number of arguments // r4: expected number of arguments
// r5: new target (passed through to callee) // r5: new target (passed through to callee)
// r6: copy end address // r6: copy end address
// ip: code entry to call
Label copy; Label copy;
__ bind(&copy); __ bind(&copy);
...@@ -2515,7 +2512,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2515,7 +2512,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r3: function // r3: function
// r4: expected number of arguments // r4: expected number of arguments
// r5: new target (passed through to callee) // r5: new target (passed through to callee)
// ip: code entry to call
__ SmiToPtrArrayOffset(r2, r2); __ SmiToPtrArrayOffset(r2, r2);
__ lay(r2, MemOperand(r2, fp)); __ lay(r2, MemOperand(r2, fp));
...@@ -2524,7 +2520,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2524,7 +2520,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r3: function // r3: function
// r4: expected number of arguments // r4: expected number of arguments
// r5: new target (passed through to callee) // r5: new target (passed through to callee)
// ip: code entry to call
Label copy; Label copy;
__ bind(&copy); __ bind(&copy);
// Adjust load for return address and receiver. // Adjust load for return address and receiver.
...@@ -2537,7 +2532,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2537,7 +2532,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill the remaining expected arguments with undefined. // Fill the remaining expected arguments with undefined.
// r3: function // r3: function
// r4: expected number of arguments // r4: expected number of arguments
// ip: code entry to call
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2)); __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
__ SubP(r6, fp, r6); __ SubP(r6, fp, r6);
...@@ -2559,7 +2553,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2559,7 +2553,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r2 : expected number of arguments // r2 : expected number of arguments
// r3 : function (passed through to callee) // r3 : function (passed through to callee)
// r5 : new target (passed through to callee) // r5 : new target (passed through to callee)
__ CallJSEntry(ip); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
__ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ CallJSEntry(r4);
// Store offset of return address for deoptimizer. // Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
...@@ -2572,7 +2569,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -2572,7 +2569,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Dont adapt arguments. // Dont adapt arguments.
// ------------------------------------------- // -------------------------------------------
__ bind(&dont_adapt_arguments); __ bind(&dont_adapt_arguments);
__ JumpToJSEntry(ip); static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
__ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
__ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ JumpToJSEntry(r4);
__ bind(&stack_overflow); __ bind(&stack_overflow);
{ {
......
...@@ -2213,7 +2213,7 @@ void CodeGenerator::AssembleConstructFrame() { ...@@ -2213,7 +2213,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ mr(fp, sp); __ mr(fp, sp);
} }
} else if (call_descriptor->IsJSFunctionCall()) { } else if (call_descriptor->IsJSFunctionCall()) {
__ Prologue(ip); __ Prologue();
if (call_descriptor->PushArgumentCount()) { if (call_descriptor->PushArgumentCount()) {
__ Push(kJavaScriptCallArgCountRegister); __ Push(kJavaScriptCallArgCountRegister);
} }
......
...@@ -833,40 +833,28 @@ void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress( ...@@ -833,40 +833,28 @@ void MacroAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
add(kConstantPoolRegister, kConstantPoolRegister, code_target_address); add(kConstantPoolRegister, kConstantPoolRegister, code_target_address);
} }
void TurboAssembler::LoadConstantPoolPointerRegister(Register base,
int code_start_delta) {
add_label_offset(kConstantPoolRegister, base, ConstantPoolPosition(),
code_start_delta);
}
void TurboAssembler::LoadConstantPoolPointerRegister() { void TurboAssembler::LoadConstantPoolPointerRegister() {
mov_label_addr(kConstantPoolRegister, ConstantPoolPosition()); mov_label_addr(kConstantPoolRegister, ConstantPoolPosition());
} }
void TurboAssembler::StubPrologue(StackFrame::Type type, Register base, void TurboAssembler::StubPrologue(StackFrame::Type type) {
int prologue_offset) {
{ {
ConstantPoolUnavailableScope constant_pool_unavailable(this); ConstantPoolUnavailableScope constant_pool_unavailable(this);
mov(r11, Operand(StackFrame::TypeToMarker(type))); mov(r11, Operand(StackFrame::TypeToMarker(type)));
PushCommonFrame(r11); PushCommonFrame(r11);
} }
if (FLAG_enable_embedded_constant_pool) { if (FLAG_enable_embedded_constant_pool) {
if (base != no_reg) { LoadConstantPoolPointerRegister();
// base contains prologue address
LoadConstantPoolPointerRegister(base, -prologue_offset);
} else {
LoadConstantPoolPointerRegister();
}
set_constant_pool_available(true); set_constant_pool_available(true);
} }
} }
void TurboAssembler::Prologue(Register base, int prologue_offset) { void TurboAssembler::Prologue() {
DCHECK(base != no_reg); DCHECK(base != no_reg);
PushStandardFrame(r4); PushStandardFrame(r4);
if (FLAG_enable_embedded_constant_pool) { if (FLAG_enable_embedded_constant_pool) {
// base contains prologue address // base contains prologue address
LoadConstantPoolPointerRegister(base, -prologue_offset); LoadConstantPoolPointerRegister();
set_constant_pool_available(true); set_constant_pool_available(true);
} }
} }
...@@ -1273,7 +1261,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target, ...@@ -1273,7 +1261,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
// We call indirectly through the code field in the function to // We call indirectly through the code field in the function to
// allow recompilation to take effect without changing any of the // allow recompilation to take effect without changing any of the
// call sites. // call sites.
Register code = ip; Register code = kJavaScriptCallCodeStartRegister;
LoadP(code, FieldMemOperand(function, JSFunction::kCodeOffset)); LoadP(code, FieldMemOperand(function, JSFunction::kCodeOffset));
addi(code, code, Operand(Code::kHeaderSize - kHeapObjectTag)); addi(code, code, Operand(Code::kHeaderSize - kHeapObjectTag));
if (flag == CALL_FUNCTION) { if (flag == CALL_FUNCTION) {
......
...@@ -175,9 +175,8 @@ class TurboAssembler : public Assembler { ...@@ -175,9 +175,8 @@ class TurboAssembler : public Assembler {
void PushCommonFrame(Register marker_reg = no_reg); void PushCommonFrame(Register marker_reg = no_reg);
// Generates function and stub prologue code. // Generates function and stub prologue code.
void StubPrologue(StackFrame::Type type, Register base = no_reg, void StubPrologue(StackFrame::Type type);
int prologue_offset = 0); void Prologue();
void Prologue(Register base, int prologue_offset = 0);
// Push a standard frame, consisting of lr, fp, constant pool, // Push a standard frame, consisting of lr, fp, constant pool,
// context and JS function // context and JS function
...@@ -642,7 +641,6 @@ class TurboAssembler : public Assembler { ...@@ -642,7 +641,6 @@ class TurboAssembler : public Assembler {
void CallStubDelayed(CodeStub* stub); void CallStubDelayed(CodeStub* stub);
void LoadConstantPoolPointerRegister(); void LoadConstantPoolPointerRegister();
void LoadConstantPoolPointerRegister(Register base, int code_entry_delta = 0);
void AbortConstantPoolBuilding() { void AbortConstantPoolBuilding() {
#ifdef DEBUG #ifdef DEBUG
// Avoid DCHECK(!is_linked()) failure in ~Label() // Avoid DCHECK(!is_linked()) failure in ~Label()
......
...@@ -1296,7 +1296,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target, ...@@ -1296,7 +1296,7 @@ void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
// We call indirectly through the code field in the function to // We call indirectly through the code field in the function to
// allow recompilation to take effect without changing any of the // allow recompilation to take effect without changing any of the
// call sites. // call sites.
Register code = ip; Register code = kJavaScriptCallCodeStartRegister;
LoadP(code, FieldMemOperand(function, JSFunction::kCodeOffset)); LoadP(code, FieldMemOperand(function, JSFunction::kCodeOffset));
AddP(code, code, Operand(Code::kHeaderSize - kHeapObjectTag)); AddP(code, code, Operand(Code::kHeaderSize - kHeapObjectTag));
if (flag == CALL_FUNCTION) { if (flag == CALL_FUNCTION) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment