Commit 51669873 authored by danno's avatar danno Committed by Commit bot

[turbofan] Add general support for sp-based frame access

Some highlights of this CL:
* Refactor the mutable state out of Frame into FrameAccessState,
  which is maintained and updated during code generation to
  record whether sp- or fp-based frame access is currently active
  and how deep the stack on top of the frame is.
* The operand resolution in linkage.cc now uses FrameAccessState
  to determine how to generate frame-accessing operands.
* Update all platforms to accurately track additionally pushed
  stack slots (e.g. arguments for calls) in the FrameAccessState.
* Add a flag, --turbo_sp_frame_access, which forces all frame
  access to be sp-based whenever possible. This will likely never
  be used in production, but for testing it's useful in verifying
  that the stack-tracking of each platform maintained in the
  FrameAccessState is correct.
* Use sp-based frame access for gap resolving before tail
  calls. This will allow for slightly more efficient restoration
  of the frame pointer in the tail call in a later CL.
* Remove most ad hoc groping into CallDescriptors to
  determine if a frame is needed, instead consistently use
  predicates like needs_frame(), IsCFunctionCall() and
  IsJSFunctionCall().

BUG=v8:4076
LOG=n

Review URL: https://codereview.chromium.org/1460183002

Cr-Commit-Position: refs/heads/master@{#32234}
parent abb9ca8b
......@@ -149,8 +149,8 @@ class ArmOperandConverter final : public InstructionOperandConverter {
MemOperand ToMemOperand(InstructionOperand* op) const {
DCHECK(op != NULL);
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......@@ -359,16 +359,14 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ add(sp, sp, Operand(sp_slot_delta * kPointerSize));
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
if (FLAG_enable_embedded_constant_pool) {
__ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
} else {
__ ldm(ia_w, sp, fp.bit() | lr.bit());
}
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -376,7 +374,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ sub(sp, sp, Operand(-sp_slot_delta * kPointerSize));
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -399,6 +399,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
}
RecordCallPosition(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallCodeObject: {
......@@ -413,6 +414,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Jump(ip);
}
DCHECK_EQ(LeaveCC, i.OutputSBit());
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -428,6 +430,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Call(ip);
RecordCallPosition(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallJSFunction: {
......@@ -443,6 +446,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Jump(ip);
DCHECK_EQ(LeaveCC, i.OutputSBit());
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -453,6 +457,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchPrepareCallCFunction: {
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters, kScratchReg);
// Frame alignment requires using FP-relative frame addressing.
frame_access_state()->SetFrameAccessToFP();
break;
}
case kArchPrepareTailCall:
......@@ -467,6 +473,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Register func = i.InputRegister(0);
__ CallCFunction(func, num_parameters);
}
frame_access_state()->SetFrameAccessToDefault();
frame_access_state()->ClearSPDelta();
break;
}
case kArchJmp:
......@@ -921,8 +929,10 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArmPush:
if (instr->InputAt(0)->IsDoubleRegister()) {
__ vpush(i.InputDoubleRegister(0));
frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
} else {
__ push(i.InputRegister(0));
frame_access_state()->IncreaseSPDelta(1);
}
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
......@@ -1044,7 +1054,7 @@ void CodeGenerator::AssembleDeoptimizerCall(
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
if (FLAG_enable_embedded_constant_pool) {
__ Push(lr, fp, pp);
// Adjust FP to point to saved FP.
......@@ -1056,11 +1066,12 @@ void CodeGenerator::AssemblePrologue() {
} else if (descriptor->IsJSFunctionCall()) {
CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(0);
}
frame_access_state()->SetFrameAccessToDefault();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
......@@ -1131,9 +1142,9 @@ void CodeGenerator::AssembleReturn() {
DwVfpRegister::from_code(last));
}
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ LeaveFrame(StackFrame::MANUAL);
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ b(&return_label_);
......
......@@ -205,11 +205,13 @@ class Arm64OperandConverter final : public InstructionOperandConverter {
MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
DCHECK(op != NULL);
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
if (offset.from_frame_pointer()) {
int from_sp =
offset.offset() + (frame()->GetSpToFpSlotCount() * kPointerSize);
offset.offset() +
((frame()->GetSpToFpSlotCount() + frame_access_state()->sp_delta()) *
kPointerSize);
// Convert FP-offsets to SP-offsets if it results in better code.
if (Assembler::IsImmLSUnscaled(from_sp) ||
Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
......@@ -462,12 +464,10 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ Add(jssp, jssp, Operand(sp_slot_delta * kPointerSize));
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
__ Pop(fp, lr);
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -475,8 +475,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ Sub(jssp, jssp, Operand(-sp_slot_delta * kPointerSize));
frame()->AllocateOutgoingParameterSlots(-sp_slot_delta);
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -495,7 +496,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
__ Call(target);
}
frame()->ClearOutgoingParameterSlots();
frame_access_state()->ClearSPDelta();
RecordCallPosition(instr);
break;
}
......@@ -510,7 +511,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
__ Jump(target);
}
frame()->ClearOutgoingParameterSlots();
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -526,7 +527,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
}
__ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Call(x10);
frame()->ClearOutgoingParameterSlots();
frame_access_state()->ClearSPDelta();
RecordCallPosition(instr);
break;
}
......@@ -544,7 +545,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
AssembleDeconstructActivationRecord(stack_param_delta);
__ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Jump(x10);
frame()->ClearOutgoingParameterSlots();
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -572,7 +573,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
}
// CallCFunction only supports register arguments so we never need to call
// frame()->ClearOutgoingParameterSlots() here.
DCHECK(frame()->GetOutgoingParameterSlotCount() == 0);
DCHECK(frame_access_state()->sp_delta() == 0);
break;
}
case kArchJmp:
......@@ -858,7 +859,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
case kArm64ClaimForCallArguments: {
__ Claim(i.InputInt32(0));
frame()->AllocateOutgoingParameterSlots(i.InputInt32(0));
frame_access_state()->IncreaseSPDelta(i.InputInt32(0));
break;
}
case kArm64Poke: {
......@@ -1263,7 +1264,7 @@ void CodeGenerator::AssembleDeoptimizerCall(
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ SetStackPointer(csp);
__ Push(lr, fp);
__ Mov(fp, csp);
......@@ -1271,12 +1272,13 @@ void CodeGenerator::AssemblePrologue() {
CompilationInfo* info = this->info();
__ SetStackPointer(jssp);
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ SetStackPointer(jssp);
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(0);
}
frame_access_state()->SetFrameAccessToDefault();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
......@@ -1343,10 +1345,10 @@ void CodeGenerator::AssembleReturn() {
}
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ Mov(csp, fp);
__ Pop(fp, lr);
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ B(&return_label_);
......
......@@ -128,6 +128,9 @@ class InstructionOperandConverter {
}
Frame* frame() const { return gen_->frame(); }
FrameAccessState* frame_access_state() const {
return gen_->frame_access_state();
}
Isolate* isolate() const { return gen_->isolate(); }
Linkage* linkage() const { return gen_->linkage(); }
......
......@@ -34,7 +34,7 @@ class CodeGenerator::JumpTable final : public ZoneObject {
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
InstructionSequence* code, CompilationInfo* info)
: frame_(frame),
: frame_access_state_(new (code->zone()) FrameAccessState(frame)),
linkage_(linkage),
code_(code),
info_(info),
......@@ -52,11 +52,13 @@ CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
last_lazy_deopt_pc_(0),
jump_tables_(nullptr),
ools_(nullptr),
osr_pc_offset_(-1),
needs_frame_(frame->GetSpillSlotCount() > 0 || code->ContainsCall()) {
osr_pc_offset_(-1) {
for (int i = 0; i < code->InstructionBlockCount(); ++i) {
new (&labels_[i]) Label;
}
if (code->ContainsCall()) {
frame->MarkNeedsFrame();
}
}
......
......@@ -16,6 +16,7 @@ namespace internal {
namespace compiler {
// Forward declarations.
class FrameAccessState;
class Linkage;
class OutOfLineCode;
......@@ -37,7 +38,8 @@ class CodeGenerator final : public GapResolver::Assembler {
Handle<Code> GenerateCode();
InstructionSequence* code() const { return code_; }
Frame* frame() const { return frame_; }
FrameAccessState* frame_access_state() const { return frame_access_state_; }
Frame* const frame() const { return frame_access_state_->frame(); }
Isolate* isolate() const { return info_->isolate(); }
Linkage* linkage() const { return linkage_; }
......@@ -128,14 +130,14 @@ class CodeGenerator final : public GapResolver::Assembler {
void RecordCallPosition(Instruction* instr);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
size_t frame_state_offset);
FrameStateDescriptor* GetFrameStateDescriptor(
Instruction* instr, size_t frame_access_state_offset);
int BuildTranslation(Instruction* instr, int pc_offset,
size_t frame_state_offset,
size_t frame_access_state_offset,
OutputFrameStateCombine state_combine);
void BuildTranslationForFrameStateDescriptor(
FrameStateDescriptor* descriptor, Instruction* instr,
Translation* translation, size_t frame_state_offset,
Translation* translation, size_t frame_access_state_offset,
OutputFrameStateCombine state_combine);
void AddTranslationForOperand(Translation* translation, Instruction* instr,
InstructionOperand* op, MachineType type);
......@@ -176,7 +178,7 @@ class CodeGenerator final : public GapResolver::Assembler {
friend class OutOfLineCode;
Frame* const frame_;
FrameAccessState* frame_access_state_;
Linkage* const linkage_;
InstructionSequence* const code_;
CompilationInfo* const info_;
......@@ -196,7 +198,6 @@ class CodeGenerator final : public GapResolver::Assembler {
JumpTable* jump_tables_;
OutOfLineCode* ools_;
int osr_pc_offset_;
bool needs_frame_;
};
} // namespace compiler
......
......@@ -12,14 +12,41 @@ namespace v8 {
namespace internal {
namespace compiler {
Frame::Frame(int fixed_frame_size_in_slots)
: frame_slot_count_(fixed_frame_size_in_slots),
outgoing_parameter_slot_count_(0),
Frame::Frame(int fixed_frame_size_in_slots, const CallDescriptor* descriptor)
: needs_frame_((descriptor != nullptr) &&
descriptor->RequiresFrameAsIncoming()),
frame_slot_count_(fixed_frame_size_in_slots),
callee_saved_slot_count_(0),
spill_slot_count_(0),
allocated_registers_(NULL),
allocated_double_registers_(NULL) {}
// Selects the default addressing mode for frame-slot access: FP-relative
// when a frame exists, SP-relative otherwise. The --turbo_sp_frame_access
// flag forces SP-relative access even with a frame (testing aid; see the
// commit message above).
void FrameAccessState::SetFrameAccessToDefault() {
if (frame()->needs_frame() && !FLAG_turbo_sp_frame_access) {
SetFrameAccessToFP();
} else {
SetFrameAccessToSP();
}
}
// Computes the FP- or SP-relative offset (depending on the currently active
// access mode) for the given spill slot. Negative spill slots indicate
// arguments on the caller's frame.
FrameOffset FrameAccessState::GetFrameOffset(int spill_slot) const {
const int offset =
(StandardFrameConstants::kFixedSlotCountAboveFp - spill_slot - 1) *
kPointerSize;
if (access_frame_with_fp()) {
// FP-relative addressing is only valid once a frame has been set up.
DCHECK(frame()->needs_frame());
return FrameOffset::FromFramePointer(offset);
} else {
// No frame. Retrieve all parameters relative to stack pointer.
// sp_delta() accounts for slots pushed on top of the frame since the
// prologue (e.g. pushed call arguments).
int sp_offset =
offset + ((frame()->GetSpToFpSlotCount() + sp_delta()) * kPointerSize);
return FrameOffset::FromStackPointer(sp_offset);
}
}
} // namespace compiler
} // namespace internal
} // namespace v8
......@@ -12,11 +12,17 @@ namespace v8 {
namespace internal {
namespace compiler {
class CallDescriptor;
// Collects the spill slot and other frame slot requirements for a compiled
// function. Frames are usually populated by the register allocator and are used
// by Linkage to generate code for the prologue and epilogue to compiled code.
// by Linkage to generate code for the prologue and epilogue to compiled
// code. Frame objects must be considered immutable once they've been
// instantiated and the basic information about the frame has been collected
// into them. Mutable state associated with the frame is stored separately in
// FrameAccessState.
//
// Frames are divided up into four regions.
// Frames are divided up into three regions.
// - The first is the fixed header, which always has a constant size and can be
// predicted before code generation begins depending on the type of code being
// generated.
......@@ -27,8 +33,6 @@ namespace compiler {
// reserved after register allocation, since its size can only be precisely
// determined after register allocation once the number of used callee-saved
// register is certain.
// - The fourth region is used to pass arguments to other functions. It should
// be empty except when a call is being prepared.
//
// Every pointer in a frame has a slot id. On 32-bit platforms, doubles consume
// two slots.
......@@ -78,18 +82,26 @@ namespace compiler {
// |- - - - - - - - -| | |
// | ... | Callee-saved |
// |- - - - - - - - -| | |
// m+r+4 | callee-saved r | v |
// +-----------------+---- |
// | parameter 0 | ^ |
// |- - - - - - - - -| | |
// | ... | Outgoing parameters |
// |- - - - - - - - -| | (for function calls) |
// | parameter p | v v
// m+r+4 | callee-saved r | v v
// -----+-----------------+----- <-- stack ptr -------------
//
class Frame : public ZoneObject {
public:
explicit Frame(int fixed_frame_size_in_slots);
explicit Frame(int fixed_frame_size_in_slots,
const CallDescriptor* descriptor);
static int FPOffsetToSlot(int frame_offset) {
return StandardFrameConstants::kFixedSlotCountAboveFp - 1 -
frame_offset / kPointerSize;
}
static int SlotToFPOffset(int slot) {
return (StandardFrameConstants::kFixedSlotCountAboveFp - 1 - slot) *
kPointerSize;
}
inline bool needs_frame() const { return needs_frame_; }
inline void MarkNeedsFrame() { needs_frame_ = true; }
inline int GetTotalFrameSlotCount() const { return frame_slot_count_; }
......@@ -97,9 +109,6 @@ class Frame : public ZoneObject {
return GetTotalFrameSlotCount() -
StandardFrameConstants::kFixedSlotCountAboveFp;
}
inline int GetOutgoingParameterSlotCount() const {
return outgoing_parameter_slot_count_;
}
inline int GetSavedCalleeRegisterSlotCount() const {
return callee_saved_slot_count_;
}
......@@ -125,32 +134,23 @@ class Frame : public ZoneObject {
return !allocated_double_registers_->IsEmpty();
}
void AllocateOutgoingParameterSlots(int count) {
outgoing_parameter_slot_count_ += count;
frame_slot_count_ += count;
}
void ClearOutgoingParameterSlots() {
frame_slot_count_ -= outgoing_parameter_slot_count_;
outgoing_parameter_slot_count_ = 0;
}
int AlignSavedCalleeRegisterSlots() {
DCHECK_EQ(0, callee_saved_slot_count_);
needs_frame_ = true;
int delta = frame_slot_count_ & 1;
frame_slot_count_ += delta;
return delta;
}
void AllocateSavedCalleeRegisterSlots(int count) {
DCHECK_EQ(0, outgoing_parameter_slot_count_);
needs_frame_ = true;
frame_slot_count_ += count;
callee_saved_slot_count_ += count;
}
int AllocateSpillSlot(int width) {
DCHECK_EQ(0, outgoing_parameter_slot_count_);
DCHECK_EQ(0, callee_saved_slot_count_);
needs_frame_ = true;
int frame_slot_count_before = frame_slot_count_;
int slot = AllocateAlignedFrameSlot(width);
spill_slot_count_ += (frame_slot_count_ - frame_slot_count_before);
......@@ -158,9 +158,9 @@ class Frame : public ZoneObject {
}
int ReserveSpillSlots(size_t slot_count) {
DCHECK_EQ(0, outgoing_parameter_slot_count_);
DCHECK_EQ(0, callee_saved_slot_count_);
DCHECK_EQ(0, spill_slot_count_);
needs_frame_ = true;
spill_slot_count_ += static_cast<int>(slot_count);
frame_slot_count_ += static_cast<int>(slot_count);
return frame_slot_count_ - 1;
......@@ -182,8 +182,8 @@ class Frame : public ZoneObject {
}
private:
bool needs_frame_;
int frame_slot_count_;
int outgoing_parameter_slot_count_;
int callee_saved_slot_count_;
int spill_slot_count_;
BitVector* allocated_registers_;
......@@ -218,6 +218,38 @@ class FrameOffset {
static const int kFromSp = 1;
static const int kFromFp = 0;
};
// Encapsulates the mutable state maintained during code generation about the
// current function's frame.
class FrameAccessState : public ZoneObject {
public:
explicit FrameAccessState(Frame* const frame)
: frame_(frame), access_frame_with_fp_(false), sp_delta_(0) {
SetFrameAccessToDefault();
}
Frame* const frame() const { return frame_; }
// Number of extra slots currently pushed on top of the frame during code
// generation (e.g. arguments pushed for an outgoing call); used to adjust
// SP-relative offsets.
int sp_delta() const { return sp_delta_; }
void ClearSPDelta() { sp_delta_ = 0; }
void IncreaseSPDelta(int amount) { sp_delta_ += amount; }
// True when frame slots are addressed relative to the frame pointer,
// false when relative to the stack pointer.
bool access_frame_with_fp() const { return access_frame_with_fp_; }
void SetFrameAccessToDefault();
void SetFrameAccessToFP() { access_frame_with_fp_ = true; }
void SetFrameAccessToSP() { access_frame_with_fp_ = false; }
// Get the frame offset for a given spill slot. The location depends on the
// calling convention and the specific frame layout, and may thus be
// architecture-specific. Negative spill slots indicate arguments on the
// caller's frame.
FrameOffset GetFrameOffset(int spill_slot) const;
private:
Frame* const frame_;
bool access_frame_with_fp_;
int sp_delta_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
......
......@@ -48,12 +48,18 @@ class IA32OperandConverter : public InstructionOperandConverter {
return Operand(ToDoubleRegister(op));
}
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
return Operand(offset.from_stack_pointer() ? esp : ebp,
offset.offset() + extra);
}
// Builds an ia32 Operand for a materializable value stored at the given
// FP-relative byte offset, translated through FrameAccessState so the
// result is esp- or ebp-based depending on the active access mode.
Operand ToMaterializableOperand(int materializable_offset) {
FrameOffset offset = frame_access_state()->GetFrameOffset(
Frame::FPOffsetToSlot(materializable_offset));
return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset());
}
Operand HighOperand(InstructionOperand* op) {
DCHECK(op->IsDoubleStackSlot());
return ToOperand(op, kPointerSize);
......@@ -331,12 +337,10 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ add(esp, Immediate(sp_slot_delta * kPointerSize));
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
__ pop(ebp);
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -344,7 +348,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ sub(esp, Immediate(-sp_slot_delta * kPointerSize));
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -364,6 +370,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ call(reg);
}
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallCodeObject: {
......@@ -377,6 +384,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(reg);
}
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -389,6 +397,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
}
__ call(FieldOperand(func, JSFunction::kCodeEntryOffset));
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallJSFunction: {
......@@ -401,6 +410,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
AssembleDeconstructActivationRecord(stack_param_delta);
__ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -409,6 +419,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchPrepareCallCFunction: {
// Frame alignment requires using FP-relative frame addressing.
frame_access_state()->SetFrameAccessToFP();
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters, i.TempRegister(0));
break;
......@@ -425,6 +437,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Register func = i.InputRegister(0);
__ CallCFunction(func, num_parameters);
}
frame_access_state()->SetFrameAccessToDefault();
frame_access_state()->ClearSPDelta();
break;
}
case kArchJmp:
......@@ -980,10 +994,13 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
if (instr->InputAt(0)->IsDoubleRegister()) {
__ sub(esp, Immediate(kDoubleSize));
__ movsd(Operand(esp, 0), i.InputDoubleRegister(0));
frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
} else if (HasImmediateInput(instr, 0)) {
__ push(i.InputImmediate(0));
frame_access_state()->IncreaseSPDelta(1);
} else {
__ push(i.InputOperand(0));
frame_access_state()->IncreaseSPDelta(1);
}
break;
case kIA32Poke: {
......@@ -1354,7 +1371,7 @@ void CodeGenerator::AssembleDeoptimizerCall(
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
// Assemble a prologue similar to the cdecl calling convention.
__ push(ebp);
__ mov(ebp, esp);
......@@ -1363,11 +1380,12 @@ void CodeGenerator::AssemblePrologue() {
// code aging.
CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize);
}
frame_access_state()->SetFrameAccessToDefault();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
......@@ -1415,10 +1433,10 @@ void CodeGenerator::AssembleReturn() {
}
}
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ mov(esp, ebp); // Move stack pointer back to frame pointer.
__ pop(ebp); // Pop caller's frame pointer.
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
......@@ -1465,11 +1483,11 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
if (IsMaterializableFromFrame(src, &offset)) {
if (destination->IsRegister()) {
Register dst = g.ToRegister(destination);
__ mov(dst, Operand(ebp, offset));
__ mov(dst, g.ToMaterializableOperand(offset));
} else {
DCHECK(destination->IsStackSlot());
Operand dst = g.ToOperand(destination);
__ push(Operand(ebp, offset));
__ push(g.ToMaterializableOperand(offset));
__ pop(dst);
}
} else if (destination->IsRegister()) {
......@@ -1561,12 +1579,16 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
__ xchg(g.ToRegister(source), g.ToOperand(destination));
} else if (source->IsStackSlot() && destination->IsStackSlot()) {
// Memory-memory.
Operand src = g.ToOperand(source);
Operand dst = g.ToOperand(destination);
__ push(dst);
__ push(src);
__ pop(dst);
__ pop(src);
Operand dst1 = g.ToOperand(destination);
__ push(dst1);
frame_access_state()->IncreaseSPDelta(1);
Operand src1 = g.ToOperand(source);
__ push(src1);
Operand dst2 = g.ToOperand(destination);
__ pop(dst2);
frame_access_state()->IncreaseSPDelta(-1);
Operand src2 = g.ToOperand(source);
__ pop(src2);
} else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) {
// XMM register-register swap.
XMMRegister src = g.ToDoubleRegister(source);
......
......@@ -147,24 +147,6 @@ CallDescriptor* Linkage::ComputeIncoming(Zone* zone, CompilationInfo* info) {
}
FrameOffset Linkage::GetFrameOffset(int spill_slot, Frame* frame) const {
bool has_frame = frame->GetSpillSlotCount() > 0 ||
incoming_->IsJSFunctionCall() ||
incoming_->kind() == CallDescriptor::kCallAddress;
const int offset =
(StandardFrameConstants::kFixedSlotCountAboveFp - spill_slot - 1) *
kPointerSize;
if (has_frame) {
return FrameOffset::FromFramePointer(offset);
} else {
// No frame. Retrieve all parameters relative to stack pointer.
DCHECK(spill_slot < 0); // Must be a parameter.
int sp_offset = offset + (frame->GetSpToFpSlotCount() * kPointerSize);
return FrameOffset::FromStackPointer(sp_offset);
}
}
// static
int Linkage::FrameStateInputCount(Runtime::FunctionId function) {
// Most runtime functions need a FrameState. A few chosen ones that we know
......
......@@ -188,6 +188,10 @@ class CallDescriptor final : public ZoneObject {
// Returns {true} if this descriptor is a call to a JSFunction.
bool IsJSFunctionCall() const { return kind_ == kCallJSFunction; }
// Returns {true} if this descriptor, when used as the incoming call
// convention, requires a frame to be constructed (C function calls and
// JSFunction calls).
bool RequiresFrameAsIncoming() const {
return IsCFunctionCall() || IsJSFunctionCall();
}
// The number of return values from this call.
size_t ReturnCount() const { return machine_sig_->return_count(); }
......@@ -348,12 +352,6 @@ class Linkage : public ZoneObject {
bool ParameterHasSecondaryLocation(int index) const;
LinkageLocation GetParameterSecondaryLocation(int index) const;
// Get the frame offset for a given spill slot. The location depends on the
// calling convention and the specific frame layout, and may thus be
// architecture-specific. Negative spill slots indicate arguments on the
// caller's frame.
FrameOffset GetFrameOffset(int spill_slot, Frame* frame) const;
static int FrameStateInputCount(Runtime::FunctionId function);
// Get the location where an incoming OSR value is stored.
......
......@@ -120,8 +120,8 @@ class MipsOperandConverter final : public InstructionOperandConverter {
MemOperand ToMemOperand(InstructionOperand* op) const {
DCHECK(op != NULL);
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......@@ -458,12 +458,10 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ addiu(sp, sp, sp_slot_delta * kPointerSize);
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
__ Pop(ra, fp);
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -471,7 +469,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ Subu(sp, sp, Operand(-sp_slot_delta * kPointerSize));
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -491,6 +491,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Call(at);
}
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallCodeObject: {
......@@ -503,6 +504,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
__ Jump(at);
}
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -517,6 +519,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Call(at);
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallJSFunction: {
......@@ -531,6 +534,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
AssembleDeconstructActivationRecord(stack_param_delta);
__ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Jump(at);
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -541,6 +545,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchPrepareCallCFunction: {
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters, kScratchReg);
// Frame alignment requires using FP-relative frame addressing.
frame_access_state()->SetFrameAccessToFP();
break;
}
case kArchPrepareTailCall:
......@@ -555,6 +561,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Register func = i.InputRegister(0);
__ CallCFunction(func, num_parameters);
}
frame_access_state()->SetFrameAccessToDefault();
frame_access_state()->ClearSPDelta();
break;
}
case kArchJmp:
......@@ -963,12 +971,15 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
if (instr->InputAt(0)->IsDoubleRegister()) {
__ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
__ Subu(sp, sp, Operand(kDoubleSize));
frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
} else {
__ Push(i.InputRegister(0));
frame_access_state()->IncreaseSPDelta(1);
}
break;
case kMipsStackClaim: {
__ Subu(sp, sp, Operand(i.InputInt32(0)));
frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize);
break;
}
case kMipsStoreToStackSlot: {
......@@ -1299,17 +1310,18 @@ void CodeGenerator::AssembleDeoptimizerCall(
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ Push(ra, fp);
__ mov(fp, sp);
} else if (descriptor->IsJSFunctionCall()) {
CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(0);
}
frame_access_state()->SetFrameAccessToDefault();
if (info()->is_osr()) {
// TurboFan OSR-compiled functions cannot be entered directly.
......@@ -1372,10 +1384,10 @@ void CodeGenerator::AssembleReturn() {
__ MultiPopFPU(saves_fpu);
}
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ mov(sp, fp);
__ Pop(ra, fp);
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
......
......@@ -120,8 +120,8 @@ class MipsOperandConverter final : public InstructionOperandConverter {
MemOperand ToMemOperand(InstructionOperand* op) const {
DCHECK(op != NULL);
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
......@@ -458,12 +458,10 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ daddiu(sp, sp, sp_slot_delta * kPointerSize);
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
__ Pop(ra, fp);
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -471,7 +469,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ Dsubu(sp, sp, Operand(-sp_slot_delta * kPointerSize));
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -491,6 +491,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Call(at);
}
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallCodeObject: {
......@@ -503,6 +504,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ daddiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
__ Jump(at);
}
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -516,6 +518,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Call(at);
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallJSFunction: {
......@@ -529,6 +532,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
AssembleDeconstructActivationRecord(stack_param_delta);
__ ld(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Jump(at);
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -539,6 +543,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kArchPrepareCallCFunction: {
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters, kScratchReg);
// Frame alignment requires using FP-relative frame addressing.
frame_access_state()->SetFrameAccessToFP();
break;
}
case kArchPrepareTailCall:
......@@ -553,6 +559,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Register func = i.InputRegister(0);
__ CallCFunction(func, num_parameters);
}
frame_access_state()->SetFrameAccessToDefault();
frame_access_state()->ClearSPDelta();
break;
}
case kArchJmp:
......@@ -1072,12 +1080,15 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
if (instr->InputAt(0)->IsDoubleRegister()) {
__ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
__ Subu(sp, sp, Operand(kDoubleSize));
frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
} else {
__ Push(i.InputRegister(0));
frame_access_state()->IncreaseSPDelta(1);
}
break;
case kMips64StackClaim: {
__ Dsubu(sp, sp, Operand(i.InputInt32(0)));
frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize);
break;
}
case kMips64StoreToStackSlot: {
......@@ -1417,17 +1428,18 @@ void CodeGenerator::AssembleDeoptimizerCall(
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ Push(ra, fp);
__ mov(fp, sp);
} else if (descriptor->IsJSFunctionCall()) {
CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(0);
}
frame_access_state()->SetFrameAccessToDefault();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
......@@ -1487,10 +1499,10 @@ void CodeGenerator::AssembleReturn() {
__ MultiPopFPU(saves_fpu);
}
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ mov(sp, fp);
__ Pop(ra, fp);
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
......
......@@ -274,12 +274,12 @@ class PipelineData {
DCHECK(register_allocation_data_ == nullptr);
int fixed_frame_size = 0;
if (descriptor != nullptr) {
fixed_frame_size = (descriptor->kind() == CallDescriptor::kCallAddress)
fixed_frame_size = (descriptor->IsCFunctionCall())
? StandardFrameConstants::kFixedSlotCountAboveFp +
StandardFrameConstants::kCPSlotCount
: StandardFrameConstants::kFixedSlotCount;
}
frame_ = new (instruction_zone()) Frame(fixed_frame_size);
frame_ = new (instruction_zone()) Frame(fixed_frame_size, descriptor);
register_allocation_data_ = new (register_allocation_zone())
RegisterAllocationData(config, register_allocation_zone(), frame(),
sequence(), debug_name);
......
......@@ -49,8 +49,8 @@ class X64OperandConverter : public InstructionOperandConverter {
Operand ToOperand(InstructionOperand* op, int extra = 0) {
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
FrameOffset offset =
linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame());
FrameOffset offset = frame_access_state()->GetFrameOffset(
AllocatedOperand::cast(op)->index());
return Operand(offset.from_stack_pointer() ? rsp : rbp,
offset.offset() + extra);
}
......@@ -578,12 +578,10 @@ void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
if (sp_slot_delta > 0) {
__ addq(rsp, Immediate(sp_slot_delta * kPointerSize));
}
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int spill_slots = frame()->GetSpillSlotCount();
bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
if (has_frame) {
if (frame()->needs_frame()) {
__ popq(rbp);
}
frame_access_state()->SetFrameAccessToDefault();
}
......@@ -591,7 +589,9 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
if (sp_slot_delta < 0) {
__ subq(rsp, Immediate(-sp_slot_delta * kPointerSize));
frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
}
frame_access_state()->SetFrameAccessToSP();
}
......@@ -611,6 +611,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ call(reg);
}
RecordCallPosition(instr);
frame_access_state()->ClearSPDelta();
break;
}
case kArchTailCallCodeObject: {
......@@ -624,6 +625,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(reg);
}
frame_access_state()->ClearSPDelta();
break;
}
case kArchCallJSFunction: {
......@@ -635,6 +637,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Assert(equal, kWrongFunctionContext);
}
__ Call(FieldOperand(func, JSFunction::kCodeEntryOffset));
frame_access_state()->ClearSPDelta();
RecordCallPosition(instr);
break;
}
......@@ -648,6 +651,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
AssembleDeconstructActivationRecord(stack_param_delta);
__ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset));
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
......@@ -656,6 +660,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
}
case kArchPrepareCallCFunction: {
// Frame alignment requires using FP-relative frame addressing.
frame_access_state()->SetFrameAccessToFP();
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters);
break;
......@@ -672,6 +678,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
Register func = i.InputRegister(0);
__ CallCFunction(func, num_parameters);
}
frame_access_state()->SetFrameAccessToDefault();
frame_access_state()->ClearSPDelta();
break;
}
case kArchJmp:
......@@ -1435,15 +1443,19 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kX64Push:
if (HasImmediateInput(instr, 0)) {
__ pushq(i.InputImmediate(0));
frame_access_state()->IncreaseSPDelta(1);
} else {
if (instr->InputAt(0)->IsRegister()) {
__ pushq(i.InputRegister(0));
frame_access_state()->IncreaseSPDelta(1);
} else if (instr->InputAt(0)->IsDoubleRegister()) {
// TODO(titzer): use another machine instruction?
__ subq(rsp, Immediate(kDoubleSize));
frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
__ Movsd(Operand(rsp, 0), i.InputDoubleRegister(0));
} else {
__ pushq(i.InputOperand(0));
frame_access_state()->IncreaseSPDelta(1);
}
}
break;
......@@ -1682,17 +1694,18 @@ static const int kQuadWordSize = 16;
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ pushq(rbp);
__ movq(rbp, rsp);
} else if (descriptor->IsJSFunctionCall()) {
CompilationInfo* info = this->info();
__ Prologue(info->IsCodePreAgingActive());
} else if (needs_frame_) {
} else if (frame()->needs_frame()) {
__ StubPrologue();
} else {
frame()->SetElidedFrameSizeInSlots(kPCOnStackSize / kPointerSize);
}
frame_access_state()->SetFrameAccessToDefault();
int stack_shrink_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
......@@ -1774,10 +1787,10 @@ void CodeGenerator::AssembleReturn() {
__ addp(rsp, Immediate(stack_size));
}
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (descriptor->IsCFunctionCall()) {
__ movq(rsp, rbp); // Move stack pointer back to frame pointer.
__ popq(rbp); // Pop caller's frame pointer.
} else if (descriptor->IsJSFunctionCall() || needs_frame_) {
} else if (frame()->needs_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
......
......@@ -413,6 +413,8 @@ DEFINE_IMPLICATION(turbo, turbo_asm_deoptimization)
DEFINE_IMPLICATION(turbo, turbo_inlining)
DEFINE_BOOL(turbo_shipping, true, "enable TurboFan compiler on subset")
DEFINE_BOOL(turbo_greedy_regalloc, false, "use the greedy register allocator")
DEFINE_BOOL(turbo_sp_frame_access, false,
"use stack pointer-relative access to frame wherever possible")
DEFINE_BOOL(turbo_preprocess_ranges, true,
"run pre-register allocation heuristics")
DEFINE_BOOL(turbo_loop_stackcheck, true, "enable stack checks in loops")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment