ARM64: use jssp for stack slots

R=bmeurer@chromium.org, ulan@chromium.org

Review URL: https://codereview.chromium.org/262533006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21101 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent f7e8255f
...@@ -409,6 +409,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) { ...@@ -409,6 +409,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
int arity = instr->arity(); int arity = instr->arity();
CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
after_push_argument_ = false;
} }
...@@ -423,6 +424,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) { ...@@ -423,6 +424,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
after_push_argument_ = false;
ASSERT(ToRegister(instr->result()).is(x0)); ASSERT(ToRegister(instr->result()).is(x0));
} }
...@@ -470,6 +472,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) { ...@@ -470,6 +472,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
} }
after_push_argument_ = false;
ASSERT(ToRegister(instr->result()).is(x0)); ASSERT(ToRegister(instr->result()).is(x0));
} }
...@@ -491,7 +494,7 @@ void LCodeGen::LoadContextFromDeferred(LOperand* context) { ...@@ -491,7 +494,7 @@ void LCodeGen::LoadContextFromDeferred(LOperand* context) {
if (context->IsRegister()) { if (context->IsRegister()) {
__ Mov(cp, ToRegister(context)); __ Mov(cp, ToRegister(context));
} else if (context->IsStackSlot()) { } else if (context->IsStackSlot()) {
__ Ldr(cp, ToMemOperand(context)); __ Ldr(cp, ToMemOperand(context, kMustUseFramePointer));
} else if (context->IsConstantOperand()) { } else if (context->IsConstantOperand()) {
HConstant* constant = HConstant* constant =
chunk_->LookupConstant(LConstantOperand::cast(context)); chunk_->LookupConstant(LConstantOperand::cast(context));
...@@ -1232,13 +1235,38 @@ static ptrdiff_t ArgumentsOffsetWithoutFrame(ptrdiff_t index) { ...@@ -1232,13 +1235,38 @@ static ptrdiff_t ArgumentsOffsetWithoutFrame(ptrdiff_t index) {
} }
MemOperand LCodeGen::ToMemOperand(LOperand* op) const { MemOperand LCodeGen::ToMemOperand(LOperand* op, StackMode stack_mode) const {
ASSERT(op != NULL); ASSERT(op != NULL);
ASSERT(!op->IsRegister()); ASSERT(!op->IsRegister());
ASSERT(!op->IsDoubleRegister()); ASSERT(!op->IsDoubleRegister());
ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
if (NeedsEagerFrame()) { if (NeedsEagerFrame()) {
return MemOperand(fp, StackSlotOffset(op->index())); int fp_offset = StackSlotOffset(op->index());
if (op->index() >= 0) {
// Loads and stores have a bigger reach in positive offset than negative.
// When the load or the store can't be done in one instruction via fp
// (too big negative offset), we try to access via jssp (positive offset).
// We can reference a stack slot from jssp only if jssp references the end
// of the stack slots. It's not the case when:
// - stack_mode != kCanUseStackPointer: this is the case when a deferred
// code saved the registers.
// - after_push_argument_: arguments has been pushed for a call.
// - inlined_arguments_: inlined arguments have been pushed once. All the
// remainder of the function cannot trust jssp any longer.
// - saves_caller_doubles: some double registers have been pushed, jssp
// references the end of the double registers and not the end of the
// stack slots.
// Also, if the offset from fp is small enough to make a load/store in
// one instruction, we use a fp access.
if ((stack_mode == kCanUseStackPointer) && !after_push_argument_ &&
!inlined_arguments_ && !is_int9(fp_offset) &&
!info()->saves_caller_doubles()) {
int jssp_offset =
(GetStackSlotCount() - op->index() - 1) * kPointerSize;
return MemOperand(masm()->StackPointer(), jssp_offset);
}
}
return MemOperand(fp, fp_offset);
} else { } else {
// Retrieve parameter without eager stack-frame relative to the // Retrieve parameter without eager stack-frame relative to the
// stack-pointer. // stack-pointer.
...@@ -1628,6 +1656,10 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) { ...@@ -1628,6 +1656,10 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
// We push some arguments and they will be pop in an other block. We can't
// trust that jssp references the end of the stack slots until the end of
// the function.
inlined_arguments_ = true;
Register result = ToRegister(instr->result()); Register result = ToRegister(instr->result());
if (instr->hydrogen()->from_inlined()) { if (instr->hydrogen()->from_inlined()) {
...@@ -1996,6 +2028,7 @@ void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { ...@@ -1996,6 +2028,7 @@ void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
__ Call(target); __ Call(target);
} }
generator.AfterCall(); generator.AfterCall();
after_push_argument_ = false;
} }
...@@ -2015,11 +2048,13 @@ void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { ...@@ -2015,11 +2048,13 @@ void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
__ Call(x10); __ Call(x10);
RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
after_push_argument_ = false;
} }
void LCodeGen::DoCallRuntime(LCallRuntime* instr) { void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
CallRuntime(instr->function(), instr->arity(), instr); CallRuntime(instr->function(), instr->arity(), instr);
after_push_argument_ = false;
} }
...@@ -2045,6 +2080,7 @@ void LCodeGen::DoCallStub(LCallStub* instr) { ...@@ -2045,6 +2080,7 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
default: default:
UNREACHABLE(); UNREACHABLE();
} }
after_push_argument_ = false;
} }
...@@ -3102,6 +3138,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { ...@@ -3102,6 +3138,7 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
instr, instr,
x1); x1);
} }
after_push_argument_ = false;
} }
...@@ -4610,6 +4647,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { ...@@ -4610,6 +4647,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
Abort(kDoPushArgumentNotImplementedForDoubleType); Abort(kDoPushArgumentNotImplementedForDoubleType);
} else { } else {
__ Push(ToRegister(argument)); __ Push(ToRegister(argument));
after_push_argument_ = true;
} }
} }
......
...@@ -37,10 +37,16 @@ class LCodeGen: public LCodeGenBase { ...@@ -37,10 +37,16 @@ class LCodeGen: public LCodeGenBase {
frame_is_built_(false), frame_is_built_(false),
safepoints_(info->zone()), safepoints_(info->zone()),
resolver_(this), resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) { expected_safepoint_kind_(Safepoint::kSimple),
after_push_argument_(false),
inlined_arguments_(false) {
PopulateDeoptimizationLiteralsWithInlinedFunctions(); PopulateDeoptimizationLiteralsWithInlinedFunctions();
} }
~LCodeGen() {
ASSERT(!after_push_argument_ || inlined_arguments_);
}
// Simple accessors. // Simple accessors.
Scope* scope() const { return scope_; } Scope* scope() const { return scope_; }
...@@ -82,7 +88,9 @@ class LCodeGen: public LCodeGenBase { ...@@ -82,7 +88,9 @@ class LCodeGen: public LCodeGenBase {
Operand ToOperand(LOperand* op); Operand ToOperand(LOperand* op);
Operand ToOperand32I(LOperand* op); Operand ToOperand32I(LOperand* op);
Operand ToOperand32U(LOperand* op); Operand ToOperand32U(LOperand* op);
MemOperand ToMemOperand(LOperand* op) const; enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
MemOperand ToMemOperand(LOperand* op,
StackMode stack_mode = kCanUseStackPointer) const;
Handle<Object> ToHandle(LConstantOperand* op) const; Handle<Object> ToHandle(LConstantOperand* op) const;
// TODO(jbramley): Examine these helpers and check that they make sense. // TODO(jbramley): Examine these helpers and check that they make sense.
...@@ -348,6 +356,15 @@ class LCodeGen: public LCodeGenBase { ...@@ -348,6 +356,15 @@ class LCodeGen: public LCodeGenBase {
Safepoint::Kind expected_safepoint_kind_; Safepoint::Kind expected_safepoint_kind_;
// This flag is true when we are after a push (but before a call).
// In this situation, jssp no longer references the end of the stack slots so,
// we can only reference a stack slot via fp.
bool after_push_argument_;
// If we have inlined arguments, we are no longer able to use jssp because
// jssp is modified and we never know if we are in a block after or before
// the pop of the arguments (which restores jssp).
bool inlined_arguments_;
int old_position_; int old_position_;
class PushSafepointRegistersScope BASE_EMBEDDED { class PushSafepointRegistersScope BASE_EMBEDDED {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment