Commit 49d1f64e authored by rmcilroy@chromium.org

Arm64: Remove forced csp alignment to 16-byte values for Nvidia chips.

Remove the forced alignment of csp to 16-byte values on Nvidia chips.
Benchmarks on current devices show that this is no longer required.

R=rodolph.perfetta@arm.com, ulan@chromium.org

Review URL: https://codereview.chromium.org/710613002

Cr-Commit-Position: refs/heads/master@{#25225}
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@25225 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent c24ebcd3
@@ -44,17 +44,8 @@ namespace internal {
 // CpuFeatures implementation.
 
 void CpuFeatures::ProbeImpl(bool cross_compile) {
-  if (cross_compile) {
-    // Always align csp in cross compiled code - this is safe and ensures that
-    // csp will always be aligned if it is enabled by probing at runtime.
-    if (FLAG_enable_always_align_csp) supported_ |= 1u << ALWAYS_ALIGN_CSP;
-  } else {
-    base::CPU cpu;
-    if (FLAG_enable_always_align_csp &&
-        (cpu.implementer() == base::CPU::NVIDIA || FLAG_debug_code)) {
-      supported_ |= 1u << ALWAYS_ALIGN_CSP;
-    }
-  }
+  // AArch64 has no configuration options, no further probing is required.
+  supported_ = 0;
 }
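For background, CpuFeatures tracks optional features as bits in a single supported_ mask: probing sets one bit per detected feature, and IsSupported tests it. A minimal sketch of that bit-mask pattern, using illustrative names (Probe, kSupported) rather than V8's exact declarations:

    // Sketch of the CPU-feature bit-mask pattern seen above; names other
    // than ALWAYS_ALIGN_CSP are illustrative, not V8's actual API.
    enum CpuFeature { ALWAYS_ALIGN_CSP };

    static unsigned kSupported = 0;  // after this commit, always 0 on arm64

    static void Probe(bool nvidia_like_cpu) {
      // Each detected feature sets one bit in the mask.
      if (nvidia_like_cpu) kSupported |= 1u << ALWAYS_ALIGN_CSP;
    }

    static bool IsSupported(CpuFeature f) {
      return (kSupported & (1u << f)) != 0;  // a single bit test
    }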
@@ -4334,18 +4334,10 @@ void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
 }
 
-static unsigned int GetProfileEntryHookCallSize(MacroAssembler* masm) {
-  // The entry hook is a "BumpSystemStackPointer" instruction (sub),
-  // followed by a "Push lr" instruction, followed by a call.
-  unsigned int size =
-      Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
-  if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-    // If ALWAYS_ALIGN_CSP then there will be an extra bic instruction in
-    // "BumpSystemStackPointer".
-    size += kInstructionSize;
-  }
-  return size;
-}
+// The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
+// a "Push lr" instruction, followed by a call.
+static const unsigned int kProfileEntryHookCallSize =
+    Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
@@ -4358,7 +4350,7 @@ void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
     __ Push(lr);
     __ CallStub(&stub);
     DCHECK(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) ==
-           GetProfileEntryHookCallSize(masm));
+           kProfileEntryHookCallSize);
     __ Pop(lr);
   }
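The DCHECK holds because the entry-hook call sequence now has a fixed length: sub (BumpSystemStackPointer), push lr, then the call. Under ALWAYS_ALIGN_CSP the conditional extra bic made the length variable, which is why the size was previously computed per MacroAssembler. With a constant length, the hook can recover the instrumented function's address by subtracting that constant from the return address, as the Generate() hunk below does with __ Sub(x0, lr, kProfileEntryHookCallSize). A rough sketch of that arithmetic, with illustrative sizes (the real values come from kInstructionSize and Assembler::kCallSizeWithRelocation, not from here):

    #include <cstdint>

    // Illustrative constants, assuming fixed 4-byte A64 instructions and
    // a two-instruction relocated call; not V8's authoritative values.
    constexpr uintptr_t kInstrSize = 4;
    constexpr uintptr_t kCallSize = 2 * kInstrSize;
    // sub (bump csp) + push lr + call = a fixed-length sequence.
    constexpr uintptr_t kEntryHookCallSize = kCallSize + 2 * kInstrSize;

    // lr holds the return address just past the call sequence; subtracting
    // the constant sequence length yields the function's entry point.
    uintptr_t FunctionEntry(uintptr_t lr) {
      return lr - kEntryHookCallSize;
    }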
@@ -4376,7 +4368,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   const int kNumSavedRegs = kCallerSaved.Count();
 
   // Compute the function's address as the first argument.
-  __ Sub(x0, lr, GetProfileEntryHookCallSize(masm));
+  __ Sub(x0, lr, kProfileEntryHookCallSize);
 
 #if V8_HOST_ARCH_ARM64
   uintptr_t entry_hook =
@@ -1244,14 +1244,7 @@ void MacroAssembler::Uxtw(const Register& rd, const Register& rn) {
 void MacroAssembler::BumpSystemStackPointer(const Operand& space) {
   DCHECK(!csp.Is(sp_));
   if (!TmpList()->IsEmpty()) {
-    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-      UseScratchRegisterScope temps(this);
-      Register temp = temps.AcquireX();
-      Sub(temp, StackPointer(), space);
-      Bic(csp, temp, 0xf);
-    } else {
-      Sub(csp, StackPointer(), space);
-    }
+    Sub(csp, StackPointer(), space);
   } else {
     // TODO(jbramley): Several callers rely on this not using scratch
     // registers, so we use the assembler directly here. However, this means
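For reference, the removed branch computed the new stack pointer and then cleared its low four bits with Bic (bitwise clear), rounding csp down to a 16-byte boundary at or below StackPointer() - space; the surviving path is a plain subtract. The same arithmetic in plain C++, as a sketch (uintptr_t stands in for the 64-bit registers):

    #include <cstdint>

    // Old ALWAYS_ALIGN_CSP path: subtract, then clear the low 4 bits,
    // i.e. round down to a 16-byte boundary (Sub temp, sp, space;
    // Bic csp, temp, 0xf).
    uintptr_t BumpAligned(uintptr_t sp, uintptr_t space) {
      return (sp - space) & ~uintptr_t{0xf};
    }

    // Path kept by this commit: a plain subtract (Sub csp, sp, space);
    // callers keep csp 16-byte aligned where the ABI requires it.
    uintptr_t Bump(uintptr_t sp, uintptr_t space) {
      return sp - space;
    }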
@@ -1288,11 +1281,7 @@ void MacroAssembler::SyncSystemStackPointer() {
   DCHECK(emit_debug_code());
   DCHECK(!csp.Is(sp_));
   { InstructionAccurateScope scope(this);
-    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-      bic(csp, StackPointer(), 0xf);
-    } else {
-      mov(csp, StackPointer());
-    }
+    mov(csp, StackPointer());
   }
   AssertStackConsistency();
 }
@@ -1308,7 +1308,7 @@ void MacroAssembler::AssertStackConsistency() {
   // Avoid emitting code when !use_real_abort() since non-real aborts cause too
   // much code to be generated.
   if (emit_debug_code() && use_real_aborts()) {
-    if (csp.Is(StackPointer()) || CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
+    if (csp.Is(StackPointer())) {
       // Always check the alignment of csp if ALWAYS_ALIGN_CSP is true. We
       // can't check the alignment of csp without using a scratch register (or
       // clobbering the flags), but the processor (or simulator) will abort if
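The preserved comment describes a cheap alignment assertion: rather than masking and comparing (which would need a scratch register or clobber the flags), the code loads through csp and relies on the CPU or simulator trapping, since AArch64 requires csp to be 16-byte aligned when used as the base of a memory access with stack-alignment checking enabled. The explicit check it avoids would look like this sketch:

    #include <cstdint>

    // Explicit form of the test the hardware performs for free on a load
    // through csp: the pointer's low 4 bits must be zero.
    bool IsSixteenByteAligned(uintptr_t csp_value) {
      return (csp_value & 0xf) == 0;
    }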
@@ -761,9 +761,9 @@ class MacroAssembler : public Assembler {
   // it can be evidence of a potential bug because the ABI forbids accesses
   // below csp.
   //
-  // If StackPointer() is the system stack pointer (csp) or ALWAYS_ALIGN_CSP is
-  // enabled, then csp will be dereferenced to cause the processor
-  // (or simulator) to abort if it is not properly aligned.
+  // If StackPointer() is the system stack pointer (csp), then csp will be
+  // dereferenced to cause the processor (or simulator) to abort if it is not
+  // properly aligned.
   //
   // If emit_debug_code() is false, this emits no code.
   void AssertStackConsistency();
@@ -831,9 +831,7 @@ class MacroAssembler : public Assembler {
   inline void BumpSystemStackPointer(const Operand& space);
 
   // Re-synchronizes the system stack pointer (csp) with the current stack
-  // pointer (according to StackPointer()). This function will ensure the
-  // new value of the system stack pointer is remains aligned to 16 bytes, and
-  // is lower than or equal to the value of the current stack pointer.
+  // pointer (according to StackPointer()).
   //
   // This method asserts that StackPointer() is not csp, since the call does
   // not make sense in that context.
@@ -438,11 +438,6 @@ DEFINE_BOOL(enable_vldr_imm, false,
 DEFINE_BOOL(force_long_branches, false,
             "force all emitted branches to be in long mode (MIPS only)")
 
-// cpu-arm64.cc
-DEFINE_BOOL(enable_always_align_csp, true,
-            "enable alignment of csp to 16 bytes on platforms which prefer "
-            "the register to always be aligned (ARM64 only)")
-
 // bootstrapper.cc
 DEFINE_STRING(expose_natives_as, NULL, "expose natives in global object")
 DEFINE_STRING(expose_debug_as, NULL, "expose debug in global object")
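Deleting the DEFINE_BOOL entry removes the flag's FLAG_enable_always_align_csp variable, its default, and its --help text in one place, because the flag list is expanded several times under different macro definitions (the classic X-macro pattern). A stripped-down sketch of that pattern, not V8's actual flag machinery:

    #include <cstdio>

    // One list of flags, expanded under different macro definitions.
    #define FLAG_LIST(V) \
      V(bool, enable_example_flag, true, "an example boolean flag")

    // Expansion 1: define the FLAG_* variables with their defaults.
    #define DEFINE_FLAG(type, name, def, cmt) type FLAG_##name = def;
    FLAG_LIST(DEFINE_FLAG)
    #undef DEFINE_FLAG

    // Expansion 2: emit --help text from the same list.
    void PrintFlagHelp() {
    #define PRINT_FLAG(type, name, def, cmt) \
      std::printf("--%s: %s\n", #name, cmt);
      FLAG_LIST(PRINT_FLAG)
    #undef PRINT_FLAG
    }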