Commit 49d1f64e authored by rmcilroy@chromium.org

Arm64: Remove forced csp alignment to 16-byte values for Nvidia chips.

Remove the forced alignment of csp to 16-byte values on Nvidia chips.
Benchmarks on current devices show that this is no longer required.

R=rodolph.perfetta@arm.com, ulan@chromium.org

Review URL: https://codereview.chromium.org/710613002

Cr-Commit-Position: refs/heads/master@{#25225}
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@25225 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent c24ebcd3
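For context (not part of the patch): the ALWAYS_ALIGN_CSP paths removed below kept csp 16-byte aligned by rounding the new stack value down, which is what the "Bic(csp, temp, 0xf)" sequence in the hunks emitted. A minimal standalone sketch of that rounding, with an illustrative helper name:

  #include <cassert>
  #include <cstdint>

  // Illustrative helper, not from V8: clearing the low four bits rounds a
  // stack value down to the previous 16-byte boundary, mirroring the effect
  // of the removed "bic csp, <temp>, #0xf" instruction.
  uint64_t AlignDownTo16(uint64_t sp) { return sp & ~uint64_t{0xf}; }

  int main() {
    assert(AlignDownTo16(0x1000) == 0x1000);  // already aligned: unchanged
    assert(AlignDownTo16(0x1008) == 0x1000);  // misaligned: rounded down
    return 0;
  }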
@@ -44,17 +44,8 @@ namespace internal {
 // CpuFeatures implementation.
 
 void CpuFeatures::ProbeImpl(bool cross_compile) {
-  if (cross_compile) {
-    // Always align csp in cross compiled code - this is safe and ensures that
-    // csp will always be aligned if it is enabled by probing at runtime.
-    if (FLAG_enable_always_align_csp) supported_ |= 1u << ALWAYS_ALIGN_CSP;
-  } else {
-    base::CPU cpu;
-    if (FLAG_enable_always_align_csp &&
-        (cpu.implementer() == base::CPU::NVIDIA || FLAG_debug_code)) {
-      supported_ |= 1u << ALWAYS_ALIGN_CSP;
-    }
-  }
+  // AArch64 has no configuration options, no further probing is required.
+  supported_ = 0;
 }
...
@@ -4334,18 +4334,10 @@ void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
 }
 
-static unsigned int GetProfileEntryHookCallSize(MacroAssembler* masm) {
-  // The entry hook is a "BumpSystemStackPointer" instruction (sub),
-  // followed by a "Push lr" instruction, followed by a call.
-  unsigned int size =
-      Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
-  if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-    // If ALWAYS_ALIGN_CSP then there will be an extra bic instruction in
-    // "BumpSystemStackPointer".
-    size += kInstructionSize;
-  }
-  return size;
-}
+// The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
+// a "Push lr" instruction, followed by a call.
+static const unsigned int kProfileEntryHookCallSize =
+    Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
@@ -4358,7 +4350,7 @@ void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
     __ Push(lr);
     __ CallStub(&stub);
     DCHECK(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) ==
-           GetProfileEntryHookCallSize(masm));
+           kProfileEntryHookCallSize);
     __ Pop(lr);
   }
 }
@@ -4376,7 +4368,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   const int kNumSavedRegs = kCallerSaved.Count();
 
   // Compute the function's address as the first argument.
-  __ Sub(x0, lr, GetProfileEntryHookCallSize(masm));
+  __ Sub(x0, lr, kProfileEntryHookCallSize);
 
 #if V8_HOST_ARCH_ARM64
   uintptr_t entry_hook =
...
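Side note on the hunks above (not part of the patch): with the conditional bic gone, the instrumented entry sequence is always sub + push lr + call, so its size no longer depends on CpuFeatures and the per-masm helper folds into a compile-time constant. A rough sketch of the arithmetic; kInstructionSize is 4 bytes on arm64, while the value used here for kCallSizeWithRelocation is only a placeholder, not V8's real constant:

  #include <cassert>

  // Stand-ins for the constants referenced in the diff above.
  constexpr unsigned kInstructionSize = 4;         // AArch64 instructions are 4 bytes
  constexpr unsigned kCallSizeWithRelocation = 8;  // placeholder value, an assumption

  // sub (BumpSystemStackPointer) + "push lr" + relocated call.
  constexpr unsigned kProfileEntryHookCallSize =
      kCallSizeWithRelocation + 2 * kInstructionSize;

  int main() {
    // The old GetProfileEntryHookCallSize() added one extra kInstructionSize
    // when ALWAYS_ALIGN_CSP was enabled, to account for the bic.
    assert(kProfileEntryHookCallSize == kCallSizeWithRelocation + 8);
    return 0;
  }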
@@ -1244,14 +1244,7 @@ void MacroAssembler::Uxtw(const Register& rd, const Register& rn) {
 void MacroAssembler::BumpSystemStackPointer(const Operand& space) {
   DCHECK(!csp.Is(sp_));
   if (!TmpList()->IsEmpty()) {
-    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-      UseScratchRegisterScope temps(this);
-      Register temp = temps.AcquireX();
-      Sub(temp, StackPointer(), space);
-      Bic(csp, temp, 0xf);
-    } else {
-      Sub(csp, StackPointer(), space);
-    }
+    Sub(csp, StackPointer(), space);
   } else {
     // TODO(jbramley): Several callers rely on this not using scratch
     // registers, so we use the assembler directly here. However, this means
@@ -1288,12 +1281,8 @@ void MacroAssembler::SyncSystemStackPointer() {
   DCHECK(emit_debug_code());
   DCHECK(!csp.Is(sp_));
   { InstructionAccurateScope scope(this);
-    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
-      bic(csp, StackPointer(), 0xf);
-    } else {
-      mov(csp, StackPointer());
-    }
+    mov(csp, StackPointer());
   }
   AssertStackConsistency();
 }
...
@@ -1308,7 +1308,7 @@ void MacroAssembler::AssertStackConsistency() {
   // Avoid emitting code when !use_real_abort() since non-real aborts cause too
   // much code to be generated.
   if (emit_debug_code() && use_real_aborts()) {
-    if (csp.Is(StackPointer()) || CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
+    if (csp.Is(StackPointer())) {
       // Always check the alignment of csp if ALWAYS_ALIGN_CSP is true. We
       // can't check the alignment of csp without using a scratch register (or
       // clobbering the flags), but the processor (or simulator) will abort if
...
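Aside (not part of the patch): as the comment in the hunk above says, the check does not test alignment bits itself; it relies on the processor (or simulator) aborting when csp is used as the base of a memory access while misaligned, which on AArch64 corresponds to the SP alignment check (SCTLR_ELx.SA/SA0). That way no scratch register is needed and the flags stay untouched. The condition the hardware effectively enforces, as a small illustrative snippet:

  #include <cassert>
  #include <cstdint>

  // Illustrative predicate, not from V8: what "properly aligned" means for csp
  // when it is used as the base register of a load or store.
  bool CspAlignedForAccess(uint64_t csp) { return (csp & 0xf) == 0; }

  int main() {
    assert(CspAlignedForAccess(0x7fff0000));   // 16-byte aligned: access is fine
    assert(!CspAlignedForAccess(0x7fff0008));  // misaligned: hardware would fault
    return 0;
  }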
@@ -761,9 +761,9 @@ class MacroAssembler : public Assembler {
   // it can be evidence of a potential bug because the ABI forbids accesses
   // below csp.
   //
-  // If StackPointer() is the system stack pointer (csp) or ALWAYS_ALIGN_CSP is
-  // enabled, then csp will be dereferenced to cause the processor
-  // (or simulator) to abort if it is not properly aligned.
+  // If StackPointer() is the system stack pointer (csp), then csp will be
+  // dereferenced to cause the processor (or simulator) to abort if it is not
+  // properly aligned.
   //
   // If emit_debug_code() is false, this emits no code.
   void AssertStackConsistency();
@@ -831,9 +831,7 @@ class MacroAssembler : public Assembler {
   inline void BumpSystemStackPointer(const Operand& space);
 
   // Re-synchronizes the system stack pointer (csp) with the current stack
-  // pointer (according to StackPointer()). This function will ensure the
-  // new value of the system stack pointer is remains aligned to 16 bytes, and
-  // is lower than or equal to the value of the current stack pointer.
+  // pointer (according to StackPointer()).
   //
   // This method asserts that StackPointer() is not csp, since the call does
   // not make sense in that context.
...
@@ -438,11 +438,6 @@ DEFINE_BOOL(enable_vldr_imm, false,
 DEFINE_BOOL(force_long_branches, false,
             "force all emitted branches to be in long mode (MIPS only)")
 
-// cpu-arm64.cc
-DEFINE_BOOL(enable_always_align_csp, true,
-            "enable alignment of csp to 16 bytes on platforms which prefer "
-            "the register to always be aligned (ARM64 only)")
-
 // bootstrapper.cc
 DEFINE_STRING(expose_natives_as, NULL, "expose natives in global object")
 DEFINE_STRING(expose_debug_as, NULL, "expose debug in global object")
...