Commit 7caea488 authored by Milad Farazmand, committed by Commit Bot

PPC/s390: [ptr-compr] Switch to 31 bit Smis on 64-bit architectures

Port 12a9ee3a

Original Commit Message:

    32 bit Smis are incompatible with pointer compression, so we disable
    them before enabling pointer compression in order to separate memory and
    performance regressions caused by 31 bit Smis from the pointer compression
    change.

R=ishell@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I1a353f2d6a682ba27e579a7de1bf7ea2240a6bbd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1822117
Reviewed-by: Junliang Yan <jyan@ca.ibm.com>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Milad Farazmand <miladfar@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#63951}
parent 944236fa
...@@ -2941,14 +2941,18 @@ void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) { ...@@ -2941,14 +2941,18 @@ void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) {
void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) { void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) {
STATIC_ASSERT(kSystemPointerSize == 8); STATIC_ASSERT(kSystemPointerSize == 8);
STATIC_ASSERT(kSmiShiftSize == 31);
STATIC_ASSERT(kSmiTagSize == 1); STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
// The builtin_index register contains the builtin index as a Smi. // The builtin_index register contains the builtin index as a Smi.
// Untagging is folded into the indexing operand below. // Untagging is folded into the indexing operand below.
#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
ShiftLeftImm(builtin_index, builtin_index,
Operand(kSystemPointerSizeLog2 - kSmiShift));
#else
ShiftRightArithImm(builtin_index, builtin_index, ShiftRightArithImm(builtin_index, builtin_index,
kSmiShift - kSystemPointerSizeLog2); kSmiShift - kSystemPointerSizeLog2);
#endif
addi(builtin_index, builtin_index, addi(builtin_index, builtin_index,
Operand(IsolateData::builtin_entry_table_offset())); Operand(IsolateData::builtin_entry_table_offset()));
LoadPX(builtin_index, MemOperand(kRootRegister, builtin_index)); LoadPX(builtin_index, MemOperand(kRootRegister, builtin_index));
......
...@@ -876,12 +876,12 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { ...@@ -876,12 +876,12 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
} }
void SmiToPtrArrayOffset(Register dst, Register src) { void SmiToPtrArrayOffset(Register dst, Register src) {
#if V8_TARGET_ARCH_PPC64 #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
STATIC_ASSERT(kSmiTag == 0 && kSmiShift > kPointerSizeLog2);
ShiftRightArithImm(dst, src, kSmiShift - kPointerSizeLog2);
#else
STATIC_ASSERT(kSmiTag == 0 && kSmiShift < kPointerSizeLog2); STATIC_ASSERT(kSmiTag == 0 && kSmiShift < kPointerSizeLog2);
ShiftLeftImm(dst, src, Operand(kPointerSizeLog2 - kSmiShift)); ShiftLeftImm(dst, src, Operand(kPointerSizeLog2 - kSmiShift));
#else
STATIC_ASSERT(kSmiTag == 0 && kSmiShift > kPointerSizeLog2);
ShiftRightArithImm(dst, src, kSmiShift - kPointerSizeLog2);
#endif #endif
} }
...@@ -895,7 +895,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { ...@@ -895,7 +895,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
void AssertNotSmi(Register object); void AssertNotSmi(Register object);
void AssertSmi(Register object); void AssertSmi(Register object);
#if V8_TARGET_ARCH_PPC64 #if !defined(V8_COMPRESS_POINTERS) && !defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
// Ensure it is permissible to read/write int value directly from // Ensure it is permissible to read/write int value directly from
// upper half of the smi. // upper half of the smi.
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
......
...@@ -3415,12 +3415,12 @@ void TurboAssembler::LoadIntLiteral(Register dst, int value) { ...@@ -3415,12 +3415,12 @@ void TurboAssembler::LoadIntLiteral(Register dst, int value) {
void TurboAssembler::LoadSmiLiteral(Register dst, Smi smi) { void TurboAssembler::LoadSmiLiteral(Register dst, Smi smi) {
intptr_t value = static_cast<intptr_t>(smi.ptr()); intptr_t value = static_cast<intptr_t>(smi.ptr());
#if V8_TARGET_ARCH_S390X #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
llilf(dst, Operand(value));
#else
DCHECK_EQ(value & 0xFFFFFFFF, 0); DCHECK_EQ(value & 0xFFFFFFFF, 0);
// The smi value is loaded in upper 32-bits. Lower 32-bit are zeros. // The smi value is loaded in upper 32-bits. Lower 32-bit are zeros.
llihf(dst, Operand(value >> 32)); llihf(dst, Operand(value >> 32));
#else
llilf(dst, Operand(value));
#endif #endif
} }
...@@ -4342,14 +4342,19 @@ void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) { ...@@ -4342,14 +4342,19 @@ void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) {
void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) { void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) {
STATIC_ASSERT(kSystemPointerSize == 8); STATIC_ASSERT(kSystemPointerSize == 8);
STATIC_ASSERT(kSmiShiftSize == 31);
STATIC_ASSERT(kSmiTagSize == 1); STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
// The builtin_index register contains the builtin index as a Smi. // The builtin_index register contains the builtin index as a Smi.
// Untagging is folded into the indexing operand below. // Untagging is folded into the indexing operand below.
#if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
STATIC_ASSERT(kSmiShiftSize == 0);
ShiftLeftP(builtin_index, builtin_index,
Operand(kSystemPointerSizeLog2 - kSmiShift));
#else
ShiftRightArithP(builtin_index, builtin_index, ShiftRightArithP(builtin_index, builtin_index,
Operand(kSmiShift - kSystemPointerSizeLog2)); Operand(kSmiShift - kSystemPointerSizeLog2));
#endif
AddP(builtin_index, builtin_index, AddP(builtin_index, builtin_index,
Operand(IsolateData::builtin_entry_table_offset())); Operand(IsolateData::builtin_entry_table_offset()));
LoadP(builtin_index, MemOperand(kRootRegister, builtin_index)); LoadP(builtin_index, MemOperand(kRootRegister, builtin_index));
......
...@@ -1182,12 +1182,12 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { ...@@ -1182,12 +1182,12 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
} }
void SmiToPtrArrayOffset(Register dst, Register src) { void SmiToPtrArrayOffset(Register dst, Register src) {
#if V8_TARGET_ARCH_S390X #if defined(V8_COMPRESS_POINTERS) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
STATIC_ASSERT(kSmiTag == 0 && kSmiShift > kPointerSizeLog2);
ShiftRightArithP(dst, src, Operand(kSmiShift - kPointerSizeLog2));
#else
STATIC_ASSERT(kSmiTag == 0 && kSmiShift < kPointerSizeLog2); STATIC_ASSERT(kSmiTag == 0 && kSmiShift < kPointerSizeLog2);
ShiftLeftP(dst, src, Operand(kPointerSizeLog2 - kSmiShift)); ShiftLeftP(dst, src, Operand(kPointerSizeLog2 - kSmiShift));
#else
STATIC_ASSERT(kSmiTag == 0 && kSmiShift > kPointerSizeLog2);
ShiftRightArithP(dst, src, Operand(kSmiShift - kPointerSizeLog2));
#endif #endif
} }
...@@ -1201,7 +1201,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { ...@@ -1201,7 +1201,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
void AssertNotSmi(Register object); void AssertNotSmi(Register object);
void AssertSmi(Register object); void AssertSmi(Register object);
#if V8_TARGET_ARCH_S390X #if !defined(V8_COMPRESS_POINTERS) && !defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
// Ensure it is permissible to read/write int value directly from // Ensure it is permissible to read/write int value directly from
// upper half of the smi. // upper half of the smi.
STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTag == 0);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment