Commit 81597be2 authored by Hao Xu, committed by V8 LUCI CQ

[ptr-compr][turbofan][x64] Support pointer decompressing by

addressing mode, pt.2

Compiler generates the below instructions for compressed pointer:

  [ DecompressTaggedPointer
  movl r8,[r8+0x13]
  REX.W addq r8,r14
  ]
  addl [r8+0x7],0xe6

This CL optimizes pointer decompression by using a complex
addressing mode on x64:

  movl r8,[r8+0x13]
  addl [r14+r8*1+0x7],0xe6

Bug: v8:13056, v8:7703
Change-Id: I5beec9803d595d2e97367d53ae4497387e0830fe
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3754285
Commit-Queue: Hao A Xu <hao.a.xu@intel.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82226}
parent 4b683314
......@@ -151,6 +151,20 @@ class X64OperandConverter : public InstructionOperandConverter {
int32_t disp = InputInt32(NextOffset(offset));
return Operand(base, disp);
}
case kMode_MCR: {
Register base = kPtrComprCageBaseRegister;
Register index = InputRegister(NextOffset(offset));
ScaleFactor scale = static_cast<ScaleFactor>(0);
int32_t disp = 0;
return Operand(base, index, scale, disp);
}
case kMode_MCRI: {
Register base = kPtrComprCageBaseRegister;
Register index = InputRegister(NextOffset(offset));
ScaleFactor scale = static_cast<ScaleFactor>(0);
int32_t disp = InputInt32(NextOffset(offset));
return Operand(base, index, scale, disp);
}
case kMode_None:
UNREACHABLE();
}
......
......@@ -420,26 +420,28 @@ namespace compiler {
// N = index register * N for N in {1, 2, 4, 8}
// I = immediate displacement (32-bit signed integer)
#define TARGET_ADDRESSING_MODE_LIST(V) \
V(MR) /* [%r1 ] */ \
V(MRI) /* [%r1 + K] */ \
V(MR1) /* [%r1 + %r2*1 ] */ \
V(MR2) /* [%r1 + %r2*2 ] */ \
V(MR4) /* [%r1 + %r2*4 ] */ \
V(MR8) /* [%r1 + %r2*8 ] */ \
V(MR1I) /* [%r1 + %r2*1 + K] */ \
V(MR2I) /* [%r1 + %r2*2 + K] */ \
V(MR4I) /* [%r1 + %r2*4 + K] */ \
V(MR8I) /* [%r1 + %r2*8 + K] */ \
V(M1) /* [ %r2*1 ] */ \
V(M2) /* [ %r2*2 ] */ \
V(M4) /* [ %r2*4 ] */ \
V(M8) /* [ %r2*8 ] */ \
V(M1I) /* [ %r2*1 + K] */ \
V(M2I) /* [ %r2*2 + K] */ \
V(M4I) /* [ %r2*4 + K] */ \
V(M8I) /* [ %r2*8 + K] */ \
V(Root) /* [%root + K] */
#define TARGET_ADDRESSING_MODE_LIST(V) \
V(MR) /* [%r1 ] */ \
V(MRI) /* [%r1 + K] */ \
V(MR1) /* [%r1 + %r2*1 ] */ \
V(MR2) /* [%r1 + %r2*2 ] */ \
V(MR4) /* [%r1 + %r2*4 ] */ \
V(MR8) /* [%r1 + %r2*8 ] */ \
V(MR1I) /* [%r1 + %r2*1 + K] */ \
V(MR2I) /* [%r1 + %r2*2 + K] */ \
V(MR4I) /* [%r1 + %r2*4 + K] */ \
V(MR8I) /* [%r1 + %r2*8 + K] */ \
V(M1) /* [ %r2*1 ] */ \
V(M2) /* [ %r2*2 ] */ \
V(M4) /* [ %r2*4 ] */ \
V(M8) /* [ %r2*8 ] */ \
V(M1I) /* [ %r2*1 + K] */ \
V(M2I) /* [ %r2*2 + K] */ \
V(M4I) /* [ %r2*4 + K] */ \
V(M8I) /* [ %r2*8 + K] */ \
V(Root) /* [%root + K] */ \
V(MCR) /* [%compressed_base + %r1] */ \
V(MCRI) /* [%compressed_base + %r1 + K] */
} // namespace compiler
} // namespace internal
......
......@@ -27,6 +27,26 @@ namespace v8 {
namespace internal {
namespace compiler {
namespace {
// Returns true when |node| produces a value in compressed (32-bit tagged)
// form: either a load whose LoadRepresentation is compressed, or a phi
// whose MachineRepresentation is kCompressed / kCompressedPointer.
// A null |node| is treated as not compressed.
bool IsCompressed(Node* const node) {
  if (node == nullptr) return false;
  const IrOpcode::Value opcode = node->opcode();
  if (opcode == IrOpcode::kLoad || opcode == IrOpcode::kProtectedLoad ||
      opcode == IrOpcode::kUnalignedLoad ||
      opcode == IrOpcode::kLoadImmutable) {
    LoadRepresentation load_rep = LoadRepresentationOf(node->op());
    return load_rep.IsCompressed();
  } else if (opcode == IrOpcode::kPhi) {
    // Use the cached |opcode| instead of re-calling node->opcode(),
    // consistent with the branch above.
    MachineRepresentation phi_rep = PhiRepresentationOf(node->op());
    return phi_rep == MachineRepresentation::kCompressed ||
           phi_rep == MachineRepresentation::kCompressedPointer;
  }
  return false;
}
} // namespace
// Adds X64-specific methods for generating operands.
class X64OperandGenerator final : public OperandGenerator {
public:
......@@ -215,6 +235,21 @@ class X64OperandGenerator final : public OperandGenerator {
}
BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
DCHECK(m.matches());
// Decompress pointer by complex addressing mode.
if (IsCompressed(m.base())) {
DCHECK(m.index() == nullptr);
DCHECK(m.displacement() == nullptr || CanBeImmediate(m.displacement()));
AddressingMode mode = kMode_MCR;
inputs[(*input_count)++] = UseRegister(m.base(), reg_kind);
if (m.displacement() != nullptr) {
inputs[(*input_count)++] =
m.displacement_mode() == kNegativeDisplacement
? UseNegatedImmediate(m.displacement())
: UseImmediate(m.displacement());
mode = kMode_MCRI;
}
return mode;
}
if (m.displacement() == nullptr || CanBeImmediate(m.displacement())) {
return GenerateMemoryOperandInputs(
m.index(), m.scale(), m.base(), m.displacement(),
......@@ -1096,6 +1131,8 @@ inline AddressingMode AddDisplacementToAddressingMode(AddressingMode mode) {
case kMode_M4I:
case kMode_M8I:
case kMode_Root:
case kMode_MCR:
case kMode_MCRI:
UNREACHABLE();
}
UNREACHABLE();
......
......@@ -29,6 +29,11 @@ bool IsHeapConstant(Node* const node) {
return node->opcode() == IrOpcode::kHeapConstant;
}
// True iff |node| is an integer constant (32-bit or 64-bit).
bool IsIntConstant(Node* const node) {
  switch (node->opcode()) {
    case IrOpcode::kInt32Constant:
    case IrOpcode::kInt64Constant:
      return true;
    default:
      return false;
  }
}
bool IsTaggedPhi(Node* const node) {
if (node->opcode() == IrOpcode::kPhi) {
return CanBeTaggedPointer(PhiRepresentationOf(node->op()));
......@@ -92,6 +97,25 @@ void DecompressionOptimizer::MarkNodeInputs(Node* node) {
State::kOnly32BitsObserved); // value_1
break;
// SPECIAL CASES.
// SPECIAL CASES - Load.
case IrOpcode::kLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kLoadImmutable:
DCHECK_EQ(node->op()->ValueInputCount(), 2);
// Mark addressing base pointer in compressed form to allow pointer
// decompression via complex addressing mode.
if (DECOMPRESS_POINTER_BY_ADDRESSING_MODE &&
node->InputAt(0)->OwnedBy(node) && IsIntConstant(node->InputAt(1))) {
MarkAddressingBase(node->InputAt(0));
} else {
MaybeMarkAndQueueForRevisit(
node->InputAt(0),
State::kEverythingObserved); // base pointer
MaybeMarkAndQueueForRevisit(node->InputAt(1),
State::kEverythingObserved); // index
}
break;
// SPECIAL CASES - Store.
case IrOpcode::kStore:
case IrOpcode::kProtectedStore:
......@@ -152,6 +176,32 @@ void DecompressionOptimizer::MarkNodeInputs(Node* node) {
}
}
// Mark the addressing base pointer as kOnly32BitsObserved when it is safe to
// keep it in compressed form, so the decompression can be folded into the
// use-site addressing mode on X64. Otherwise fall back to
// kEverythingObserved (full decompression).
void DecompressionOptimizer::MarkAddressingBase(Node* base) {
  // Default: the base must be fully decompressed.
  State base_state = State::kEverythingObserved;
  if (IsTaggedMachineLoad(base)) {
    // A tagged load can stay compressed; the use-site decompresses it.
    base_state = State::kOnly32BitsObserved;
  } else if (IsTaggedPhi(base)) {
    // A tagged phi may stay compressed only if every value input is a
    // tagged machine load owned exclusively by this phi.
    bool all_inputs_compressible = true;
    const int value_input_count = base->op()->ValueInputCount();
    for (int i = 0; i < value_input_count; ++i) {
      Node* const input = base->InputAt(i);
      if (!IsTaggedMachineLoad(input) || !input->OwnedBy(base)) {
        all_inputs_compressible = false;
        break;
      }
    }
    if (all_inputs_compressible) base_state = State::kOnly32BitsObserved;
  }
  MaybeMarkAndQueueForRevisit(base, base_state);  // base pointer
}
void DecompressionOptimizer::MaybeMarkAndQueueForRevisit(Node* const node,
State state) {
DCHECK_NE(state, State::kUnvisited);
......
......@@ -83,6 +83,8 @@ class V8_EXPORT_PRIVATE DecompressionOptimizer final {
// State may be updated, and therefore has to be revisited.
void MarkNodeInputs(Node* node);
void MarkAddressingBase(Node* base);
// Mark node's State to be state. We only do this if we have new information,
// i.e either if:
// * We are marking an unvisited node, or
......
......@@ -824,7 +824,8 @@ class MachineRepresentationChecker {
void CheckValueInputIsTaggedOrPointer(Node const* node, int index) {
Node const* input = node->InputAt(index);
switch (inferrer_->GetRepresentation(input)) {
MachineRepresentation rep = inferrer_->GetRepresentation(input);
switch (rep) {
case MachineRepresentation::kTagged:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTaggedSigned:
......@@ -845,6 +846,21 @@ class MachineRepresentationChecker {
default:
break;
}
switch (node->opcode()) {
case IrOpcode::kLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kLoadImmutable:
if (rep == MachineRepresentation::kCompressed ||
rep == MachineRepresentation::kCompressedPointer) {
if (DECOMPRESS_POINTER_BY_ADDRESSING_MODE && index == 0) {
return;
}
}
break;
default:
break;
}
if (inferrer_->GetRepresentation(input) !=
MachineType::PointerRepresentation()) {
std::ostringstream str;
......
......@@ -134,6 +134,12 @@
#define COMPRESS_POINTERS_BOOL false
#endif
#if COMPRESS_POINTERS_BOOL && V8_TARGET_ARCH_X64
#define DECOMPRESS_POINTER_BY_ADDRESSING_MODE true
#else
#define DECOMPRESS_POINTER_BY_ADDRESSING_MODE false
#endif
#ifdef V8_MAP_PACKING
#define V8_MAP_PACKING_BOOL true
#else
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment