Commit ad5b7365 authored by Shiyu Zhang, committed by Commit Bot

[x64] Apply rip-relative call/jump for OFF_HEAP_TARGET

Merge rip-relative loading and call/jump into one instruction for
OFF_HEAP_TARGET call/jump. For example,

  REX.W movq r10,[rip+#disp]
  call r10

turns into:

  call [rip+#disp]

Change-Id: I17e115d054b4b352bdaf8eba2e6ac4054bbedaca
Reviewed-on: https://chromium-review.googlesource.com/1172152
Commit-Queue: Shiyu Zhang <shiyu.zhang@intel.com>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55150}
parent 3a606b91
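
A rough byte-level sketch of the two sequences (assuming the r10 scratch
register from the example above; the byte arrays are illustrative and not
part of the patch):

  // Old sequence: load the off-heap target, then call through the register.
  const byte kOldSeq[] = {0x4C, 0x8B, 0x15, 0, 0, 0, 0,  // REX.W movq r10,[rip+disp32]
                          0x41, 0xFF, 0xD2};             // call r10
  // Merged sequence: call through rip-relative memory directly.
  const byte kNewSeq[] = {0xFF, 0x15, 0, 0, 0, 0};       // call [rip+disp32]

Each shared call site thus shrinks from 10 bytes to 6, and the pooled path no
longer needs a scratch register.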
@@ -354,21 +354,8 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
 }
 
 // Partial Constant Pool.
-bool ConstPool::AddSharedEntry(uint64_t data, int offset) {
-  auto existing = entries_.find(data);
-  if (existing == entries_.end()) {
-    entries_.insert(std::make_pair(data, offset + kMoveImm64Offset));
-    return false;
-  }
-
-  // Make sure this is called with strictly ascending offsets.
-  DCHECK_GT(offset + kMoveImm64Offset, existing->second);
-
-  entries_.insert(std::make_pair(data, offset + kMoveRipRelativeDispOffset));
-  return true;
-}
-
-bool ConstPool::TryRecordEntry(intptr_t data, RelocInfo::Mode mode) {
+bool ConstPool::TryRecordEntry(intptr_t data, int disp_offset,
+                               RelocInfo::Mode mode) {
   if (!FLAG_partial_constant_pool) return false;
 
   if (!RelocInfo::IsShareableRelocMode(mode)) return false;
@@ -379,15 +366,23 @@ bool ConstPool::TryRecordEntry(intptr_t data, RelocInfo::Mode mode) {
     return false;
 
   uint64_t raw_data = static_cast<uint64_t>(data);
-  int offset = assm_->pc_offset();
-  return AddSharedEntry(raw_data, offset);
-}
-
-bool ConstPool::IsMoveRipRelative(byte* instr) {
-  if ((*reinterpret_cast<uint32_t*>(instr) & kMoveRipRelativeMask) ==
-      kMoveRipRelativeInstr)
-    return true;
-  return false;
+  int pc_offset = assm_->pc_offset();
+  auto existing = entries_.find(raw_data);
+  if (existing == entries_.end()) {
+    entries_.insert(std::make_pair(raw_data, pc_offset + kMoveImm64Offset));
+    return false;
+  }
+
+  // Return if the offset of first shareable constant is already recorded. This
+  // happens since duplicate call for the same pc offset may happen (e.g. in
+  // generic path of call/jmpPcRelative).
+  if (pc_offset + kMoveImm64Offset == existing->second) return false;
+
+  // Make sure this is called with strictly ascending offsets.
+  DCHECK_GT(pc_offset + kMoveImm64Offset, existing->second);
+
+  entries_.insert(std::make_pair(raw_data, pc_offset + disp_offset));
+  return true;
 }
 
 void ConstPool::Clear() { entries_.clear(); }
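
A small worked example of the bookkeeping above (the pc offsets 0, 20 and 40
are hypothetical, not taken from the patch): if the same off-heap target is
recorded at those offsets, with the first use falling back to the full
movq-imm64 form and the later two emitted as rip-relative calls, entries_
ends up holding

  {data, 0 + kMoveImm64Offset},             // position of the imm64 constant itself
  {data, 20 + kCallRipRelativeDispOffset},  // position of a dummy disp32
  {data, 40 + kCallRipRelativeDispOffset},  // position of a dummy disp32

and PatchEntries() later rewrites each dummy disp32 so that rip-relative
addressing resolves to the imm64 embedded at the first offset.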
@@ -410,10 +405,9 @@ void ConstPool::PatchEntries() {
           constant_entry_offset - (it->second + kRipRelativeDispSize);
       byte* disp_addr = assm_->addr_at(it->second);
-      // Check if the instruction is actually a rip-relative move.
-      DCHECK(IsMoveRipRelative(disp_addr - kMoveRipRelativeDispOffset));
-      // The displacement of the rip-relative move should be 0 before patching.
-      DCHECK(*reinterpret_cast<uint32_t*>(disp_addr) == 0);
+      DCHECK(IsInPool(disp_addr));
+      // Check dummy displacement of rip-relative addressing before patching.
+      DCHECK_EQ(*reinterpret_cast<uint32_t*>(disp_addr), kDummyDispValue);
       *reinterpret_cast<int32_t*>(disp_addr) = disp32;
     }
   }
@@ -428,12 +422,6 @@ void Assembler::PatchConstPool() {
   constpool_.PatchEntries();
 }
 
-bool Assembler::UseConstPoolFor(RelocInfo::Mode rmode) {
-  if (!FLAG_partial_constant_pool) return false;
-  return (rmode == RelocInfo::NONE || rmode == RelocInfo::EXTERNAL_REFERENCE ||
-          rmode == RelocInfo::OFF_HEAP_TARGET);
-}
-
 // -----------------------------------------------------------------------------
 // Implementation of Assembler.
@@ -1127,6 +1115,23 @@ void Assembler::call(Operand op) {
   emit_operand(0x2, op);
 }
 
+void Assembler::CallPcRelative(Address entry, RelocInfo::Mode rmode,
+                               Register scratch) {
+  if (constpool_.TryRecordEntry(entry, ConstPool::kCallRipRelativeDispOffset,
+                                rmode)) {
+    // Emit rip-relative call. The displacement is set to 0 here and will be
+    // patched in PatchConstPool().
+    Label label;
+    call(Operand(&label, ConstPool::kDummyDispValue));
+    bind(&label);
+  } else {
+    // Emit generic code.
+    EnsureSpace ensure_space(this);
+    DCHECK(rmode > RelocInfo::LAST_GCED_ENUM);
+    movp(scratch, entry, rmode);
+    call(scratch);
+  }
+}
+
 // Calls directly to the given address using a relative offset.
 // Should only ever be used in Code objects for calls within the
@@ -1622,6 +1627,24 @@ void Assembler::jmp(Operand src) {
   emit_operand(0x4, src);
 }
 
+void Assembler::JmpPcRelative(Address entry, RelocInfo::Mode rmode,
+                              Register scratch) {
+  if (constpool_.TryRecordEntry(entry, ConstPool::kJumpRipRelativeDispOffset,
+                                rmode)) {
+    // Emit rip-relative jump. The displacement is set to 0 here and will be
+    // patched in PatchConstPool().
+    Label label;
+    jmp(Operand(&label, ConstPool::kDummyDispValue));
+    bind(&label);
+  } else {
+    // Emit generic code.
+    EnsureSpace ensure_space(this);
+    DCHECK(rmode > RelocInfo::LAST_GCED_ENUM);
+    movp(scratch, entry, rmode);
+    jmp(scratch);
+  }
+}
+
 void Assembler::emit_lea(Register dst, Operand src, int size) {
   EnsureSpace ensure_space(this);
   emit_rex(dst, src, size);
@@ -1777,10 +1800,12 @@ void Assembler::emit_mov(Operand dst, Immediate value, int size) {
 }
 
 void Assembler::movp(Register dst, Address value, RelocInfo::Mode rmode) {
-  if (constpool_.TryRecordEntry(value, rmode)) {
-    // Emit rip-relative move with offset = 0
+  if (constpool_.TryRecordEntry(value, ConstPool::kMoveRipRelativeDispOffset,
+                                rmode)) {
+    // Emit rip-relative move. The displacement is set to 0 here and will be
+    // patched in PatchConstPool().
     Label label;
-    emit_mov(dst, Operand(&label, 0), kPointerSize);
+    emit_mov(dst, Operand(&label, ConstPool::kDummyDispValue), kPointerSize);
     bind(&label);
   } else {
     EnsureSpace ensure_space(this);
@@ -1799,10 +1824,12 @@ void Assembler::movp_heap_number(Register dst, double value) {
 }
 
 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
-  if (constpool_.TryRecordEntry(value, rmode)) {
-    // Emit rip-relative move with offset = 0
+  if (constpool_.TryRecordEntry(value, ConstPool::kMoveRipRelativeDispOffset,
+                                rmode)) {
+    // Emit rip-relative move. The displacement is set to 0 here and will be
+    // patched in PatchConstPool().
     Label label;
-    emit_mov(dst, Operand(&label, 0), kPointerSize);
+    emit_mov(dst, Operand(&label, ConstPool::kDummyDispValue), kPointerSize);
     bind(&label);
   } else {
     EnsureSpace ensure_space(this);
...
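
Taken together, the changes above give CallPcRelative()/JmpPcRelative() two
emission paths. A minimal sketch of the resulting behaviour (the two-call
sequence is illustrative; masm stands for some Assembler, and kScratchRegister
is what the TurboAssembler hunks below pass in):

  // First call to `entry`: TryRecordEntry() sees a new constant, so the generic
  // path emits `movq kScratchRegister, entry` + `call kScratchRegister`, and the
  // imm64 inside that movq becomes the pool entry.
  masm.CallPcRelative(entry, RelocInfo::OFF_HEAP_TARGET, kScratchRegister);
  // Second call to the same `entry`: TryRecordEntry() returns true, so only a
  // 6-byte `call [rip+disp32]` is emitted; its dummy disp32 is fixed up later by
  // PatchConstPool() to point back at the imm64 above.
  masm.CallPcRelative(entry, RelocInfo::OFF_HEAP_TARGET, kScratchRegister);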
@@ -448,20 +448,46 @@ class ConstPool {
  public:
   explicit ConstPool(Assembler* assm) : assm_(assm) {}
 
   // Returns true when partial constant pool is valid for this entry.
-  bool TryRecordEntry(intptr_t data, RelocInfo::Mode mode);
+  bool TryRecordEntry(intptr_t data, int disp_offset, RelocInfo::Mode mode);
 
   bool IsEmpty() const { return entries_.empty(); }
 
   void PatchEntries();
   // Discard any pending pool entries.
   void Clear();
 
+  // Distance between the displacement of rip-relative addressing and the head
+  // of the instruction.
+  static constexpr int kMoveRipRelativeDispOffset = 3;  // REX Opcode ModRM Disp
+  static constexpr int kCallRipRelativeDispOffset = 2;  // Opcode ModRM Disp
+  static constexpr int kJumpRipRelativeDispOffset = 2;  // Opcode ModRM Disp
+
+  // We set the dummy displacement of rip-relative addressing to 0 before
+  // patching entries.
+  static constexpr int kDummyDispValue = 0;
+
  private:
-  // Adds a shared entry to entries_. Returns true if this is not the first time
-  // we add this entry, false otherwise.
-  bool AddSharedEntry(uint64_t data, int offset);
+  // Check if the constant is in a pool.
+  static bool IsInPool(byte* addr) {
+    return IsMoveRipRelative(addr - kMoveRipRelativeDispOffset) ||
+           IsCallRipRelative(addr - kCallRipRelativeDispOffset) ||
+           IsJumpRipRelative(addr - kJumpRipRelativeDispOffset);
+  }
+
+  static uint32_t Mask(byte* instr, uint32_t mask) {
+    return *reinterpret_cast<uint32_t*>(instr) & mask;
+  }
+
+  static bool IsMoveRipRelative(byte* instr) {
+    return Mask(instr, kMoveRipRelativeMask) == kMoveRipRelativeInstr;
+  }
 
-  // Check if the instruction is a rip-relative move.
-  bool IsMoveRipRelative(byte* instr);
+  static bool IsCallRipRelative(byte* instr) {
+    return Mask(instr, kCallRipRelativeMask) == kCallRipRelativeInstr;
+  }
+
+  static bool IsJumpRipRelative(byte* instr) {
+    return Mask(instr, kJumpRipRelativeMask) == kJumpRipRelativeInstr;
+  }
 
   Assembler* assm_;
@@ -471,17 +497,17 @@ class ConstPool {
   // Number of bytes taken up by the displacement of rip-relative addressing.
   static constexpr int kRipRelativeDispSize = 4;  // 32-bit displacement.
 
-  // Distance between the address of the displacement in the rip-relative move
-  // instruction and the head address of the instruction.
-  static constexpr int kMoveRipRelativeDispOffset =
-      3;  // REX Opcode ModRM Displacement
-
   // Distance between the address of the imm64 in the 'movq reg, imm64'
   // instruction and the head address of the instruction.
   static constexpr int kMoveImm64Offset = 2;  // REX Opcode imm64
 
-  // A mask for rip-relative move instruction.
+  // Masks and instruction bits for rip-relative addressing instructions.
   static constexpr uint32_t kMoveRipRelativeMask = 0x00C7FFFB;
-  // The bits for a rip-relative move instruction after mask.
   static constexpr uint32_t kMoveRipRelativeInstr = 0x00058B48;
+  static constexpr uint32_t kCallRipRelativeMask = 0x0000FFFF;
+  static constexpr uint32_t kCallRipRelativeInstr = 0x000015FF;
+  static constexpr uint32_t kJumpRipRelativeMask = 0x0000FFFF;
+  static constexpr uint32_t kJumpRipRelativeInstr = 0x000025FF;
 };
 
 class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
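
The new masks read the first four instruction bytes as a little-endian
uint32_t: `call [rip+disp32]` starts with the bytes FF 15, which load as
0x????15FF, so masking with 0x0000FFFF must yield 0x000015FF; likewise FF 25
for `jmp [rip+disp32]`. The existing move mask 0x00C7FFFB additionally clears
the REX.R bit and the ModRM reg field so that any destination register
matches. A tiny self-contained check of that logic (the byte array and the
check are illustrative, not from the patch):

  const byte kCallBytes[] = {0xFF, 0x15, 0x78, 0x56, 0x34, 0x12};  // call [rip+0x12345678]
  uint32_t head = kCallBytes[0] | (kCallBytes[1] << 8) |
                  (kCallBytes[2] << 16) | (kCallBytes[3] << 24);
  CHECK_EQ(head & 0x0000FFFFu, 0x000015FFu);  // kCallRipRelativeMask/Instr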
@@ -942,6 +968,9 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // Call near absolute indirect, address in register
   void call(Register adr);
 
+  // Call near absolute indirect, address in rip-relative addressing memory
+  void CallPcRelative(Address entry, RelocInfo::Mode rmode, Register scratch);
+
   // Jumps
   // Jump short or near relative.
   // Use a 32-bit signed displacement.
@@ -953,6 +982,9 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   void jmp(Register adr);
   void jmp(Operand src);
 
+  // Jump near absolute indirect (rip-relative addressing m64)
+  void JmpPcRelative(Address entry, RelocInfo::Mode rmode, Register scratch);
+
   // Conditional jumps
   void j(Condition cc,
          Label* L,
@@ -1961,9 +1993,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // Patch entries for partial constant pool.
   void PatchConstPool();
 
-  // Check if use partial constant pool for this rmode.
-  static bool UseConstPoolFor(RelocInfo::Mode rmode);
-
   // Check if there is less than kGap bytes available in the buffer.
   // If this is the case, we need to grow the buffer before emitting
   // an instruction or relocation information.
...
@@ -1464,8 +1464,7 @@ void TurboAssembler::Jump(Operand op) {
 }
 
 void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
-  Move(kScratchRegister, destination, rmode);
-  jmp(kScratchRegister);
+  JmpPcRelative(destination, rmode, kScratchRegister);
 }
 
 void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
@@ -1498,8 +1497,7 @@ if (FLAG_embedded_builtins) {
       CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
       EmbeddedData d = EmbeddedData::FromBlob();
       Address entry = d.InstructionStartOfBuiltin(builtin_index);
-      Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
-      jmp(kScratchRegister);
+      Jump(entry, RelocInfo::OFF_HEAP_TARGET);
       return;
     }
   }
@@ -1527,8 +1525,7 @@ void TurboAssembler::Call(Operand op) {
 }
 
 void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
-  Move(kScratchRegister, destination, rmode);
-  call(kScratchRegister);
+  CallPcRelative(destination, rmode, kScratchRegister);
 }
 
 void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
@@ -1553,8 +1550,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
       CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
       EmbeddedData d = EmbeddedData::FromBlob();
       Address entry = d.InstructionStartOfBuiltin(builtin_index);
-      Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
-      call(kScratchRegister);
+      Call(entry, RelocInfo::OFF_HEAP_TARGET);
       return;
     }
   }
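
With the TurboAssembler hunks above, the off-heap builtin path now funnels
through the partial constant pool. A minimal sketch of the resulting call
chain, inferred from the hunks in this commit:

  // TurboAssembler::Call(Handle<Code>, rmode)        embedded-builtins branch
  //   -> TurboAssembler::Call(entry, RelocInfo::OFF_HEAP_TARGET)
  //     -> Assembler::CallPcRelative(entry, rmode, kScratchRegister)
  //       -> ConstPool::TryRecordEntry(entry, kCallRipRelativeDispOffset, rmode)
  // and symmetrically Jump -> JmpPcRelative for tail calls.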