Commit c54d6552 authored by bmeurer@chromium.org's avatar bmeurer@chromium.org

ARM: Merge redundant entries in literal pool.

This patch also cleans up 64-bit literal handling.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/61763025

Patch from Rodolph Perfetta <rodolph.perfetta@gmail.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17789 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 5b9569b5
......@@ -517,12 +517,13 @@ Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
recorded_ast_id_(TypeFeedbackId::None()),
positions_recorder_(this) {
reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
num_pending_reloc_info_ = 0;
num_pending_32_bit_reloc_info_ = 0;
num_pending_64_bit_reloc_info_ = 0;
next_buffer_check_ = 0;
const_pool_blocked_nesting_ = 0;
no_const_pool_before_ = 0;
first_const_pool_use_ = -1;
first_const_pool_32_use_ = -1;
first_const_pool_64_use_ = -1;
last_bound_pos_ = 0;
ClearRecordedAstId();
}
......@@ -536,7 +537,7 @@ Assembler::~Assembler() {
void Assembler::GetCode(CodeDesc* desc) {
// Emit constant pool if necessary.
CheckConstPool(true, false);
ASSERT(num_pending_reloc_info_ == 0);
ASSERT(num_pending_32_bit_reloc_info_ == 0);
ASSERT(num_pending_64_bit_reloc_info_ == 0);
// Set up code descriptor.
......@@ -3149,14 +3150,19 @@ void Assembler::GrowBuffer() {
// to relocate any emitted relocation entries.
// Relocate pending relocation entries.
for (int i = 0; i < num_pending_reloc_info_; i++) {
RelocInfo& rinfo = pending_reloc_info_[i];
for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
rinfo.rmode() != RelocInfo::POSITION);
if (rinfo.rmode() != RelocInfo::JS_RETURN) {
rinfo.set_pc(rinfo.pc() + pc_delta);
}
}
for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
ASSERT(rinfo.rmode() == RelocInfo::NONE64);
rinfo.set_pc(rinfo.pc() + pc_delta);
}
}
......@@ -3164,7 +3170,7 @@ void Assembler::db(uint8_t data) {
// No relocation info should be pending while using db. db is used
// to write pure data with no pointers and the constant pool should
// be emitted before using db.
ASSERT(num_pending_reloc_info_ == 0);
ASSERT(num_pending_32_bit_reloc_info_ == 0);
ASSERT(num_pending_64_bit_reloc_info_ == 0);
CheckBuffer();
*reinterpret_cast<uint8_t*>(pc_) = data;
......@@ -3176,7 +3182,7 @@ void Assembler::dd(uint32_t data) {
// No relocation info should be pending while using dd. dd is used
// to write pure data with no pointers and the constant pool should
// be emitted before using dd.
ASSERT(num_pending_reloc_info_ == 0);
ASSERT(num_pending_32_bit_reloc_info_ == 0);
ASSERT(num_pending_64_bit_reloc_info_ == 0);
CheckBuffer();
*reinterpret_cast<uint32_t*>(pc_) = data;
......@@ -3246,15 +3252,19 @@ void Assembler::RecordRelocInfo(double data) {
void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
if (num_pending_reloc_info_ == 0) {
first_const_pool_use_ = pc_offset();
}
pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
if (rinfo.rmode() == RelocInfo::NONE64) {
++num_pending_64_bit_reloc_info_;
ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo);
if (num_pending_64_bit_reloc_info_ == 0) {
first_const_pool_64_use_ = pc_offset();
}
pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
} else {
ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
if (num_pending_32_bit_reloc_info_ == 0) {
first_const_pool_32_use_ = pc_offset();
}
pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
}
ASSERT(num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_);
// Make sure the constant pool is not emitted in place of the next
// instruction for which we just recorded relocation info.
BlockConstPoolFor(1);
......@@ -3264,12 +3274,15 @@ void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
void Assembler::BlockConstPoolFor(int instructions) {
int pc_limit = pc_offset() + instructions * kInstrSize;
if (no_const_pool_before_ < pc_limit) {
// If there are some pending entries, the constant pool cannot be blocked
// further than constant pool instruction's reach.
ASSERT((num_pending_reloc_info_ == 0) ||
(pc_limit - first_const_pool_use_ < kMaxDistToIntPool));
// TODO(jfb) Also check 64-bit entries are in range (requires splitting
// them up from 32-bit entries).
// Max pool start (if we need a jump and an alignment).
#ifdef DEBUG
int start = pc_limit + kInstrSize + 2 * kPointerSize;
ASSERT((num_pending_32_bit_reloc_info_ == 0) ||
(start - first_const_pool_32_use_ +
num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool));
ASSERT((num_pending_64_bit_reloc_info_ == 0) ||
(start - first_const_pool_64_use_ < kMaxDistToFPPool));
#endif
no_const_pool_before_ = pc_limit;
}
......@@ -3290,8 +3303,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
}
// There is nothing to do if there are no pending constant pool entries.
if (num_pending_reloc_info_ == 0) {
ASSERT(num_pending_64_bit_reloc_info_ == 0);
if ((num_pending_32_bit_reloc_info_ == 0) &&
(num_pending_64_bit_reloc_info_ == 0)) {
// Calculate the offset of the next check.
next_buffer_check_ = pc_offset() + kCheckPoolInterval;
return;
......@@ -3300,24 +3313,18 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// Check that the code buffer is large enough before emitting the constant
// pool (include the jump over the pool and the constant pool marker and
// the gap to the relocation information).
// Note 64-bit values are wider, and the first one needs to be 64-bit aligned.
int jump_instr = require_jump ? kInstrSize : 0;
int size_up_to_marker = jump_instr + kInstrSize;
int size_after_marker = num_pending_reloc_info_ * kPointerSize;
int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize;
bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
// 64-bit values must be 64-bit aligned.
// We'll start emitting at PC: branch+marker, then 32-bit values, then
// 64-bit values which might need to be aligned.
bool require_64_bit_align = has_fp_values &&
(((uintptr_t)pc_ + size_up_to_marker + size_after_marker) & 0x3);
bool require_64_bit_align = false;
if (has_fp_values) {
require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7);
if (require_64_bit_align) {
size_after_marker += kInstrSize;
}
// num_pending_reloc_info_ also contains 64-bit entries, the above code
// therefore already counted half of the size for 64-bit entries. Add the
// remaining size.
STATIC_ASSERT(kPointerSize == kDoubleSize / 2);
size_after_marker += num_pending_64_bit_reloc_info_ * (kDoubleSize / 2);
size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize;
}
int size = size_up_to_marker + size_after_marker;
......@@ -3330,19 +3337,25 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// * the instruction doesn't require a jump after itself to jump over the
// constant pool, and we're getting close to running out of range.
if (!force_emit) {
ASSERT((first_const_pool_use_ >= 0) && (num_pending_reloc_info_ > 0));
int dist = pc_offset() + size - first_const_pool_use_;
ASSERT((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0));
bool need_emit = false;
if (has_fp_values) {
if ((dist < kMaxDistToFPPool - kCheckPoolInterval) &&
(require_jump || (dist < kMaxDistToFPPool / 2))) {
return;
int dist64 = pc_offset() +
size -
num_pending_32_bit_reloc_info_ * kPointerSize -
first_const_pool_64_use_;
if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) ||
(!require_jump && (dist64 >= kMaxDistToFPPool / 2))) {
need_emit = true;
}
} else {
if ((dist < kMaxDistToIntPool - kCheckPoolInterval) &&
(require_jump || (dist < kMaxDistToIntPool / 2))) {
return;
}
int dist32 =
pc_offset() + size - first_const_pool_32_use_;
if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) ||
(!require_jump && (dist32 >= kMaxDistToIntPool / 2))) {
need_emit = true;
}
if (!need_emit) return;
}
int needed_space = size + kGap;
......@@ -3371,15 +3384,10 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// Emit 64-bit constant pool entries first: their range is smaller than
// 32-bit entries.
for (int i = 0; i < num_pending_reloc_info_; i++) {
RelocInfo& rinfo = pending_reloc_info_[i];
if (rinfo.rmode() != RelocInfo::NONE64) {
// 32-bit values emitted later.
continue;
}
for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
ASSERT(!((uintptr_t)pc_ & 0x3)); // Check 64-bit alignment.
ASSERT(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment.
Instr instr = instr_at(rinfo.pc());
// Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
......@@ -3389,53 +3397,85 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
int delta = pc_ - rinfo.pc() - kPcLoadDelta;
ASSERT(is_uint10(delta));
bool found = false;
uint64_t value = rinfo.raw_data64();
for (int j = 0; j < i; j++) {
RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j];
if (value == rinfo2.raw_data64()) {
found = true;
ASSERT(rinfo2.rmode() == RelocInfo::NONE64);
Instr instr2 = instr_at(rinfo2.pc());
ASSERT(IsVldrDPcImmediateOffset(instr2));
delta = GetVldrDRegisterImmediateOffset(instr2);
delta += rinfo2.pc() - rinfo.pc();
break;
}
}
instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
const double double_data = rinfo.data64();
uint64_t uint_data = 0;
OS::MemCopy(&uint_data, &double_data, sizeof(double_data));
if (!found) {
uint64_t uint_data = rinfo.raw_data64();
emit(uint_data & 0xFFFFFFFF);
emit(uint_data >> 32);
}
}
// Emit 32-bit constant pool entries.
for (int i = 0; i < num_pending_reloc_info_; i++) {
RelocInfo& rinfo = pending_reloc_info_[i];
for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
rinfo.rmode() != RelocInfo::POSITION &&
rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
rinfo.rmode() != RelocInfo::CONST_POOL);
if (rinfo.rmode() == RelocInfo::NONE64) {
// 64-bit values emitted earlier.
continue;
}
rinfo.rmode() != RelocInfo::CONST_POOL &&
rinfo.rmode() != RelocInfo::NONE64);
Instr instr = instr_at(rinfo.pc());
// 64-bit loads shouldn't get here.
ASSERT(!IsVldrDPcImmediateOffset(instr));
if (IsLdrPcImmediateOffset(instr) &&
GetLdrRegisterImmediateOffset(instr) == 0) {
int delta = pc_ - rinfo.pc() - kPcLoadDelta;
ASSERT(is_uint12(delta));
// 0 is the smallest delta:
// ldr rd, [pc, #0]
// constant pool marker
// data
if (IsLdrPcImmediateOffset(instr) &&
GetLdrRegisterImmediateOffset(instr) == 0) {
ASSERT(is_uint12(delta));
bool found = false;
if (!Serializer::enabled() && (rinfo.rmode() >= RelocInfo::CELL)) {
for (int j = 0; j < i; j++) {
RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j];
if ((rinfo2.data() == rinfo.data()) &&
(rinfo2.rmode() == rinfo.rmode())) {
Instr instr2 = instr_at(rinfo2.pc());
if (IsLdrPcImmediateOffset(instr2)) {
delta = GetLdrRegisterImmediateOffset(instr2);
delta += rinfo2.pc() - rinfo.pc();
found = true;
break;
}
}
}
}
instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
if (!found) {
emit(rinfo.data());
}
} else {
ASSERT(IsMovW(instr));
emit(rinfo.data());
}
}
num_pending_reloc_info_ = 0;
num_pending_32_bit_reloc_info_ = 0;
num_pending_64_bit_reloc_info_ = 0;
first_const_pool_use_ = -1;
first_const_pool_32_use_ = -1;
first_const_pool_64_use_ = -1;
RecordComment("]");
......
......@@ -1443,7 +1443,8 @@ class Assembler : public AssemblerBase {
static const int kMaxDistToIntPool = 4*KB;
static const int kMaxDistToFPPool = 1*KB;
// All relocations could be integer, it therefore acts as the limit.
static const int kMaxNumPendingRelocInfo = kMaxDistToIntPool/kInstrSize;
static const int kMaxNumPending32RelocInfo = kMaxDistToIntPool/kInstrSize;
static const int kMaxNumPending64RelocInfo = kMaxDistToFPPool/kInstrSize;
// Postpone the generation of the constant pool for the specified number of
// instructions.
......@@ -1481,11 +1482,16 @@ class Assembler : public AssemblerBase {
// StartBlockConstPool to have an effect.
void EndBlockConstPool() {
if (--const_pool_blocked_nesting_ == 0) {
#ifdef DEBUG
// Max pool start (if we need a jump and an alignment).
int start = pc_offset() + kInstrSize + 2 * kPointerSize;
// Check the constant pool hasn't been blocked for too long.
ASSERT((num_pending_reloc_info_ == 0) ||
(pc_offset() < (first_const_pool_use_ + kMaxDistToIntPool)));
ASSERT((num_pending_32_bit_reloc_info_ == 0) ||
(start + num_pending_64_bit_reloc_info_ * kDoubleSize <
(first_const_pool_32_use_ + kMaxDistToIntPool)));
ASSERT((num_pending_64_bit_reloc_info_ == 0) ||
(pc_offset() < (first_const_pool_use_ + kMaxDistToFPPool)));
(start < (first_const_pool_64_use_ + kMaxDistToFPPool)));
#endif
// Two cases:
// * no_const_pool_before_ >= next_buffer_check_ and the emission is
// still blocked
......@@ -1534,7 +1540,8 @@ class Assembler : public AssemblerBase {
// Keep track of the first instruction requiring a constant pool entry
// since the previous constant pool was emitted.
int first_const_pool_use_;
int first_const_pool_32_use_;
int first_const_pool_64_use_;
// Relocation info generation
// Each relocation is encoded as a variable size value
......@@ -1548,12 +1555,12 @@ class Assembler : public AssemblerBase {
// If every instruction in a long sequence is accessing the pool, we need one
// pending relocation entry per instruction.
// the buffer of pending relocation info
RelocInfo pending_reloc_info_[kMaxNumPendingRelocInfo];
// number of pending reloc info entries in the buffer
int num_pending_reloc_info_;
// Number of pending reloc info entries included above which also happen to
// be 64-bit.
// The buffers of pending relocation info.
RelocInfo pending_32_bit_reloc_info_[kMaxNumPending32RelocInfo];
RelocInfo pending_64_bit_reloc_info_[kMaxNumPending64RelocInfo];
// Number of pending reloc info entries in the 32 bits buffer.
int num_pending_32_bit_reloc_info_;
// Number of pending reloc info entries in the 64 bits buffer.
int num_pending_64_bit_reloc_info_;
// The bound position, before this we cannot do instruction elimination.
......
......@@ -372,6 +372,9 @@ class RelocInfo BASE_EMBEDDED {
Mode rmode() const { return rmode_; }
intptr_t data() const { return data_; }
double data64() const { return data64_; }
// Returns the raw bit pattern of the 64-bit double payload (data64_),
// reinterpreted as an unsigned integer. Used for exact bitwise equality
// comparison when merging duplicate 64-bit constant pool entries.
// Marked const for consistency with the sibling accessors data64(),
// data() and rmode(): it performs no mutation and must be callable on
// const RelocInfo references.
uint64_t raw_data64() const {
  return BitCast<uint64_t>(data64_);
}
Code* host() const { return host_; }
// Apply a relocation by delta bytes
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment