A64: Record the size of veneer pools for code offset mapping.

Mapping code offsets between code compiled with and without debug break slots
requires information about the sizes of the veneer pools and constant pools.

BUG=v8:3173
LOG=N
R=ulan@chromium.org

Review URL: https://codereview.chromium.org/188253005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19857 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent e57d0296
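As background for the hunks below: a consumer such as the debugger filters the
RelocInfo stream for CONST_POOL and VENEER_POOL entries, whose data() holds the
pool size in bytes, and subtracts those bytes from raw code offsets. The
following minimal sketch is not part of this CL; the identifiers mirror the
RedirectActivationsToRecompiledCodeOnThread hunks further down, but the helper
itself is hypothetical.

// Sketch only: total pool bytes emitted before a given pc in 'code'.
static int PoolBytesBefore(Code* code, byte* pc) {
  int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
                  RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
  int pool_bytes = 0;
  for (RelocIterator it(code, pool_mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->pc() >= pc) break;
    // For pool records, data() is the size of the emitted pool in bytes.
    pool_bytes += static_cast<int>(info->data());
  }
  return pool_bytes;
}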
@@ -631,6 +631,13 @@ void Assembler::ConstantPoolMarker(uint32_t size) {
}
void Assembler::EmitPoolGuard() {
// We must generate only one instruction as this is used in scopes that
// control the size of the code generated.
Emit(BLR | Rn(xzr));
}
void Assembler::ConstantPoolGuard() {
#ifdef DEBUG
// Currently this is only used after a constant pool marker.
@@ -639,9 +646,7 @@ void Assembler::ConstantPoolGuard() {
ASSERT(instr->preceding()->IsLdrLiteralX() &&
instr->preceding()->Rt() == xzr.code());
#endif
// We must generate only one instruction.
Emit(BLR | Rn(xzr));
EmitPoolGuard();
}
@@ -2434,13 +2439,15 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL);
if (((rmode >= RelocInfo::JS_RETURN) &&
(rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
(rmode == RelocInfo::CONST_POOL)) {
(rmode == RelocInfo::CONST_POOL) ||
(rmode == RelocInfo::VENEER_POOL)) {
// Adjust code for new modes.
ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
|| RelocInfo::IsJSReturn(rmode)
|| RelocInfo::IsComment(rmode)
|| RelocInfo::IsPosition(rmode)
|| RelocInfo::IsConstPool(rmode));
|| RelocInfo::IsConstPool(rmode)
|| RelocInfo::IsVeneerPool(rmode));
// These modes do not need an entry in the constant pool.
} else {
ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
@@ -2577,7 +2584,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
rinfo.rmode() != RelocInfo::POSITION &&
rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
rinfo.rmode() != RelocInfo::CONST_POOL);
rinfo.rmode() != RelocInfo::CONST_POOL &&
rinfo.rmode() != RelocInfo::VENEER_POOL);
Instruction* instr = reinterpret_cast<Instruction*>(rinfo.pc());
// Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
@@ -2615,10 +2623,32 @@ bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) {
}
void Assembler::RecordVeneerPool(int location_offset, int size) {
#ifdef ENABLE_DEBUGGER_SUPPORT
RelocInfo rinfo(buffer_ + location_offset,
RelocInfo::VENEER_POOL, static_cast<intptr_t>(size),
NULL);
reloc_info_writer.Write(&rinfo);
#endif
}
void Assembler::EmitVeneers(bool need_protection, int margin) {
BlockPoolsScope scope(this);
RecordComment("[ Veneers");
// The exact size of the veneer pool must be recorded (see the comment at the
// declaration site of RecordConstPool()), but computing the number of
// veneers that will be generated is not obvious. So instead we remember the
// current position and will record the size after the pool has been
// generated.
Label size_check;
bind(&size_check);
int veneer_pool_relocinfo_loc = pc_offset();
#ifdef DEBUG
byte* reloc_writer_record_pos = reloc_info_writer.pos();
#endif
Label end;
if (need_protection) {
b(&end);
@@ -2626,7 +2656,7 @@ void Assembler::EmitVeneers(bool need_protection, int margin) {
EmitVeneersGuard();
Label size_check;
Label veneer_size_check;
std::multimap<int, FarBranchInfo>::iterator it, it_to_delete;
@@ -2637,7 +2667,7 @@ void Assembler::EmitVeneers(bool need_protection, int margin) {
Label* label = it->second.label_;
#ifdef DEBUG
bind(&size_check);
bind(&veneer_size_check);
#endif
// Patch the branch to point to the current position, and emit a branch
// to the label.
@@ -2646,9 +2676,9 @@ void Assembler::EmitVeneers(bool need_protection, int margin) {
branch->SetImmPCOffsetTarget(veneer);
b(label);
#ifdef DEBUG
ASSERT(SizeOfCodeGeneratedSince(&size_check) <=
ASSERT(SizeOfCodeGeneratedSince(&veneer_size_check) <=
static_cast<uint64_t>(kMaxVeneerCodeSize));
size_check.Unuse();
veneer_size_check.Unuse();
#endif
it_to_delete = it++;
@@ -2658,6 +2688,11 @@ void Assembler::EmitVeneers(bool need_protection, int margin) {
}
}
// Record the veneer pool size.
ASSERT(reloc_writer_record_pos == reloc_info_writer.pos());
int pool_size = SizeOfCodeGeneratedSince(&size_check);
RecordVeneerPool(veneer_pool_relocinfo_loc, pool_size);
if (unresolved_branches_.empty()) {
next_veneer_pool_check_ = kMaxInt;
} else {
@@ -2671,13 +2706,6 @@ void Assembler::EmitVeneers(bool need_protection, int margin) {
}
void Assembler::EmitVeneersGuard() {
if (emit_debug_code()) {
Unreachable();
}
}
void Assembler::CheckVeneerPool(bool require_jump,
int margin) {
// There is nothing to do if there are no pending veneer pool entries.
@@ -886,6 +886,7 @@ class Assembler : public AssemblerBase {
static int ConstantPoolSizeAt(Instruction* instr);
// See Assembler::CheckConstPool for more info.
void ConstantPoolMarker(uint32_t size);
void EmitPoolGuard();
void ConstantPoolGuard();
// Prevent veneer pool emission until EndBlockVeneerPool is called.
@@ -924,20 +925,20 @@
// Record the emission of a constant pool.
//
// The emission of constant pool depends on the size of the code generated and
// the number of RelocInfo recorded.
// The emission of constant and veneer pools depends on the size of the code
// generated and the number of RelocInfo recorded.
// The Debug mechanism needs to map code offsets between two versions of a
// function, compiled with and without debugger support (see for example
// Debug::PrepareForBreakPoints()).
// Compiling functions with debugger support generates additional code
// (Debug::GenerateSlot()). This may affect the emission of the constant
// pools and cause the version of the code with debugger support to have
// constant pools generated in different places.
// Recording the position and size of emitted constant pools allows to
// correctly compute the offset mappings between the different versions of a
// function in all situations.
// (Debug::GenerateSlot()). This may affect the emission of the pools and
// cause the version of the code with debugger support to have pools generated
// in different places.
// Recording the position and size of emitted pools allows to correctly
// compute the offset mappings between the different versions of a function in
// all situations.
//
// The parameter indicates the size of the constant pool (in bytes), including
// The parameter indicates the size of the pool (in bytes), including
// the marker and branch over the data.
void RecordConstPool(int size);
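For reference, the recording pattern described above is exercised by the new
pool_size test at the bottom of this CL. A condensed sketch of that pattern
(assembler setup omitted; the sizes are the arbitrary values used by the test):

// Record a constant pool of a known size, then emit that many bytes of data.
const unsigned constant_pool_size = 312;  // bytes
__ RecordConstPool(constant_pool_size);
for (unsigned i = 0; i < constant_pool_size / 4; ++i) __ dc32(0);

// Record a veneer pool at the current offset, then emit its instructions.
const unsigned veneer_pool_size = 184;  // bytes
__ RecordVeneerPool(masm.pc_offset(), veneer_pool_size);
for (unsigned i = 0; i < veneer_pool_size / kInstructionSize; ++i) __ nop();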
@@ -1801,11 +1802,12 @@ class Assembler : public AssemblerBase {
// in the future for example if we decide to add nops between the veneers.
static const int kMaxVeneerCodeSize = 1 * kInstructionSize;
void RecordVeneerPool(int location_offset, int size);
// Emits veneers for branches that are approaching their maximum range.
// If need_protection is true, the veneers are protected by a branch jumping
// over the code.
void EmitVeneers(bool need_protection, int margin = kVeneerDistanceMargin);
void EmitVeneersGuard();
void EmitVeneersGuard() { EmitPoolGuard(); }
// Checks whether veneers need to be emitted at this point.
void CheckVeneerPool(bool require_jump, int margin = kVeneerDistanceMargin);
@@ -286,9 +286,12 @@ int Label::pos() const {
// 00 [4 bit middle_tag] 11 followed by
// 00 [6 bit pc delta]
//
// 1101: constant pool. Used on ARM only for now.
// The format is: 11 1101 11
// signed int (size of the constant pool).
// 1101: constant or veneer pool. Used only on ARM and A64 for now.
// The format is: [2-bit sub-type] 1101 11
// signed int (size of the pool).
// The 2-bit sub-types are:
// 00: constant pool
// 01: veneer pool
// 1110: long_data_record
// The format is: [2-bit data_type_tag] 1110 11
// signed intptr_t, lowest byte written first
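To make the bit layout above concrete, here is a small sketch (not code from
this CL) of how the extra-tag byte of a pool record is composed from the
documented fields. The shift amounts follow the "[2-bit sub-type] 1101 11"
layout; the sub-type values are the kConstPoolTag/kVeneerPoolTag constants
introduced a few hunks below, and 0x0d is the binary 1101 extra tag.

#include <cstdint>

// Sketch: tag byte of a constant/veneer pool record, msb to lsb:
// [2-bit pool sub-type][1101][11].
static inline uint8_t PoolRecordTagByte(int pool_type) {
  const int kPoolExtraTag = 0x0d;  // binary 1101
  // pool_type is 0 (constant pool) or 1 (veneer pool).
  return static_cast<uint8_t>((pool_type << 6) | (kPoolExtraTag << 2) | 0x03);
}
// In the RelocInfo stream this tag byte is followed by a signed int holding
// the pool size in bytes (see WriteExtraTaggedPoolData below).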
@@ -345,8 +348,9 @@ const int kNonstatementPositionTag = 1;
const int kStatementPositionTag = 2;
const int kCommentTag = 3;
const int kConstPoolExtraTag = kPCJumpExtraTag - 2;
const int kConstPoolTag = 3;
const int kPoolExtraTag = kPCJumpExtraTag - 2;
const int kConstPoolTag = 0;
const int kVeneerPoolTag = 1;
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
@@ -406,8 +410,8 @@ void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
}
void RelocInfoWriter::WriteExtraTaggedConstPoolData(int data) {
WriteExtraTag(kConstPoolExtraTag, kConstPoolTag);
void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
WriteExtraTag(kPoolExtraTag, pool_type);
for (int i = 0; i < kIntSize; i++) {
*--pos_ = static_cast<byte>(data);
// Signed right shift is arithmetic shift. Tested in test-utils.cc.
@@ -479,9 +483,11 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
WriteExtraTaggedData(rinfo->data(), kCommentTag);
ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
} else if (RelocInfo::IsConstPool(rmode)) {
} else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
WriteExtraTaggedConstPoolData(static_cast<int>(rinfo->data()));
WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
RelocInfo::IsConstPool(rmode) ? kConstPoolTag
: kVeneerPoolTag);
} else {
ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
@@ -532,7 +538,7 @@ void RelocIterator::AdvanceReadId() {
}
void RelocIterator::AdvanceReadConstPoolData() {
void RelocIterator::AdvanceReadPoolData() {
int x = 0;
for (int i = 0; i < kIntSize; i++) {
x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
@@ -674,10 +680,13 @@ void RelocIterator::next() {
}
Advance(kIntptrSize);
}
} else if ((extra_tag == kConstPoolExtraTag) &&
(GetTopTag() == kConstPoolTag)) {
if (SetMode(RelocInfo::CONST_POOL)) {
AdvanceReadConstPoolData();
} else if (extra_tag == kPoolExtraTag) {
int pool_type = GetTopTag();
ASSERT(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
if (SetMode(rmode)) {
AdvanceReadPoolData();
return;
}
Advance(kIntSize);
@@ -796,6 +805,8 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
return "internal reference";
case RelocInfo::CONST_POOL:
return "constant pool";
case RelocInfo::VENEER_POOL:
return "veneer pool";
case RelocInfo::DEBUG_BREAK_SLOT:
#ifndef ENABLE_DEBUGGER_SUPPORT
UNREACHABLE();
@@ -883,6 +894,7 @@ void RelocInfo::Verify() {
case EXTERNAL_REFERENCE:
case INTERNAL_REFERENCE:
case CONST_POOL:
case VENEER_POOL:
case DEBUG_BREAK_SLOT:
case NONE32:
case NONE64:
@@ -276,9 +276,10 @@ class RelocInfo BASE_EMBEDDED {
EXTERNAL_REFERENCE, // The address of an external C++ function.
INTERNAL_REFERENCE, // An address inside the same function.
// Marks a constant pool. Only used on ARM.
// It uses a custom noncompact encoding.
// Marks constant and veneer pools. Only used on ARM and A64.
// They use a custom noncompact encoding.
CONST_POOL,
VENEER_POOL,
// add more as needed
// Pseudo-types
@@ -288,7 +289,7 @@ class RelocInfo BASE_EMBEDDED {
CODE_AGE_SEQUENCE, // Not stored in RelocInfo array, used explicitly by
// code aging.
FIRST_REAL_RELOC_MODE = CODE_TARGET,
LAST_REAL_RELOC_MODE = CONST_POOL,
LAST_REAL_RELOC_MODE = VENEER_POOL,
FIRST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
LAST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
LAST_CODE_ENUM = DEBUG_BREAK,
@@ -342,6 +343,9 @@ class RelocInfo BASE_EMBEDDED {
static inline bool IsConstPool(Mode mode) {
return mode == CONST_POOL;
}
static inline bool IsVeneerPool(Mode mode) {
return mode == VENEER_POOL;
}
static inline bool IsPosition(Mode mode) {
return mode == POSITION || mode == STATEMENT_POSITION;
}
@@ -546,7 +550,7 @@ class RelocInfoWriter BASE_EMBEDDED {
inline void WriteTaggedPC(uint32_t pc_delta, int tag);
inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag);
inline void WriteExtraTaggedIntData(int data_delta, int top_tag);
inline void WriteExtraTaggedConstPoolData(int data);
inline void WriteExtraTaggedPoolData(int data, int pool_type);
inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag);
inline void WriteTaggedData(intptr_t data_delta, int tag);
inline void WriteExtraTag(int extra_tag, int top_tag);
@@ -597,7 +601,7 @@ class RelocIterator: public Malloced {
void ReadTaggedPC();
void AdvanceReadPC();
void AdvanceReadId();
void AdvanceReadConstPoolData();
void AdvanceReadPoolData();
void AdvanceReadPosition();
void AdvanceReadData();
void AdvanceReadVariableLengthPCJump();
@@ -1899,30 +1899,34 @@ static void RedirectActivationsToRecompiledCodeOnThread(
}
// Iterate over the RelocInfo in the original code to compute the sum of the
// constant pools sizes. (See Assembler::CheckConstPool())
// Note that this is only useful for architectures using constant pools.
int constpool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL);
int frame_const_pool_size = 0;
for (RelocIterator it(*frame_code, constpool_mask); !it.done(); it.next()) {
// constant pools and veneer pools sizes. (See Assembler::CheckConstPool()
// and Assembler::CheckVeneerPool())
// Note that this is only useful for architectures using constant pools or
// veneer pools.
int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
int frame_pool_size = 0;
for (RelocIterator it(*frame_code, pool_mask); !it.done(); it.next()) {
RelocInfo* info = it.rinfo();
if (info->pc() >= frame->pc()) break;
frame_const_pool_size += static_cast<int>(info->data());
frame_pool_size += static_cast<int>(info->data());
}
intptr_t frame_offset =
frame->pc() - frame_code->instruction_start() - frame_const_pool_size;
frame->pc() - frame_code->instruction_start() - frame_pool_size;
// Iterate over the RelocInfo for new code to find the number of bytes
// generated for debug slots and constant pools.
int debug_break_slot_bytes = 0;
int new_code_const_pool_size = 0;
int new_code_pool_size = 0;
int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::CONST_POOL);
RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
// Check if the pc in the new code with debug break
// slots is before this slot.
RelocInfo* info = it.rinfo();
intptr_t new_offset = info->pc() - new_code->instruction_start() -
new_code_const_pool_size - debug_break_slot_bytes;
new_code_pool_size - debug_break_slot_bytes;
if (new_offset >= frame_offset) {
break;
}
@@ -1931,14 +1935,14 @@ static void RedirectActivationsToRecompiledCodeOnThread(
debug_break_slot_bytes += Assembler::kDebugBreakSlotLength;
} else {
ASSERT(RelocInfo::IsConstPool(info->rmode()));
// The size of the constant pool is encoded in the data.
new_code_const_pool_size += static_cast<int>(info->data());
// The size of the pools is encoded in the data.
new_code_pool_size += static_cast<int>(info->data());
}
}
// Compute the equivalent pc in the new code.
byte* new_pc = new_code->instruction_start() + frame_offset +
debug_break_slot_bytes + new_code_const_pool_size;
debug_break_slot_bytes + new_code_pool_size;
if (FLAG_trace_deopt) {
PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
@@ -10030,3 +10030,58 @@ TEST(abs) {
AbsHelperW(kWMinInt);
AbsHelperW(kWMaxInt);
}
TEST(pool_size) {
INIT_V8();
SETUP();
// This test does not execute any code. It only tests that the size of the
// pools is read correctly from the RelocInfo.
Label exit;
__ b(&exit);
const unsigned constant_pool_size = 312;
const unsigned veneer_pool_size = 184;
__ RecordConstPool(constant_pool_size);
for (unsigned i = 0; i < constant_pool_size / 4; ++i) {
__ dc32(0);
}
__ RecordVeneerPool(masm.pc_offset(), veneer_pool_size);
for (unsigned i = 0; i < veneer_pool_size / kInstructionSize; ++i) {
__ nop();
}
__ bind(&exit);
Heap* heap = isolate->heap();
CodeDesc desc;
Object* code_object = NULL;
Code* code;
masm.GetCode(&desc);
MaybeObject* maybe_code = heap->CreateCode(desc, 0, masm.CodeObject());
maybe_code->ToObject(&code_object);
code = Code::cast(code_object);
unsigned pool_count = 0;
int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) |
RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
for (RelocIterator it(code, pool_mask); !it.done(); it.next()) {
RelocInfo* info = it.rinfo();
if (RelocInfo::IsConstPool(info->rmode())) {
ASSERT(info->data() == constant_pool_size);
++pool_count;
}
if (RelocInfo::IsVeneerPool(info->rmode())) {
ASSERT(info->data() == veneer_pool_size);
++pool_count;
}
}
ASSERT(pool_count == 2);
TEARDOWN();
}
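The test only inspects RelocInfo and executes no code. Assuming the usual
cctest conventions (not stated in this CL), it should be runnable directly from
the cctest binary as test-assembler-a64/pool_size.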