Commit 7b1d583d authored by mbrandy, committed by Commit bot

PPC: Limit unbound label tracking to branch references.

Labels which are not associated with branches (e.g. labels which
record the location of the embedded constant pool or jump tables)
should not be tracked for the purpose of trampoline generation.

This also improves management of the high-water mark in the buffer
that triggers trampoline generation: the mark is now reset whenever
the number of tracked branches drops to zero.

These changes should help minimize unnecessary trampoline generation
and the slow branches that would subsequently be emitted. (A toy
sketch of this bookkeeping follows the commit header below.)

R=dstence@us.ibm.com, michael_dawson@ca.ibm.com
BUG=

Review URL: https://codereview.chromium.org/1237213002

Cr-Commit-Position: refs/heads/master@{#29659}
parent 5a9722b2
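As a rough illustration of the bookkeeping described above, here is a small, self-contained toy model. It is not the V8 code: the class name, the main() driver, and the constant values are assumptions chosen purely for illustration, though the shape of TrackBranch()/UntrackBranch() mirrors the diff below.

#include <cassert>
#include <climits>
#include <cstdio>

// Illustrative stand-ins for the real assembler constants (assumed values).
constexpr int kInstrSize = 4;                       // every PPC instruction is 4 bytes
constexpr int kMaxCondBranchReach = (1 << 15) - 1;  // 16-bit signed displacement
constexpr int kMaxBlockTrampolineSectionSize = 64;  // slack for blocked scopes
constexpr int kTrampolineSlotsSize = kInstrSize;    // one pool slot per branch

// Hypothetical helper modeling the high-water mark for trampoline emission.
class BranchTracker {
 public:
  void TrackBranch(int pc_offset) {
    if (tracked_branch_count_++ == 0) {
      // First tracked branch: the check point is its maximum reach, minus
      // slack for code emitted under a BlockTrampolinePoolScope.
      next_trampoline_check_ =
          pc_offset + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
    } else {
      // Each additional branch adds one slot to the eventual pool, so the
      // pool has to be emitted a little earlier.
      next_trampoline_check_ -= kTrampolineSlotsSize;
    }
  }

  void UntrackBranch() {
    assert(tracked_branch_count_ > 0);
    if (--tracked_branch_count_ == 0) {
      // No branches to unbound labels remain; no trampoline pool is pending.
      next_trampoline_check_ = INT_MAX;
    } else {
      next_trampoline_check_ += kTrampolineSlotsSize;
    }
  }

  bool NeedsTrampolinePool(int pc_offset) const {
    return pc_offset >= next_trampoline_check_;
  }

 private:
  int tracked_branch_count_ = 0;
  int next_trampoline_check_ = INT_MAX;
};

int main() {
  BranchTracker t;
  t.TrackBranch(0);   // branch to a still-unbound label at pc offset 0
  t.TrackBranch(8);   // a second branch to an unbound label
  t.UntrackBranch();  // first label bound
  t.UntrackBranch();  // second label bound; the threshold resets
  // With nothing tracked, even a large pc offset does not force a pool.
  std::printf("needs pool: %d\n", t.NeedsTrampolinePool(1 << 20));
  return 0;
}

The reset to INT_MAX (kMaxInt in the real code) is the "high water mark" reset the message refers to: once every tracked branch has been bound, no pending forward branch can run out of reach, so no trampoline check is needed until a new branch is tracked.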
@@ -465,8 +465,33 @@ void Assembler::CheckBuffer() {
}
}
void Assembler::TrackBranch() {
DCHECK(!trampoline_emitted_);
int count = tracked_branch_count_++;
if (count == 0) {
// We leave space (kMaxBlockTrampolineSectionSize)
// for BlockTrampolinePoolScope buffer.
next_trampoline_check_ =
pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
} else {
next_trampoline_check_ -= kTrampolineSlotsSize;
}
}
void Assembler::UntrackBranch() {
DCHECK(!trampoline_emitted_);
DCHECK(tracked_branch_count_ > 0);
int count = --tracked_branch_count_;
if (count == 0) {
// Reset
next_trampoline_check_ = kMaxInt;
} else {
next_trampoline_check_ += kTrampolineSlotsSize;
}
}
void Assembler::CheckTrampolinePoolQuick() {
if (pc_offset() >= next_buffer_check_) {
if (pc_offset() >= next_trampoline_check_) {
CheckTrampolinePool();
}
}
@@ -212,16 +212,12 @@ Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
no_trampoline_pool_before_ = 0;
trampoline_pool_blocked_nesting_ = 0;
constant_pool_entry_sharing_blocked_nesting_ = 0;
// We leave space (kMaxBlockTrampolineSectionSize)
// for BlockTrampolinePoolScope buffer.
next_buffer_check_ =
FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach -
kMaxBlockTrampolineSectionSize;
next_trampoline_check_ = kMaxInt;
internal_trampoline_exception_ = false;
last_bound_pos_ = 0;
optimizable_cmpi_pos_ = -1;
trampoline_emitted_ = FLAG_force_long_branches;
unbound_labels_count_ = 0;
tracked_branch_count_ = 0;
ClearRecordedAstId();
relocations_.reserve(128);
}
@@ -427,10 +423,14 @@ int Assembler::target_at(int pos) {
}
void Assembler::target_at_put(int pos, int target_pos) {
void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) {
Instr instr = instr_at(pos);
int opcode = instr & kOpcodeMask;
if (is_branch != nullptr) {
*is_branch = (opcode == BX || opcode == BCX);
}
switch (opcode) {
case BX: {
int imm26 = target_pos - pos;
@@ -528,11 +528,7 @@ int Assembler::max_reach_from(int pos) {
void Assembler::bind_to(Label* L, int pos) {
DCHECK(0 <= pos && pos <= pc_offset()); // must have a valid binding position
int32_t trampoline_pos = kInvalidSlotPos;
if (L->is_linked() && !trampoline_emitted_) {
unbound_labels_count_--;
next_buffer_check_ += kTrampolineSlotsSize;
}
bool is_branch = false;
while (L->is_linked()) {
int fixup_pos = L->pos();
int32_t offset = pos - fixup_pos;
@@ -546,11 +542,15 @@ void Assembler::bind_to(Label* L, int pos) {
}
target_at_put(fixup_pos, trampoline_pos);
} else {
target_at_put(fixup_pos, pos);
target_at_put(fixup_pos, pos, &is_branch);
}
}
L->bind_to(pos);
if (!trampoline_emitted_ && is_branch) {
UntrackBranch();
}
// Keep track of the last bound label so we don't eliminate any instructions
// before a bound label.
if (pos > last_bound_pos_) last_bound_pos_ = pos;
@@ -673,10 +673,6 @@ int Assembler::link(Label* L) {
// should avoid most instances of branch offset overflow. See
// target_at() for where this is converted back to kEndOfChain.
position = pc_offset();
if (!trampoline_emitted_) {
unbound_labels_count_++;
next_buffer_check_ -= kTrampolineSlotsSize;
}
}
L->link_to(pc_offset());
}
@@ -2406,46 +2402,29 @@ void Assembler::CheckTrampolinePool() {
// either trampoline_pool_blocked_nesting_ or no_trampoline_pool_before_,
// which are both checked here. Also, recursive calls to CheckTrampolinePool
// are blocked by trampoline_pool_blocked_nesting_.
if ((trampoline_pool_blocked_nesting_ > 0) ||
(pc_offset() < no_trampoline_pool_before_)) {
// Emission is currently blocked; make sure we try again as soon as
// possible.
if (trampoline_pool_blocked_nesting_ > 0) {
next_buffer_check_ = pc_offset() + kInstrSize;
} else {
next_buffer_check_ = no_trampoline_pool_before_;
}
if (trampoline_pool_blocked_nesting_ > 0) return;
if (pc_offset() < no_trampoline_pool_before_) {
next_trampoline_check_ = no_trampoline_pool_before_;
return;
}
DCHECK(!trampoline_emitted_);
DCHECK(unbound_labels_count_ >= 0);
if (unbound_labels_count_ > 0) {
// First we emit jump, then we emit trampoline pool.
{
BlockTrampolinePoolScope block_trampoline_pool(this);
Label after_pool;
b(&after_pool);
int pool_start = pc_offset();
for (int i = 0; i < unbound_labels_count_; i++) {
b(&after_pool);
}
bind(&after_pool);
trampoline_ = Trampoline(pool_start, unbound_labels_count_);
if (tracked_branch_count_ > 0) {
int size = tracked_branch_count_ * kInstrSize;
// As we are only going to emit trampoline once, we need to prevent any
// further emission.
trampoline_emitted_ = true;
next_trampoline_check_ = kMaxInt;
trampoline_emitted_ = true;
// As we are only going to emit trampoline once, we need to prevent any
// further emission.
next_buffer_check_ = kMaxInt;
// First we emit jump, then we emit trampoline pool.
b(size + kInstrSize, LeaveLK);
for (int i = size; i > 0; i -= kInstrSize) {
b(i, LeaveLK);
}
} else {
// Number of branches to unbound label at this point is zero, so we can
// move next buffer check to maximum.
next_buffer_check_ =
pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
trampoline_ = Trampoline(pc_offset() - size, tracked_branch_count_);
}
return;
}
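To make the emission loop above concrete: for tracked_branch_count_ == 3 (so size == 3 * kInstrSize == 12, with kInstrSize == 4 on PPC), the two b() calls lay down roughly the following sequence (byte offsets derived from the loop; the comments are explanatory only):

  b +16    ; initial jump over the pool (size + kInstrSize)
  b +12    ; trampoline slot 0
  b +8     ; trampoline slot 1
  b +4     ; trampoline slot 2
  ...      ; code continues here

Each slot initially branches just past the pool; it is only handed out and repointed later (via get_trampoline_entry() and target_at_put()) when a conditional branch turns out to be unable to reach its bound label directly.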
@@ -592,8 +592,10 @@ class Assembler : public AssemblerBase {
// Returns the branch offset to the given label from the current code position
// Links the label to the current position if it is still unbound
int branch_offset(Label* L) {
int position = link(L);
return position - pc_offset();
if (L->is_unused() && !trampoline_emitted_) {
TrackBranch();
}
return link(L) - pc_offset();
}
// Puts a label's target address at the given position.
@@ -1423,11 +1425,12 @@ class Assembler : public AssemblerBase {
int buffer_space() const { return reloc_info_writer.pos() - pc_; }
// Decode branch instruction at pos and return branch target pos
// Decode instruction(s) at pos and return backchain to previous
// label reference or kEndOfChain.
int target_at(int pos);
// Patch branch instruction at pos to branch to given branch target pos
void target_at_put(int pos, int target_pos);
// Patch instruction(s) at pos to target target_pos (e.g. branch)
void target_at_put(int pos, int target_pos, bool* is_branch = nullptr);
// Record reloc info for current pc_
void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
@@ -1479,7 +1482,7 @@ class Assembler : public AssemblerBase {
// Repeated checking whether the trampoline pool should be emitted is rather
// expensive. By default we only check again once a number of instructions
// has been generated.
int next_buffer_check_; // pc offset of next buffer check.
int next_trampoline_check_; // pc offset of next trampoline check.
// Emission of the trampoline pool may be blocked in some code sequences.
int trampoline_pool_blocked_nesting_; // Block emission if this is not zero.
@@ -1506,6 +1509,8 @@ class Assembler : public AssemblerBase {
inline void CheckBuffer();
void GrowBuffer(int needed = 0);
inline void emit(Instr x);
inline void TrackBranch();
inline void UntrackBranch();
inline void CheckTrampolinePoolQuick();
// Instruction generation
@@ -1559,7 +1564,7 @@ class Assembler : public AssemblerBase {
};
int32_t get_trampoline_entry();
int unbound_labels_count_;
int tracked_branch_count_;
// If trampoline is emitted, generated code is becoming large. As
// this is already a slow case which can possibly break our code
// generation for the extreme case, we use this information to