A64: Fixes for veneer emission.

This patch includes 3 fixes for veneer emission.

1) Block veneer pools emission in the PatchingAssembler.
2) Fix the check for veneer pool emission just before a constant pool.
3) Forbid copying of labels. The list of JumpTableEntry used to track the
   deoptimization table entries would make copies of the labels when growing.
   In doing so, it would confuse the Assembler, which was tracking the labels
   via pointers.

R=ulan@chromium.org

Review URL: https://codereview.chromium.org/200133002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19941 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent f1f6d309
......@@ -2548,7 +2548,7 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// Emit veneers for branches that would go out of range during emission of the
// constant pool.
CheckVeneerPool(require_jump, kVeneerDistanceMargin - pool_size);
CheckVeneerPool(require_jump, kVeneerDistanceMargin + pool_size);
Label size_check;
bind(&size_check);
......
......@@ -2175,20 +2175,19 @@ class PatchingAssembler : public Assembler {
: Assembler(NULL,
reinterpret_cast<byte*>(start),
count * kInstructionSize + kGap) {
// Block constant pool emission.
StartBlockConstPool();
StartBlockPools();
}
PatchingAssembler(byte* start, unsigned count)
: Assembler(NULL, start, count * kInstructionSize + kGap) {
// Block constant pool emission.
StartBlockConstPool();
StartBlockPools();
}
~PatchingAssembler() {
// Const pool should still be blocked.
ASSERT(is_const_pool_blocked());
EndBlockConstPool();
EndBlockPools();
// Verify we have generated the number of instruction we expected.
ASSERT((pc_offset() + kGap) == buffer_size_);
// Verify no relocation information has been emitted.
......
......@@ -841,16 +841,16 @@ bool LCodeGen::GenerateDeoptJumpTable() {
__ bind(&table_start);
Label needs_frame;
for (int i = 0; i < deopt_jump_table_.length(); i++) {
__ Bind(&deopt_jump_table_[i].label);
Address entry = deopt_jump_table_[i].address;
Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
__ Bind(&deopt_jump_table_[i]->label);
Address entry = deopt_jump_table_[i]->address;
Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
if (deopt_jump_table_[i].needs_frame) {
if (deopt_jump_table_[i]->needs_frame) {
ASSERT(!info()->saves_caller_doubles());
UseScratchRegisterScope temps(masm());
......@@ -1039,15 +1039,16 @@ void LCodeGen::DeoptimizeBranch(
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
(deopt_jump_table_.last().address != entry) ||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
Deoptimizer::JumpTableEntry table_entry(entry,
bailout_type,
!frame_is_built_);
(deopt_jump_table_.last()->address != entry) ||
(deopt_jump_table_.last()->bailout_type != bailout_type) ||
(deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
Deoptimizer::JumpTableEntry* table_entry =
new(zone()) Deoptimizer::JumpTableEntry(entry,
bailout_type,
!frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ B(&deopt_jump_table_.last().label,
__ B(&deopt_jump_table_.last()->label,
branch_type, reg, bit);
}
}
......
......@@ -347,7 +347,7 @@ class LCodeGen: public LCodeGenBase {
void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
ZoneList<LEnvironment*> deoptimizations_;
ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
......
......@@ -210,6 +210,12 @@ class Label BASE_EMBEDDED {
friend class Assembler;
friend class Displacement;
friend class RegExpMacroAssemblerIrregexp;
#if V8_TARGET_ARCH_A64
// On A64, the Assembler keeps track of pointers to Labels to resolve branches
// to distant targets. Copying labels would confuse the Assembler.
DISALLOW_COPY_AND_ASSIGN(Label); // NOLINT
#endif
};
......
......@@ -134,7 +134,7 @@ class Deoptimizer : public Malloced {
static const int kBailoutTypesWithCodeEntry = SOFT + 1;
struct JumpTableEntry {
struct JumpTableEntry : public ZoneObject {
inline JumpTableEntry(Address entry,
Deoptimizer::BailoutType type,
bool frame)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment