Commit 01538bff authored by balazs.kilvady, committed by Commit bot

MIPS64: Assembler support for internal references.

Port 49cbe537

BUG=

Review URL: https://codereview.chromium.org/930623003

Cr-Commit-Position: refs/heads/master@{#26700}
parent 24026a99
@@ -118,10 +118,10 @@ int FPURegister::ToAllocationIndex(FPURegister reg) {
// RelocInfo.
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
-  if (IsInternalReference(rmode_)) {
+  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
// Absolute code pointer inside code object moves with the code object.
byte* p = reinterpret_cast<byte*>(pc_);
-    int count = Assembler::RelocateInternalReference(p, delta);
+    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
CpuFeatures::FlushICache(p, count * sizeof(uint32_t));
}
}
@@ -175,7 +175,8 @@ Register ToRegister(int num) {
// Implementation of RelocInfo.
 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
-                                  1 << RelocInfo::INTERNAL_REFERENCE;
+                                  1 << RelocInfo::INTERNAL_REFERENCE |
+                                  1 << RelocInfo::INTERNAL_REFERENCE_ENCODED;
bool RelocInfo::IsCodedSpecially() {
@@ -634,7 +635,19 @@ bool Assembler::IsAndImmediate(Instr instr) {
}
-int64_t Assembler::target_at(int64_t pos) {
+int64_t Assembler::target_at(int64_t pos, bool is_internal) {
+  if (is_internal) {
+    int64_t* p = reinterpret_cast<int64_t*>(buffer_ + pos);
+    int64_t address = *p;
+    if (address == kEndOfJumpChain) {
+      return kEndOfChain;
+    } else {
+      int64_t instr_address = reinterpret_cast<int64_t>(p);
+      int64_t delta = instr_address - address;
+      DCHECK(pos > delta);
+      return pos - delta;
+    }
+  }
Instr instr = instr_at(pos);
if ((instr & ~kImm16Mask) == 0) {
// Emitted label constant, not part of a branch.
@@ -696,7 +709,13 @@ int64_t Assembler::target_at(int64_t pos) {
}
-void Assembler::target_at_put(int64_t pos, int64_t target_pos) {
+void Assembler::target_at_put(int64_t pos, int64_t target_pos,
+                              bool is_internal) {
+  if (is_internal) {
+    uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos;
+    *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm;
+    return;
+  }
Instr instr = instr_at(pos);
if ((instr & ~kImm16Mask) == 0) {
DCHECK(target_pos == kEndOfChain || target_pos >= 0);
@@ -766,7 +785,8 @@ void Assembler::print(Label* L) {
} else {
PrintF("%d\n", instr);
}
-      next(&l);
+      next(&l, internal_reference_positions_.find(l.pos()) !=
+                   internal_reference_positions_.end());
}
} else {
PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
@@ -777,6 +797,7 @@ void Assembler::print(Label* L) {
void Assembler::bind_to(Label* L, int pos) {
DCHECK(0 <= pos && pos <= pc_offset()); // Must have valid binding position.
   int32_t trampoline_pos = kInvalidSlotPos;
+  bool is_internal = false;
if (L->is_linked() && !trampoline_emitted_) {
unbound_labels_count_--;
next_buffer_check_ += kTrampolineSlotsSize;
@@ -785,23 +806,28 @@ void Assembler::bind_to(Label* L, int pos) {
while (L->is_linked()) {
int32_t fixup_pos = L->pos();
int32_t dist = pos - fixup_pos;
-    next(L);  // Call next before overwriting link with target at fixup_pos.
+    is_internal = internal_reference_positions_.find(fixup_pos) !=
+                  internal_reference_positions_.end();
+    next(L, is_internal);  // Call next before overwriting link with target at
+                           // fixup_pos.
Instr instr = instr_at(fixup_pos);
-    if (IsBranch(instr)) {
+    if (is_internal) {
+      target_at_put(fixup_pos, pos, is_internal);
+    } else if (IsBranch(instr)) {
if (dist > kMaxBranchOffset) {
if (trampoline_pos == kInvalidSlotPos) {
trampoline_pos = get_trampoline_entry(fixup_pos);
CHECK(trampoline_pos != kInvalidSlotPos);
}
DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
-        target_at_put(fixup_pos, trampoline_pos);
+        target_at_put(fixup_pos, trampoline_pos, false);
fixup_pos = trampoline_pos;
dist = pos - fixup_pos;
}
-      target_at_put(fixup_pos, pos);
+      target_at_put(fixup_pos, pos, false);
} else {
DCHECK(IsJ(instr) || IsLui(instr) || IsEmittedConstant(instr));
-      target_at_put(fixup_pos, pos);
+      target_at_put(fixup_pos, pos, false);
}
}
L->bind_to(pos);
@@ -819,9 +845,9 @@ void Assembler::bind(Label* L) {
}
-void Assembler::next(Label* L) {
+void Assembler::next(Label* L, bool is_internal) {
DCHECK(L->is_linked());
-  int link = target_at(L->pos());
+  int link = target_at(L->pos(), is_internal);
if (link == kEndOfChain) {
L->Unuse();
} else {
@@ -2559,8 +2585,18 @@ void Assembler::bc1t(int16_t offset, uint16_t cc) {
// Debugging.
-int Assembler::RelocateInternalReference(byte* pc, intptr_t pc_delta) {
+int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
+                                         intptr_t pc_delta) {
+  if (RelocInfo::IsInternalReference(rmode)) {
+    int64_t* p = reinterpret_cast<int64_t*>(pc);
+    if (*p == kEndOfJumpChain) {
+      return 0;  // Number of instructions patched.
+    }
+    *p += pc_delta;
+    return 2;  // Number of instructions patched.
+  }
   Instr instr = instr_at(pc);
+  DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode));
   DCHECK(IsJ(instr) || IsLui(instr));
if (IsLui(instr)) {
Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize);
@@ -2649,12 +2685,12 @@ void Assembler::GrowBuffer() {
// Relocate runtime entries.
for (RelocIterator it(desc); !it.done(); it.next()) {
RelocInfo::Mode rmode = it.rinfo()->rmode();
-    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
+    if (rmode == RelocInfo::INTERNAL_REFERENCE_ENCODED ||
+        rmode == RelocInfo::INTERNAL_REFERENCE) {
byte* p = reinterpret_cast<byte*>(it.rinfo()->pc());
-      RelocateInternalReference(p, pc_delta);
+      RelocateInternalReference(rmode, p, pc_delta);
}
}
DCHECK(!overflow());
}
@@ -2673,6 +2709,21 @@ void Assembler::dd(uint32_t data) {
}
+void Assembler::dd(Label* label) {
+  CheckBuffer();
+  RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
+  if (label->is_bound()) {
+    uint64_t data = reinterpret_cast<uint64_t>(buffer_ + label->pos());
+    *reinterpret_cast<uint64_t*>(pc_) = data;
+    pc_ += sizeof(uint64_t);
+  } else {
+    uint64_t target_pos = jump_address(label);
+    emit(target_pos);
+    internal_reference_positions_.insert(label->pos());
+  }
+}
void Assembler::emit_code_stub_address(Code* stub) {
CheckBuffer();
*reinterpret_cast<uint64_t*>(pc_) =
@@ -2753,7 +2804,7 @@ void Assembler::CheckTrampolinePool() {
// Buffer growth (and relocation) must be blocked for internal
// references until associated instructions are emitted and available
// to be patched.
-      RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
+      RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
// TODO(plind): Verify this, presume I cannot use macro-assembler
// here.
lui(at, (imm64 >> 32) & kImm16Mask);
@@ -37,6 +37,9 @@
#define V8_MIPS_ASSEMBLER_MIPS_H_
#include <stdio.h>
+#include <set>
#include "src/assembler.h"
#include "src/mips64/constants-mips64.h"
#include "src/serialize.h"
@@ -1059,12 +1062,14 @@ class Assembler : public AssemblerBase {
// Use --trace-deopt to enable.
void RecordDeoptReason(const int reason, const int raw_position);
-  static int RelocateInternalReference(byte* pc, intptr_t pc_delta);
+  static int RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
+                                       intptr_t pc_delta);
// Writes a single byte or word of data in the code stream. Used for
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
+  void dd(Label* label);
// Emits the address of the code stub's first instruction.
void emit_code_stub_address(Code* stub);
@@ -1163,10 +1168,10 @@ class Assembler : public AssemblerBase {
int64_t buffer_space() const { return reloc_info_writer.pos() - pc_; }
   // Decode branch instruction at pos and return branch target pos.
-  int64_t target_at(int64_t pos);
+  int64_t target_at(int64_t pos, bool is_internal);
   // Patch branch instruction at pos to branch to given branch target pos.
-  void target_at_put(int64_t pos, int64_t target_pos);
+  void target_at_put(int64_t pos, int64_t target_pos, bool is_internal);
// Say if we need to relocate with this mode.
bool MustUseReg(RelocInfo::Mode rmode);
@@ -1336,7 +1341,7 @@ class Assembler : public AssemblerBase {
// Labels.
void print(Label* L);
void bind_to(Label* L, int pos);
-  void next(Label* L);
+  void next(Label* L, bool is_internal);
// One trampoline consists of:
// - space for trampoline slots,
@@ -1401,6 +1406,10 @@ class Assembler : public AssemblerBase {
static const int kMaxBranchOffset = (1 << (18 - 1)) - 1;
static const int kInvalidSlotPos = -1;
+  // Internal reference positions, required for unbound internal reference
+  // labels.
+  std::set<int64_t> internal_reference_positions_;
Trampoline trampoline_;
bool internal_trampoline_exception_;
@@ -3058,7 +3058,7 @@ void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) {
{ BlockGrowBufferScope block_buf_growth(this);
// Buffer growth (and relocation) must be blocked for internal references
// until associated instructions are emitted and available to be patched.
-    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
+    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
j(imm28);
}
// Emit a nop in the branch delay slot if required.
@@ -3075,7 +3075,7 @@ void MacroAssembler::Jr(Label* L, BranchDelaySlot bdslot) {
{ BlockGrowBufferScope block_buf_growth(this);
// Buffer growth (and relocation) must be blocked for internal references
// until associated instructions are emitted and available to be patched.
-    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
+    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
li(at, Operand(imm64), ADDRESS_LOAD);
}
jr(at);
@@ -3094,7 +3094,7 @@ void MacroAssembler::Jalr(Label* L, BranchDelaySlot bdslot) {
{ BlockGrowBufferScope block_buf_growth(this);
// Buffer growth (and relocation) must be blocked for internal references
// until associated instructions are emitted and available to be patched.
-    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
+    RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
li(at, Operand(imm64), ADDRESS_LOAD);
}
jalr(at);
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#include <iostream>  // NOLINT(readability/streams)
#include "src/v8.h"
#include "src/disassembler.h"
@@ -1383,4 +1385,228 @@ TEST(MIPS16) {
CHECK_EQ(static_cast<int64_t>(0x000000003333bbccL), t.r6);
}
TEST(jump_tables1) {
// Test jump tables with forward jumps.
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
HandleScope scope(isolate);
Assembler assm(isolate, nullptr, 0);
const int kNumCases = 512;
int values[kNumCases];
isolate->random_number_generator()->NextBytes(values, sizeof(values));
Label labels[kNumCases];
__ daddiu(sp, sp, -8);
__ sd(ra, MemOperand(sp));
if ((assm.pc_offset() & 7) == 0) {
__ nop();
}
Label done;
{
PredictableCodeSizeScope predictable(
&assm, (kNumCases * 2 + 7) * Assembler::kInstrSize);
Label here;
__ bal(&here);
__ nop();
__ bind(&here);
__ dsll(at, a0, 3);
__ daddu(at, at, ra);
__ ld(at, MemOperand(at, 5 * Assembler::kInstrSize));
__ jr(at);
__ nop();
for (int i = 0; i < kNumCases; ++i) {
__ dd(&labels[i]);
}
}
for (int i = 0; i < kNumCases; ++i) {
__ bind(&labels[i]);
__ lui(v0, (values[i] >> 16) & 0xffff);
__ ori(v0, v0, values[i] & 0xffff);
__ b(&done);
__ nop();
}
__ bind(&done);
__ ld(ra, MemOperand(sp));
__ daddiu(sp, sp, 8);
__ jr(ra);
__ nop();
CodeDesc desc;
assm.GetCode(&desc);
Handle<Code> code = isolate->factory()->NewCode(
desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
code->Print(std::cout);
#endif
F1 f = FUNCTION_CAST<F1>(code->entry());
for (int i = 0; i < kNumCases; ++i) {
int res = static_cast<int>(
    reinterpret_cast<int64_t>(CALL_GENERATED_CODE(f, i, 0, 0, 0, 0)));
::printf("f(%d) = %d\n", i, res);
CHECK_EQ(values[i], res);
}
}
TEST(jump_tables2) {
// Test jump tables with backward jumps.
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
HandleScope scope(isolate);
Assembler assm(isolate, nullptr, 0);
const int kNumCases = 512;
int values[kNumCases];
isolate->random_number_generator()->NextBytes(values, sizeof(values));
Label labels[kNumCases];
__ daddiu(sp, sp, -8);
__ sd(ra, MemOperand(sp));
Label done, dispatch;
__ b(&dispatch);
__ nop();
for (int i = 0; i < kNumCases; ++i) {
__ bind(&labels[i]);
__ lui(v0, (values[i] >> 16) & 0xffff);
__ ori(v0, v0, values[i] & 0xffff);
__ b(&done);
__ nop();
}
if ((assm.pc_offset() & 7) == 0) {
__ nop();
}
__ bind(&dispatch);
{
PredictableCodeSizeScope predictable(
&assm, (kNumCases * 2 + 7) * Assembler::kInstrSize);
Label here;
__ bal(&here);
__ nop();
__ bind(&here);
__ dsll(at, a0, 3);
__ daddu(at, at, ra);
__ ld(at, MemOperand(at, 5 * Assembler::kInstrSize));
__ jr(at);
__ nop();
for (int i = 0; i < kNumCases; ++i) {
__ dd(&labels[i]);
}
}
__ bind(&done);
__ ld(ra, MemOperand(sp));
__ daddiu(sp, sp, 8);
__ jr(ra);
__ nop();
CodeDesc desc;
assm.GetCode(&desc);
Handle<Code> code = isolate->factory()->NewCode(
desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
code->Print(std::cout);
#endif
F1 f = FUNCTION_CAST<F1>(code->entry());
for (int i = 0; i < kNumCases; ++i) {
int res = static_cast<int>(
    reinterpret_cast<int64_t>(CALL_GENERATED_CODE(f, i, 0, 0, 0, 0)));
::printf("f(%d) = %d\n", i, res);
CHECK_EQ(values[i], res);
}
}
TEST(jump_tables3) {
// Test jump tables with backward jumps and embedded heap objects.
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
HandleScope scope(isolate);
Assembler assm(isolate, nullptr, 0);
const int kNumCases = 512;
Handle<Object> values[kNumCases];
for (int i = 0; i < kNumCases; ++i) {
double value = isolate->random_number_generator()->NextDouble();
values[i] = isolate->factory()->NewHeapNumber(value, IMMUTABLE, TENURED);
}
Label labels[kNumCases];
Object* obj;
int64_t imm64;
__ daddiu(sp, sp, -8);
__ sd(ra, MemOperand(sp));
Label done, dispatch;
__ b(&dispatch);
__ nop();
for (int i = 0; i < kNumCases; ++i) {
__ bind(&labels[i]);
obj = *values[i];
imm64 = reinterpret_cast<intptr_t>(obj);
__ lui(v0, (imm64 >> 32) & kImm16Mask);
__ ori(v0, v0, (imm64 >> 16) & kImm16Mask);
__ dsll(v0, v0, 16);
__ ori(v0, v0, imm64 & kImm16Mask);
__ b(&done);
__ nop();
}
__ stop("chk");
if ((assm.pc_offset() & 7) == 0) {
__ nop();
}
__ bind(&dispatch);
{
PredictableCodeSizeScope predictable(
&assm, (kNumCases * 2 + 7) * Assembler::kInstrSize);
Label here;
__ bal(&here);
__ nop();
__ bind(&here);
__ dsll(at, a0, 3);
__ daddu(at, at, ra);
__ ld(at, MemOperand(at, 5 * Assembler::kInstrSize));
__ jr(at);
__ nop();
for (int i = 0; i < kNumCases; ++i) {
__ dd(&labels[i]);
}
}
__ bind(&done);
__ ld(ra, MemOperand(sp));
__ daddiu(sp, sp, 8);
__ jr(ra);
__ nop();
CodeDesc desc;
assm.GetCode(&desc);
Handle<Code> code = isolate->factory()->NewCode(
desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
// code->Print(std::cout);
#endif
F1 f = FUNCTION_CAST<F1>(code->entry());
for (int i = 0; i < kNumCases; ++i) {
Handle<Object> result(CALL_GENERATED_CODE(f, i, 0, 0, 0, 0), isolate);
#ifdef OBJECT_PRINT
::printf("f(%d) = ", i);
result->Print(std::cout);
::printf("\n");
#endif
CHECK(values[i].is_identical_to(result));
}
}
#undef __