
// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }

bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(MIPS_SIMD); }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm64_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}
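
// Usage sketch (illustrative only, not part of the original file): an Operand
// wraps either a register or a 64-bit immediate plus its relocation mode.
//
//   Operand reg_op(a0);               // register operand, is_reg() == true
//   Operand imm_op(int64_t{42});      // untagged immediate, no reloc info
//   Operand smi_op(Smi::FromInt(7));  // Smi immediate
//
// MacroAssembler helpers accept either form and choose register or immediate
// encodings accordingly.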


// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    Assembler::RelocateInternalReference(rmode_, p, delta);
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  // return reinterpret_cast<Address>(
  //  pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}
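
// Illustrative note (assumption based on the MIPS64 constant-loading pattern):
// a 64-bit target address is materialized by a fixed-length lui/ori/dsll/ori
// style sequence, so kInstructionsFor64BitConstant * kInstrSize above is the
// size of the patchable region and the returned address is the first byte
// after it.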


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}

Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}

void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}

Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

  uint64_t imm28 =
      (reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}
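
// Worked example (illustrative): for a target at offset 0x2345678 within the
// current 256 MB region, imm28 = 0x2345678 and imm26 = imm28 >> 2 = 0x8D159E;
// that value is OR-ed into the low 26 bits of the existing j/jal instruction.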


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory::Address_at(pc) = target;
  }
}

HeapObject* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return HeapObject::cast(
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<HeapObject>(
      reinterpret_cast<HeapObject**>(Assembler::target_address_at(pc_, host_)));
}

void RelocInfo::set_target_object(HeapObject* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(target->GetIsolate(), pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  target);
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment =
        (reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
    return reinterpret_cast<Address>(segment | imm28);
  }
}
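
// Note: decoding above is the inverse of the encoding in
// set_target_internal_reference_encoded_at(); the 26-bit jump field is shifted
// left by 2 to recover imm28, and the upper bits come from pc_, since j/jal
// can only reach targets inside the same 256 MB aligned region.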


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Isolate* isolate, Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(isolate, target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;

Handle<Code> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS64.
  return Handle<Code>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(stub->GetIsolate(),
                                   pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}


Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}

void RelocInfo::set_debug_call_address(Isolate* isolate, Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate, pc_, host_, target);
  if (host() != NULL) {
    Code* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  target_code);
  }
}

void RelocInfo::WipeOut(Isolate* isolate) {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
  } else {
    Assembler::set_target_address_at(isolate, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(host(), this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(host(), this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCellPointer(host(), this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(host(), this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(host(), this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(host(), this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(host(), this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(host(), this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}
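
// Note (descriptive): buffer_space() measures the gap between pc_ and the
// relocation info written downward from the end of the buffer; growing the
// buffer while at least kGap bytes remain leaves room for the instructions
// emitted before the next check.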


void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
    CheckTrampolinePool();
  }
}
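
// Note (assumption about intent): next_buffer_check_ marks the pc_offset() at
// which the trampoline pool has to be reconsidered; extra_instructions lets a
// caller that is about to emit several instructions trigger the check early,
// so out-of-range branches can still be routed through a trampoline.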


void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}
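
// Background (illustrative): MIPSR6 compact branches have no delay slot, but
// the instruction in the "forbidden slot" right after them must not be another
// control-transfer instruction. The padding used above is the canonical nop,
// SPECIAL | SLL, i.e. sll zero_reg, zero_reg, 0, which encodes as 0x00000000.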


void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

template <>
inline void Assembler::EmitHelper(uint8_t x);

template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}
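
// Note: the uint8_t specialization only polls the trampoline pool when pc_
// ends up back on a kInstrSize boundary, so trampoline code is emitted at
// instruction-aligned positions only.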

void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}


void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_