// assembler-mips64-inl.h

// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license above has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {


// Crankshaft requires FPU support on MIPS64.
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }
// WebAssembly 128-bit SIMD requires the MIPS SIMD (MSA) extension.
bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(MIPS_SIMD); }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

// Immediate operand: carries the value in imm64_, with relocation mode.
Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  imm64_ = immediate;
  rmode_ = rmode;
  rm_ = no_reg;
}


// External-reference operand: records the raw address plus the relocation
// mode the assembler needs to patch it later.
Operand::Operand(const ExternalReference& f) {
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
  rm_ = no_reg;
}


// Smi operand: the tagged pointer bits are used directly as the immediate.
Operand::Operand(Smi* value) {
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
  rm_ = no_reg;
}


// Register operand.
Operand::Operand(Register rm) { rm_ = rm; }


// An operand is a register operand iff a valid register was supplied.
bool Operand::is_reg() const { return rm_.is_valid(); }


// -----------------------------------------------------------------------------
// RelocInfo.

91
void RelocInfo::apply(intptr_t delta) {
92
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
93 94
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
95
    Assembler::RelocateInternalReference(rmode_, p, delta);
96 97 98 99 100
  }
}


// Reads the call/jump target encoded in the instruction stream at pc_.
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}


// MIPS64 does not use a constant pool, so there is no entry address.
Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


// Size (in bytes) the serializer accounts for an embedded target address;
// defined by the Assembler for this architecture.
int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}

141 142 143 144 145 146 147 148 149 150 151
Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}

// Convenience overload: resolves the constant pool (if any) from |code|
// before patching the target address at |pc|.
void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}

// Given the return address of a call, computes where the call's target
// address is encoded (kCallTargetAddressOffset bytes earlier).
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


158 159
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
160 161 162 163 164 165 166 167 168 169 170
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

  uint64_t imm28 =
      (reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
171 172 173 174 175
  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}


176
void Assembler::deserialization_set_target_internal_reference_at(
177
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
178
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
179
    DCHECK(IsJ(instr_at(pc)));
180
    set_target_internal_reference_encoded_at(pc, target);
181
  } else {
182
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
183 184 185 186
    Memory::Address_at(pc) = target;
  }
}

187
HeapObject* RelocInfo::target_object() {
188
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
189 190
  return HeapObject::cast(
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_)));
191 192
}

193
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
194
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
195 196
  return Handle<HeapObject>(
      reinterpret_cast<HeapObject**>(Assembler::target_address_at(pc_, host_)));
197 198
}

199
void RelocInfo::set_target_object(HeapObject* target,
200 201
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
202
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
203
  Assembler::set_target_address_at(target->GetIsolate(), pc_, host_,
204 205 206 207 208
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
209 210
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
211
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
212 213 214 215
  }
}


216
Address RelocInfo::target_external_reference() {
217
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
218 219 220 221
  return Assembler::target_address_at(pc_, host_);
}


222
Address RelocInfo::target_internal_reference() {
223 224 225
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
226
    // Encoded internal references are j/jal instructions.
227
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
228 229 230 231 232 233
    Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment =
        (reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
    return reinterpret_cast<Address>(segment | imm28);
234
  }
235 236 237
}


238 239 240
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
241 242 243
}


244
Address RelocInfo::target_runtime_entry(Assembler* origin) {
245
  DCHECK(IsRuntimeEntry(rmode_));
246 247 248
  return target_address();
}

249
void RelocInfo::set_target_runtime_entry(Isolate* isolate, Address target,
250 251
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
252
  DCHECK(IsRuntimeEntry(rmode_));
253
  if (target_address() != target)
254
    set_target_address(isolate, target, write_barrier_mode, icache_flush_mode);
255 256 257 258
}


// Returns a handle to the Cell whose value slot address is stored at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


// Recovers the Cell from the value-slot address stored at pc_.
Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


// Stores the address of |cell|'s value slot at pc_ and, when requested,
// records the write for the incremental marker.
void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


// Byte length of the code-age patching sequence (9 instructions).
static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;

286
Handle<Code> RelocInfo::code_age_stub_handle(Assembler* origin) {
287
  UNREACHABLE();  // This should never be reached on Arm.
288
  return Handle<Code>();
289 290 291 292
}


// Reads the code-age stub from the target address encoded one instruction
// past pc_.
Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


// Patches the code-age sequence to call |stub|. Note: |icache_flush_mode|
// is accepted for interface parity but not forwarded here (matches the
// original behavior).
void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(stub->GetIsolate(),
                                   pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}


308 309 310 311
Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
312 313 314
  return Assembler::target_address_at(pc_, host_);
}

315
void RelocInfo::set_debug_call_address(Isolate* isolate, Address target) {
316 317 318
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
319
  Assembler::set_target_address_at(isolate, pc_, host_, target);
320
  if (host() != NULL) {
321 322 323
    Code* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  target_code);
324 325 326
  }
}

327
void RelocInfo::WipeOut(Isolate* isolate) {
328 329 330 331 332 333
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
334
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
335
  } else {
336
    Assembler::set_target_address_at(isolate, pc_, host_, NULL);
337
  }
338 339
}

340
template <typename ObjectVisitor>
341 342 343
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
344
    visitor->VisitEmbeddedPointer(host(), this);
345
  } else if (RelocInfo::IsCodeTarget(mode)) {
346
    visitor->VisitCodeTarget(host(), this);
347
  } else if (mode == RelocInfo::CELL) {
348
    visitor->VisitCellPointer(host(), this);
349
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
350
    visitor->VisitExternalReference(host(), this);
351 352
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
353
    visitor->VisitInternalReference(host(), this);
354
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
355
    visitor->VisitCodeAgeSequence(host(), this);
356
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
357
             IsPatchedDebugBreakSlotSequence()) {
358
    visitor->VisitDebugTarget(host(), this);
359
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
360
    visitor->VisitRuntimeEntry(host(), this);
361 362 363 364 365 366 367 368 369 370 371 372 373 374 375
  }
}


// Static-visitor variant of the dispatch above; same mode-keyed routing.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


// Grows the assembly buffer once remaining space shrinks to the reserved
// gap.
void Assembler::CheckBuffer() {
  if (buffer_space() > kGap) return;
  GrowBuffer();
}


401 402
void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
403 404 405 406 407
    CheckTrampolinePool();
  }
}


408
void Assembler::CheckForEmitInForbiddenSlot() {
409 410 411
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
412 413 414 415 416 417 418 419 420 421 422 423
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to preceed a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}


// Writes instruction |x| at pc_, first padding a compact branch's forbidden
// slot with a nop when |x| may not follow a compact branch, and tracks
// whether |x| itself is a compact branch.
void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

441 442
template <>
inline void Assembler::EmitHelper(uint8_t x);
443

444 445 446 447 448 449 450
template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

451 452 453 454 455 456 457 458
template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}
459 460

void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
461 462 463
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
464 465
  EmitHelper(x, is_compact_branch);
}
466

467 468 469 470

void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
471 472 473
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_