// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

37 38
#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_
39

40
#include "arm/assembler-arm.h"
41

42
#include "cpu.h"
43
#include "debug.h"
44 45


46 47
namespace v8 {
namespace internal {
48 49


50 51 52 53 54 55 56 57 58 59 60
// Number of general-purpose registers the register allocator may use.
// Without VFP2, doubles are emulated in a pair of core registers, so
// that pair is withheld from the allocatable set.
int Register::NumAllocatableRegisters() {
  return CpuFeatures::IsSupported(VFP2)
      ? kMaxNumAllocatableRegisters
      : kMaxNumAllocatableRegisters - kGPRsPerNonVFP2Double;
}


int DwVfpRegister::NumRegisters() {
  if (CpuFeatures::IsSupported(VFP2)) {
61
    return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
62 63 64 65 66 67 68 69
  } else {
    return 1;
  }
}


int DwVfpRegister::NumAllocatableRegisters() {
  if (CpuFeatures::IsSupported(VFP2)) {
70
    return NumRegisters() - kNumReservedRegisters;
71 72 73 74 75 76
  } else {
    return 1;
  }
}


77 78 79
// Maps a double register to its dense allocation index by skipping over the
// reserved registers (kDoubleRegZero and kScratchDoubleReg). Inverse of
// FromAllocationIndex.
// (Fix: removed scraped line-number artifacts embedded in the body.)
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kScratchDoubleReg));
  // Registers above the reserved block shift down to close the gap.
  if (reg.code() > kDoubleRegZero.code()) {
    return reg.code() - kNumReservedRegisters;
  }
  return reg.code();
}


87 88 89 90 91 92 93 94 95 96 97
// Maps a dense allocation index back to a double register, re-inserting the
// gap occupied by the reserved registers. Inverse of ToAllocationIndex.
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  ASSERT(index >= 0 && index < NumAllocatableRegisters());
  // The reserved block must be contiguous for the gap arithmetic to hold.
  ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  const bool past_reserved = index >= kDoubleRegZero.code();
  return from_code(past_reserved ? index + kNumReservedRegisters : index);
}


98
void RelocInfo::apply(intptr_t delta) {
99
  if (RelocInfo::IsInternalReference(rmode_)) {
100 101 102 103 104 105
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  }
  // We do not use pc relative addressing on ARM, so there is
  // nothing else to do.
106 107 108 109
}


// Returns the call/jump target encoded at this relocation site.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


115
// Returns the address of the memory word that holds the target (the
// constant-pool slot on ARM), i.e. where a serializer must patch.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
}


123
int RelocInfo::target_address_size() {
124
  return kPointerSize;
125 126 127
}


128
// Patches the target address at this site and, unless the caller opts out,
// records the code-to-code reference with the incremental marker so the GC
// sees the new edge.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


// Returns the heap object embedded at this relocation site.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
}


145
// Returns a handle wrapping the embedded object pointer. The |origin|
// assembler parameter is unused on ARM but kept for cross-platform parity.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_pointer_at(pc_)));
}


// Returns a slot-style pointer to the embedded object for heap iteration.
// The pointer is reconstructed into a member so a stable Object** can be
// handed out; it is only valid until the next call on this RelocInfo.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be de-referenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
  return &reconstructed_obj_ptr_;
}


162
// Replaces the embedded object pointer and notifies the incremental marker
// (write barrier) when the new target is a heap object.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


// Returns a pointer to the external reference stored at this site. As with
// target_object_address(), the value is staged in a member so a stable
// Address* can be returned.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}


181 182 183 184 185 186 187 188 189 190 191 192 193
// Runtime entries are plain addresses; on ARM they are encoded exactly like
// any other target address. |origin| is unused here (cross-platform API).
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}


// Repoints a runtime entry; skips the patch (and its write barrier /
// icache work) when the stored target is already |target|.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() == target) return;
  set_target_address(target, mode);
}


194 195 196 197 198 199 200 201 202 203
// Returns a handle to the global property cell referenced at this site.
// The word at pc_ holds the address of the cell's value slot.
Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  JSGlobalPropertyCell** cell_location =
      reinterpret_cast<JSGlobalPropertyCell**>(Memory::Address_at(pc_));
  return Handle<JSGlobalPropertyCell>(cell_location);
}


// Returns the global property cell referenced at this site, reconstructed
// from the stored value-slot address.
// (Fix: removed scraped line-number artifacts embedded in the body.)
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}


208 209
// Stores the address of |cell|'s value slot at this site and records the
// reference with the incremental marker unless the caller opts out.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239
// Length (in instructions) of the young-code prologue sequence; the stub
// address is stored in the final word of that sequence.
static const int kNoCodeAgeSequenceLength = 3;

// Returns the code-aging stub whose address is embedded at the end of the
// age sequence starting at pc_.
Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Address stub_slot =
      pc_ + (kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize;
  return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_slot));
}


// Patches the stub address embedded at the end of the code-age sequence.
void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Address stub_slot =
      pc_ + (kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize;
  Memory::Address_at(stub_slot) = stub->instruction_start();
}


240
// Returns the debugger call target stored two instructions past pc_.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}


// Patches the debugger call target (two instructions past pc_) and records
// the resulting code-to-code reference with the incremental marker.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


// Returns the object stored in the patched call sequence's object slot.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Object* RelocInfo::call_object() {
  return *call_object_address();
}


266 267
// Stores |target| into the patched call sequence's object slot.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


271 272 273 274
// Returns the slot (two instructions past pc_) used by patched debug-break
// and JS-return sequences to hold an object pointer.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


278
bool RelocInfo::IsPatchedReturnSequence() {
279 280 281 282 283 284 285 286 287 288 289 290 291 292 293
  Instr current_instr = Assembler::instr_at(pc_);
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
#ifdef USE_BLX
  // A patched return sequence is:
  //  ldr ip, [pc, #0]
  //  blx ip
  return ((current_instr & kLdrPCMask) == kLdrPCPattern)
          && ((next_instr & kBlxRegMask) == kBlxRegPattern);
#else
  // A patched return sequence is:
  //  mov lr, pc
  //  ldr pc, [pc, #-4]
  return (current_instr == kMovLrPc)
          && ((next_instr & kLdrPCMask) == kLdrPCPattern);
#endif
294 295 296
}


297 298
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
299
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
300 301 302
}


303 304 305
// Dispatches this relocation entry to the matching ObjectVisitor callback
// based on its mode. Debug targets are only visited when the site has been
// patched and break points are active.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


330
template<typename StaticVisitor>
331
void RelocInfo::Visit(Heap* heap) {
332 333
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
334
    StaticVisitor::VisitEmbeddedPointer(heap, this);
335
  } else if (RelocInfo::IsCodeTarget(mode)) {
336
    StaticVisitor::VisitCodeTarget(heap, this);
337
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
338
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
339
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
340
    StaticVisitor::VisitExternalReference(this);
341 342
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
343
#ifdef ENABLE_DEBUGGER_SUPPORT
344
  } else if (heap->isolate()->debug()->has_break_points() &&
345 346 347 348
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
349
    StaticVisitor::VisitDebugTarget(heap, this);
350
#endif
351
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
352 353 354 355 356
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


357
// Immediate operand with an explicit relocation mode.
// (Fix: removed scraped line-number artifacts; normalized stray whitespace
// before the brace.)
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;  // no register: this is a pure immediate
  imm32_ = immediate;
  rmode_ = rmode;
}


// Immediate operand holding an external (C++) address; tagged with
// EXTERNAL_REFERENCE so the serializer can relocate it.
// (Fix: removed scraped line-number artifacts; normalized stray whitespace
// before the brace.)
Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


// Immediate operand holding a Smi. Smis are immediates by value, so no
// relocation is required (NONE32).
// (Fix: removed scraped line-number artifacts; normalized double space in
// the assignment.)
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


// Plain register operand: no shift applied (LSL #0 is the identity).
Operand::Operand(Register rm)
    : rm_(rm),
      rs_(no_reg),
      shift_op_(LSL),
      shift_imm_(0) {
}


386 387 388 389 390 391 392 393
// True when this operand is a bare register: a valid rm_, no shift
// register, and the identity shift (LSL #0).
bool Operand::is_reg() const {
  if (!rm_.is_valid()) return false;
  if (!rs_.is(no_reg)) return false;
  return shift_op_ == LSL && shift_imm_ == 0;
}


394 395 396 397
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
lrn@chromium.org's avatar
lrn@chromium.org committed
398
  if (pc_offset() >= next_buffer_check_) {
399 400 401 402 403 404 405 406 407 408 409 410
    CheckConstPool(false, true);
  }
}


// Appends one instruction word at the current emission point.
void Assembler::emit(Instr x) {
  CheckBuffer();
  Instr* slot = reinterpret_cast<Instr*>(pc_);
  *slot = x;
  pc_ += kInstrSize;
}


411
// Computes the address of the constant-pool slot referenced by the
// ldr-pc-relative instruction at (or just before) |pc|, skipping back over
// a trailing bx/blx if present. The +8 accounts for the ARM pipeline: a
// pc-relative load reads relative to the instruction address plus 8.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address Assembler::target_pointer_address_at(Address pc) {
  Address target_pc = pc;
  Instr instr = Memory::int32_at(target_pc);
  // If we have a bx instruction, the instruction before the bx is
  // what we need to patch.
  static const int32_t kBxInstMask = 0x0ffffff0;
  static const int32_t kBxInstPattern = 0x012fff10;
  if ((instr & kBxInstMask) == kBxInstPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

#ifdef USE_BLX
  // If we have a blx instruction, the instruction before it is
  // what needs to be patched.
  if ((instr & kBlxRegMask) == kBlxRegPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }
#endif

  ASSERT(IsLdrPcImmediateOffset(instr));
  int offset = instr & 0xfff;  // offset_12 is unsigned
  if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit defines offset sign
  // Verify that the constant pool comes after the instruction referencing it.
  ASSERT(offset >= -4);
  return target_pc + offset + 8;
}


441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497
// Reads the pointer materialized at |pc|: either reassembled from a
// movw/movt immediate pair, or loaded from the constant pool.
Address Assembler::target_pointer_at(Address pc) {
  if (IsMovW(Memory::int32_at(pc))) {
    // movw holds the low 16 bits, the following movt holds the high 16.
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* movw = Instruction::At(pc);
    Instruction* movt = Instruction::At(pc + kInstrSize);
    uint32_t combined =
        (movt->ImmedMovwMovtValue() << 16) | movw->ImmedMovwMovtValue();
    return reinterpret_cast<Address>(combined);
  }
  // Otherwise the pointer lives in the constant pool.
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
#ifdef USE_BLX
  // Call sequence on V7 or later is :
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or pre-V7 or cases that need frequent patching:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Try the two-instruction (ldr/blx) form first, then fall back to the
  // three-instruction (movw/movt/blx) form.
  Address ldr_site = pc - 2 * Assembler::kInstrSize;
  if (IsLdrPcImmediateOffset(Memory::int32_at(ldr_site))) {
    return ldr_site;
  }
  Address movw_site = pc - 3 * Assembler::kInstrSize;
  ASSERT(IsMovW(Memory::int32_at(movw_site)) &&
         IsMovT(Memory::int32_at(movw_site + kInstrSize)));
  return movw_site;
#else
  // Call sequence is:
  //  mov  lr, pc
  //  ldr  pc, [pc, #...] @ call address
  //                      @ return address
  return pc - kInstrSize;
#endif
}


// Given the first instruction of a call sequence, returns the address the
// call will return to: two instructions past an ldr/blx sequence, three past
// a movw/movt/blx sequence, one past a mov-lr/ldr-pc sequence.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address Assembler::return_address_from_call_start(Address pc) {
#ifdef USE_BLX
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
    return pc + kInstrSize * 2;
  } else {
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    return pc + kInstrSize * 3;
  }
#else
  return pc + kInstrSize;
#endif
}


501 502 503 504 505 506 507 508
// Used by the deserializer: writes |target| directly into the given
// constant-pool slot (no instruction patching or icache flush needed).
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


// Writes an external target address directly into a constant-pool slot.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void Assembler::set_external_target_at(Address constant_pool_entry,
                                       Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550
// Encodes a 16-bit immediate into the movw/movt instruction layout:
// imm4 lands in bits 19:16 and imm12 in bits 11:0.
static Instr EncodeMovwImmediate(uint32_t immediate) {
  ASSERT(immediate < 0x10000);
  uint32_t imm4_field = (immediate & 0xf000) << 4;
  uint32_t imm12_field = immediate & 0xfff;
  return imm4_field | imm12_field;
}


// Patches the pointer materialized at |pc|. For a movw/movt pair the two
// 16-bit immediate fields are rewritten in place and the icache is flushed;
// for an ldr-from-constant-pool only the pool slot is updated, so no flush
// is required.
void Assembler::set_target_pointer_at(Address pc, Address target) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // Clear the old immediate fields (EncodeMovwImmediate(0xFFFF) is a mask
    // covering both fields), then splice in the new low/high halves.
    uint32_t intermediate = instr_ptr[0];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
    instr_ptr[0] = intermediate;
    intermediate = instr_ptr[1];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate >> 16);
    instr_ptr[1] = intermediate;
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    CPU::FlushICache(pc, 2 * kInstrSize);
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CPU::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pc, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  }
}


// On ARM, target addresses and target pointers share one representation.
// (Fix: removed scraped line-number artifacts embedded in the body.)
Address Assembler::target_address_at(Address pc) {
  return target_pointer_at(pc);
}


555
// Counterpart of target_address_at; delegates to the pointer patcher.
// (Fix: removed scraped line-number artifacts embedded in the body.)
void Assembler::set_target_address_at(Address pc, Address target) {
  set_target_pointer_at(pc, target);
}

559

560 561
} }  // namespace v8::internal

562
#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_