assembler-x64-inl.h 13.4 KB
Newer Older
1
// Copyright 2012 the V8 project authors. All rights reserved.
2 3
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4

5 6 7
#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

8
#include "src/x64/assembler-x64.h"
9

10
#include "src/base/cpu.h"
11
#include "src/common/v8memory.h"
12
#include "src/debug/debug.h"
13
#include "src/objects/objects-inl.h"
14

15 16
namespace v8 {
namespace internal {
17

18
// The optimizing compiler is unconditionally available on x64.
bool CpuFeatures::SupportsOptimizer() { return true; }
19

20
// Wasm 128-bit SIMD support requires the CPU to provide SSE4.1.
bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(SSE4_1); }
21

22 23 24 25
// -----------------------------------------------------------------------------
// Implementation of Assembler


26
void Assembler::emitl(uint32_t x) {
27
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
28 29
  pc_ += sizeof(uint32_t);
}
30

31
void Assembler::emitq(uint64_t x) {
32
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
lrn@chromium.org's avatar
lrn@chromium.org committed
33
  pc_ += sizeof(uint64_t);
34 35
}

36
void Assembler::emitw(uint16_t x) {
37
  WriteUnalignedValue(reinterpret_cast<Address>(pc_), x);
38 39 40
  pc_ += sizeof(uint16_t);
}

41
// Records relocation info for a runtime entry, then emits its 32-bit offset
// relative to the start of the code range.
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  const uint32_t offset =
      static_cast<uint32_t>(entry - options().code_range_start);
  emitl(offset);
}

47 48 49 50 51 52
// Emits a 32-bit immediate, preceded by relocation info when the immediate
// carries a relocation mode.
void Assembler::emit(Immediate x) {
  if (!RelocInfo::IsNone(x.rmode_)) RecordRelocInfo(x.rmode_);
  emitl(x.value_);
}
53

54 55 56 57 58 59 60
// Emits a 64-bit immediate, preceded by relocation info when the immediate
// carries a relocation mode.
void Assembler::emit(Immediate64 x) {
  if (!RelocInfo::IsNone(x.rmode_)) RecordRelocInfo(x.rmode_);
  emitq(static_cast<uint64_t>(x.value_));
}

61
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
62
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
63 64
}

65 66 67 68
void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

69 70 71 72
void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

73 74 75 76
void Assembler::emit_rex_64(XMMRegister reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}

77
void Assembler::emit_rex_64(Register reg, Operand op) {
78
  emit(0x48 | reg.high_bit() << 2 | op.data().rex);
79 80
}

81
void Assembler::emit_rex_64(XMMRegister reg, Operand op) {
82
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.data().rex);
83 84 85
}


86
void Assembler::emit_rex_64(Register rm_reg) {
87
  DCHECK_EQ(rm_reg.code() & 0xf, rm_reg.code());
88
  emit(0x48 | rm_reg.high_bit());
89 90
}

91
void Assembler::emit_rex_64(Operand op) { emit(0x48 | op.data().rex); }
92

93
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
94
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
95 96
}

97
void Assembler::emit_rex_32(Register reg, Operand op) {
98
  emit(0x40 | reg.high_bit() << 2 | op.data().rex);
99 100 101
}


102
void Assembler::emit_rex_32(Register rm_reg) {
103
  emit(0x40 | rm_reg.high_bit());
104 105
}

106
void Assembler::emit_rex_32(Operand op) { emit(0x40 | op.data().rex); }
107

108
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
109
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
110
  if (rex_bits != 0) emit(0x40 | rex_bits);
111 112
}

113
void Assembler::emit_optional_rex_32(Register reg, Operand op) {
114
  byte rex_bits = reg.high_bit() << 2 | op.data().rex;
115 116 117
  if (rex_bits != 0) emit(0x40 | rex_bits);
}

118
void Assembler::emit_optional_rex_32(XMMRegister reg, Operand op) {
119
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.data().rex;
120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


136 137 138 139 140 141
void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


142
void Assembler::emit_optional_rex_32(Register rm_reg) {
143
  if (rm_reg.high_bit()) emit(0x41);
144 145
}

146 147
void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
148 149
}

150
void Assembler::emit_optional_rex_32(Operand op) {
151
  if (op.data().rex != 0) emit(0x40 | op.data().rex);
152 153 154
}


155 156 157
// byte 1 of 3-byte VEX
// Layout: ~R ~X ~B m-mmmm. The register-extension bits are stored inverted
// in the top three bit positions; |m| selects the implied leading opcode.
void Assembler::emit_vex3_byte1(XMMRegister reg, XMMRegister rm,
                                LeadingOpcode m) {
  byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.high_bit())) << 5;
  emit(rxb | m);
}

// byte 1 of 3-byte VEX
// Memory-operand variant: the operand carries its REX (X/B) bits premixed.
void Assembler::emit_vex3_byte1(XMMRegister reg, Operand rm, LeadingOpcode m) {
  byte rxb = static_cast<byte>(~((reg.high_bit() << 2) | rm.data().rex)) << 5;
  emit(rxb | m);
}

// byte 1 of 2-byte VEX
// Layout: ~R ~vvvv L pp. The R bit and the second-source register specifier
// (vvvv) are stored inverted, followed by vector length and SIMD prefix.
void Assembler::emit_vex2_byte1(XMMRegister reg, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  byte rv = static_cast<byte>(~((reg.high_bit() << 4) | v.code())) << 3;
  emit(rv | l | pp);
}

// byte 2 of 3-byte VEX
// Layout: W ~vvvv L pp.
void Assembler::emit_vex3_byte2(VexW w, XMMRegister v, VectorLength l,
                                SIMDPrefix pp) {
  emit(w | ((~v.code() & 0xf) << 3) | l | pp);
}


void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg,
                                XMMRegister rm, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (rm.high_bit() || mm != k0F || w != kW0) {
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}


199 200 201
void Assembler::emit_vex_prefix(Register reg, Register vreg, Register rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
202 203 204
  XMMRegister ireg = XMMRegister::from_code(reg.code());
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
  XMMRegister irm = XMMRegister::from_code(rm.code());
205 206 207
  emit_vex_prefix(ireg, ivreg, irm, l, pp, mm, w);
}

208 209 210
void Assembler::emit_vex_prefix(XMMRegister reg, XMMRegister vreg, Operand rm,
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
211
  if (rm.data().rex || mm != k0F || w != kW0) {
212 213 214 215 216 217 218 219 220
    emit_vex3_byte0();
    emit_vex3_byte1(reg, rm, mm);
    emit_vex3_byte2(w, vreg, l, pp);
  } else {
    emit_vex2_byte0();
    emit_vex2_byte1(reg, vreg, l, pp);
  }
}

221
void Assembler::emit_vex_prefix(Register reg, Register vreg, Operand rm,
222 223
                                VectorLength l, SIMDPrefix pp, LeadingOpcode mm,
                                VexW w) {
224 225
  XMMRegister ireg = XMMRegister::from_code(reg.code());
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
226 227 228 229
  emit_vex_prefix(ireg, ivreg, rm, l, pp, mm, w);
}


230
// Reads the 32-bit pc-relative displacement at |pc| and converts it to an
// absolute address (relative to the end of the 4-byte displacement field).
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  const int32_t displacement = ReadUnalignedValue<int32_t>(pc);
  return pc + 4 + displacement;
}

234 235
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
236
                                      ICacheFlushMode icache_flush_mode) {
237
  WriteUnalignedValue(pc, static_cast<int32_t>(target - pc - 4));
238
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
239
    FlushInstructionCache(pc, sizeof(int32_t));
240
  }
241 242
}

243
// Writes an absolute internal reference at |pc| during deserialization.
// |mode| is unused on x64 since internal references are stored full-width.
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  WriteUnalignedValue(pc, target);
}

248
// Patches a specially-coded target during deserialization, resolving the
// constant pool address from |code| when one is available.
void Assembler::deserialization_set_special_target_at(
    Address instruction_payload, Code code, Address target) {
  const Address constant_pool =
      code.is_null() ? kNullAddress : code.constant_pool();
  set_target_address_at(instruction_payload, constant_pool, target);
}
254

255 256 257 258 259
// Size in bytes of a specially-coded target payload (see kSpecialTargetSize).
int Assembler::deserialization_special_target_size(
    Address instruction_payload) {
  return kSpecialTargetSize;
}

260
// Resolves the Code handle for the 32-bit code-target index stored at |pc|.
Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
  const int32_t index = ReadUnalignedValue<int32_t>(pc);
  return GetCodeTarget(index);
}
263

264 265 266 267
// Resolves the handle for the 32-bit compressed-embedded-object index stored
// at |pc|.
Handle<HeapObject> Assembler::compressed_embedded_object_handle_at(Address pc) {
  const int32_t index = ReadUnalignedValue<int32_t>(pc);
  return GetCompressedEmbeddedObject(index);
}

268
// Reconstructs an absolute runtime-entry address from the 32-bit offset at
// |pc|, which is relative to the start of the code range.
Address Assembler::runtime_entry_at(Address pc) {
  const int32_t offset = ReadUnalignedValue<int32_t>(pc);
  return options().code_range_start + offset;
}

272 273 274
// -----------------------------------------------------------------------------
// Implementation of RelocInfo

275
// The modes possibly affected by apply must be in kApplyMask.
276 277
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
278 279
    WriteUnalignedValue(
        pc_, ReadUnalignedValue<int32_t>(pc_) - static_cast<int32_t>(delta));
280
  } else if (IsInternalReference(rmode_)) {
281 282
    // Absolute code pointer inside code object moves with the code object.
    WriteUnalignedValue(pc_, ReadUnalignedValue<Address>(pc_) + delta);
283 284 285 286
  }
}


287
// Absolute target of the pc-relative reference at |pc_|
// (see Assembler::target_address_at).
Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

// Address of the relocation slot itself; valid for every mode that stores
// its target (or target index) directly at |pc_|.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsWasmStubCall(rmode_) || IsFullEmbeddedObject(rmode_) ||
         IsCompressedEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  return pc_;
}


301 302 303 304 305
// x64 stores relocation targets inline; there is no constant pool entry.
Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
}


306 307
int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
308
    return Assembler::kSpecialTargetSize;
309
  } else {
310 311
    return IsCompressedEmbeddedObject(rmode_) ? kTaggedSize
                                              : kSystemPointerSize;
312 313 314
  }
}

315
// Returns the HeapObject referenced at |pc_|. Compressed slots are
// decompressed using the host object's pointer as the base.
HeapObject RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    CHECK(!host_.is_null());
    const Tagged_t compressed = ReadUnalignedValue<Tagged_t>(pc_);
    Object obj =
        static_cast<Object>(DecompressTaggedPointer(host_.ptr(), compressed));
    return HeapObject::cast(obj);
  }
  return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
}

// Like target_object(), but decompresses via the isolate instead of the
// host object, for callers that have no host.
HeapObject RelocInfo::target_object_no_host(Isolate* isolate) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (!IsCompressedEmbeddedObject(rmode_)) {
    return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_)));
  }
  const Tagged_t compressed = ReadUnalignedValue<Tagged_t>(pc_);
  DCHECK(!HAS_SMI_TAG(compressed));
  Object obj(DecompressTaggedPointer(isolate, compressed));
  return HeapObject::cast(obj);
}

337
// Returns a handle for the object referenced at |pc_|, dispatching on how
// the reference is encoded (code-target index, compressed index, or a raw
// handle location stored inline).
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCodeTarget(rmode_)) {
    return origin->code_target_object_handle_at(pc_);
  }
  if (IsCompressedEmbeddedObject(rmode_)) {
    return origin->compressed_embedded_object_handle_at(pc_);
  }
  return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_));
}

349
// Reads the absolute external-reference address stored at |pc_|.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}

354 355 356
// Overwrites the external reference stored at |pc_| with |target|,
// optionally flushing the instruction cache afterwards.
void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  WriteUnalignedValue(pc_, target);
  if (icache_flush_mode == SKIP_ICACHE_FLUSH) return;
  FlushInstructionCache(pc_, sizeof(Address));
}
362

363 364
// Reads the absolute internal-reference address stored at |pc_|.
Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return ReadUnalignedValue<Address>(pc_);
}


369
// Address of the slot holding the internal reference (i.e. |pc_| itself).
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

374
// Patches the embedded-object slot at |pc_| to refer to |target|.
// Compressed slots store the compressed tagged value; full slots store the
// raw pointer. Afterwards the instruction cache is flushed (unless skipped)
// and the write barrier is notified so the GC sees the new reference.
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_));
  if (IsCompressedEmbeddedObject(rmode_)) {
    // Compression is only valid when pointer compression is enabled.
    DCHECK(COMPRESS_POINTERS_BOOL);
    Tagged_t tagged = CompressTagged(target.ptr());
    WriteUnalignedValue(pc_, tagged);
  } else {
    WriteUnalignedValue(pc_, target.ptr());
  }
  if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
    FlushInstructionCache(pc_, sizeof(Address));
  }
  // Skip the barrier when the caller asked for it or there is no host
  // code object to record the slot against.
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
    WriteBarrierForCode(host(), this, target);
  }
}

393
// Resolves the runtime entry referenced at |pc_| via the assembler's
// code-range-relative offset decoding.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}

398
void RelocInfo::set_target_runtime_entry(Address target,
399 400
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
401
  DCHECK(IsRuntimeEntry(rmode_));
402
  if (target_address() != target) {
403
    set_target_address(target, write_barrier_mode, icache_flush_mode);
404
  }
405 406
}

407 408
// Reads the absolute off-heap target address stored at |pc_|.
Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return ReadUnalignedValue<Address>(pc_);
}

412
// Overwrites the relocation target with a neutral value so no stale pointer
// remains, choosing a wipe value appropriate for each encoding.
void RelocInfo::WipeOut() {
  if (IsFullEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
      IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)) {
    // Full-width slots are simply nulled out.
    WriteUnalignedValue(pc_, kNullAddress);
  } else if (IsCompressedEmbeddedObject(rmode_)) {
    // Compressed slots must remain a valid tagged value; use Smi zero.
    Address smi_address = Smi::FromInt(0).ptr();
    WriteUnalignedValue(pc_, CompressTagged(smi_address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // Effectively write zero into the relocation: pointing the pc-relative
    // field just past itself yields a zero displacement.
    Assembler::set_target_address_at(pc_, constant_pool_,
                                     pc_ + sizeof(int32_t));
  } else {
    UNREACHABLE();
  }
}

428 429
}  // namespace internal
}  // namespace v8
430 431

#endif  // V8_X64_ASSEMBLER_X64_INL_H_