// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/jump-table-assembler.h"

#include "src/codegen/assembler-inl.h"
#include "src/codegen/macro-assembler-inl.h"

namespace v8 {
namespace internal {
namespace wasm {

// The implementation is compact enough to implement it inline here. If it gets
// much bigger, we might want to split it in a separate file per architecture.
#if V8_TARGET_ARCH_X64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Use a push, because mov to an extended register takes 6 bytes.
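  // For illustration, the emitted sequence is roughly:
  //   68 xx xx xx xx   push imm32  (func_index; small indices may use the
  //                                 2-byte push imm8 form instead)
  //   E9 xx xx xx xx   jmp rel32   (to lazy_compile_target)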
  pushq(Immediate(func_index));       // max 5 bytes
  EmitJumpSlot(lazy_compile_target);  // always 5 bytes
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  // On x64, all code is allocated within a single code section, so we can use
  // relative jumps.
  static_assert(kMaxWasmCodeMemory <= size_t{2} * GB, "can use relative jump");
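  // The rel32 offset of a near jmp is encoded relative to the end of the
  // 5-byte instruction, hence the {kNearJmpInstrSize} adjustment below.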
  intptr_t displacement = static_cast<intptr_t>(
      reinterpret_cast<byte*>(target) - pc_ - kNearJmpInstrSize);
  near_jmp(displacement, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  Nop(bytes);
}

#elif V8_TARGET_ARCH_IA32
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  mov(kWasmCompileLazyFuncIndexRegister, func_index);  // 5 bytes
  jmp(lazy_compile_target, RelocInfo::NONE);  // 5 bytes
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  jmp(target, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  Nop(bytes);
}

#elif V8_TARGET_ARCH_ARM
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Load function index to a register.
  // This generates [movw, movt] on ARMv7 and later, [ldr, constant pool marker,
  // constant] on ARMv6.
  Move32BitImmediate(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  // EmitJumpSlot emits either [b], [movw, movt, mov] (ARMv7+), or [ldr,
  // constant].
  // In total, this is <=5 instructions on all architectures.
  // TODO(arm): Optimize this for code size; lazy compile is not performance
  // critical, as it's only executed once per function.
  EmitJumpSlot(lazy_compile_target);
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
  CheckConstPool(true, false);  // force emit of const pool
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  // Note that {Move32BitImmediate} emits [ldr, constant] for the relocation
  // mode used below; we need this to allow concurrent patching of this slot.
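  // Loading into {pc} performs the branch; with the ldr-literal form the
  // target address lives in the constant pool, so patching the slot only
  // means rewriting that (aligned) 32-bit word.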
  Move32BitImmediate(pc, Operand(target, RelocInfo::WASM_CALL));
  CheckConstPool(true, false);  // force emit of const pool
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  for (; bytes > 0; bytes -= kInstrSize) {
    nop();
  }
}

#elif V8_TARGET_ARCH_ARM64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  Mov(kWasmCompileLazyFuncIndexRegister.W(), func_index);  // max. 2 instr
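  // (An arbitrary 32-bit immediate takes at most movz + movk, hence 2 instrs.)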
  Jump(lazy_compile_target, RelocInfo::NONE);  // 1 instr
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
  CheckConstPool(true, false);  // force emit of const pool
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  // TODO(wasm): Currently this is guaranteed to be a near branch (within
  // {near_call} range) and hence is patchable concurrently. Once
  // {kMaxWasmCodeMemory} is raised on ARM64, make sure concurrent patching is
  // still supported.
  DCHECK(TurboAssembler::IsNearCallOffset(
      (reinterpret_cast<byte*>(target) - pc_) / kInstrSize));
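  // A plain {b} instruction has a signed 26-bit offset in units of 4 bytes,
  // i.e. a range of +-128 MB (see the TODO above on {kMaxWasmCodeMemory}).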

  Jump(target, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  for (; bytes > 0; bytes -= kInstrSize) {
    nop();
  }
}

#elif V8_TARGET_ARCH_S390X
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Load function index to r7. 6 bytes
  lgfi(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  // Jump to {lazy_compile_target}. 6 bytes or 12 bytes
  mov(r1, Operand(lazy_compile_target));
  b(r1);  // 2 bytes
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  mov(r1, Operand(target));
  b(r1);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % 2);
  for (; bytes > 0; bytes -= 2) {
    nop(0);
  }
}

#elif V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  li(kWasmCompileLazyFuncIndexRegister, func_index);  // max. 2 instr
  // Jump produces at most 4 instructions on 32-bit platforms
  // and at most 6 instructions on 64-bit platforms.
  Jump(lazy_compile_target, RelocInfo::NONE);
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  Jump(target, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  for (; bytes > 0; bytes -= kInstrSize) {
    nop();
  }
}

#elif V8_TARGET_ARCH_PPC64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Load function index to register. max 5 instrs
  mov(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  // Jump to {lazy_compile_target}. max 5 instrs
  mov(r0, Operand(lazy_compile_target));
  mtctr(r0);
  bctr();
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  JumpToInstructionStream(builtin_target);
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
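  // PPC has no PC-relative branch that reaches an arbitrary 64-bit target, so
  // materialize the address, move it into the count register, and branch via
  // bctr.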
  mov(r0, Operand(target));
  mtctr(r0);
  bctr();
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % 4);
  for (; bytes > 0; bytes -= 4) {
    nop(0);
  }
}

#else
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  UNIMPLEMENTED();
}

void JumpTableAssembler::EmitRuntimeStubSlot(Address builtin_target) {
  UNIMPLEMENTED();
}

void JumpTableAssembler::EmitJumpSlot(Address target) { UNIMPLEMENTED(); }

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  UNIMPLEMENTED();
}
#endif

}  // namespace wasm
}  // namespace internal
}  // namespace v8