// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins.h"

#include "src/api/api-inl.h"
#include "src/builtins/builtins-descriptors.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/callable.h"
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/macro-assembler.h"
#include "src/diagnostics/code-tracer.h"
#include "src/execution/isolate.h"
#include "src/interpreter/bytecodes.h"
#include "src/logging/code-events.h"  // For CodeCreateEvent.
#include "src/logging/log.h"          // For Logger.
#include "src/objects/fixed-array.h"
#include "src/objects/objects-inl.h"
#include "src/objects/visitors.h"
#include "src/snapshot/embedded/embedded-data.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {

// Forward declarations for C++ builtins.
#define FORWARD_DECLARE(Name) \
  Address Builtin_##Name(int argc, Address* args, Isolate* isolate);
BUILTIN_LIST_C(FORWARD_DECLARE)
#undef FORWARD_DECLARE

namespace {

// TODO(jgruber): Pack in CallDescriptors::Key.
struct BuiltinMetadata {
  const char* name;
  Builtins::Kind kind;

  struct BytecodeAndScale {
    interpreter::Bytecode bytecode : 8;
    interpreter::OperandScale scale : 8;
  };

  STATIC_ASSERT(sizeof(interpreter::Bytecode) == 1);
  STATIC_ASSERT(sizeof(interpreter::OperandScale) == 1);
  STATIC_ASSERT(sizeof(BytecodeAndScale) <= sizeof(Address));

  // The `data` field has kind-specific contents.
  union KindSpecificData {
    // TODO(jgruber): Union constructors are needed since C++11 does not support
    // designated initializers (e.g.: {.parameter_count = count}). Update once
    // we're at C++20 :)
    // The constructors are marked constexpr to avoid the need for a static
    // initializer for builtins.cc (see check-static-initializers.sh).
    constexpr KindSpecificData() : cpp_entry(kNullAddress) {}
    constexpr KindSpecificData(Address cpp_entry) : cpp_entry(cpp_entry) {}
    constexpr KindSpecificData(int parameter_count,
                               int /* To disambiguate from above */)
        : parameter_count(static_cast<int16_t>(parameter_count)) {}
    constexpr KindSpecificData(interpreter::Bytecode bytecode,
                               interpreter::OperandScale scale)
        : bytecode_and_scale{bytecode, scale} {}
    Address cpp_entry;                    // For CPP builtins.
    int16_t parameter_count;              // For TFJ builtins.
    BytecodeAndScale bytecode_and_scale;  // For BCH builtins.
  } data;
};

#define DECL_CPP(Name, ...) \
  {#Name, Builtins::CPP, {FUNCTION_ADDR(Builtin_##Name)}},
#define DECL_TFJ(Name, Count, ...) {#Name, Builtins::TFJ, {Count, 0}},
#define DECL_TFC(Name, ...) {#Name, Builtins::TFC, {}},
#define DECL_TFS(Name, ...) {#Name, Builtins::TFS, {}},
#define DECL_TFH(Name, ...) {#Name, Builtins::TFH, {}},
#define DECL_BCH(Name, OperandScale, Bytecode) \
  {#Name, Builtins::BCH, {Bytecode, OperandScale}},
#define DECL_ASM(Name, ...) {#Name, Builtins::ASM, {}},
const BuiltinMetadata builtin_metadata[] = {BUILTIN_LIST(
    DECL_CPP, DECL_TFJ, DECL_TFC, DECL_TFS, DECL_TFH, DECL_BCH, DECL_ASM)};
#undef DECL_CPP
#undef DECL_TFJ
#undef DECL_TFC
#undef DECL_TFS
#undef DECL_TFH
#undef DECL_BCH
#undef DECL_ASM
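
// Illustrative sketch of what the table above expands to; the concrete names
// below are examples only and depend on the current BUILTIN_LIST:
//   {"ArrayPush", Builtins::CPP, {FUNCTION_ADDR(Builtin_ArrayPush)}},
//   {"LdaZeroHandler", Builtins::BCH,
//    {interpreter::Bytecode::kLdaZero, interpreter::OperandScale::kSingle}},
// Entries appear in BUILTIN_LIST order, so builtin_metadata[id] describes the
// builtin with that id.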

}  // namespace

BytecodeOffset Builtins::GetContinuationBytecodeOffset(Name name) {
  DCHECK(Builtins::KindOf(name) == TFJ || Builtins::KindOf(name) == TFC ||
         Builtins::KindOf(name) == TFS);
  return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId + name);
}

Builtins::Name Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
  int builtin_index = id.ToInt() - BytecodeOffset::kFirstBuiltinContinuationId;
  DCHECK(Builtins::KindOf(builtin_index) == TFJ ||
         Builtins::KindOf(builtin_index) == TFC ||
         Builtins::KindOf(builtin_index) == TFS);
  return static_cast<Name>(builtin_index);
}
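
// Descriptive note on the two functions above: they are intended as inverses
// of each other. For a TFJ/TFC/TFS builtin `name`,
//   GetBuiltinFromBytecodeOffset(GetContinuationBytecodeOffset(name)) == name
// since both directions simply offset by kFirstBuiltinContinuationId.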

void Builtins::TearDown() { initialized_ = false; }

const char* Builtins::Lookup(Address pc) {
  // Off-heap pc's can be looked up through binary search.
  Code maybe_builtin = InstructionStream::TryLookupCode(isolate_, pc);
  if (!maybe_builtin.is_null()) return name(maybe_builtin.builtin_index());

  // May be called during initialization (disassembler).
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      if (isolate_->heap()->builtin(i).contains(pc)) return name(i);
    }
  }
  return nullptr;
}

Handle<Code> Builtins::CallFunction(ConvertReceiverMode mode) {
  switch (mode) {
    case ConvertReceiverMode::kNullOrUndefined:
      return builtin_handle(kCallFunction_ReceiverIsNullOrUndefined);
    case ConvertReceiverMode::kNotNullOrUndefined:
      return builtin_handle(kCallFunction_ReceiverIsNotNullOrUndefined);
    case ConvertReceiverMode::kAny:
      return builtin_handle(kCallFunction_ReceiverIsAny);
  }
  UNREACHABLE();
}

Handle<Code> Builtins::Call(ConvertReceiverMode mode) {
  switch (mode) {
    case ConvertReceiverMode::kNullOrUndefined:
      return builtin_handle(kCall_ReceiverIsNullOrUndefined);
    case ConvertReceiverMode::kNotNullOrUndefined:
      return builtin_handle(kCall_ReceiverIsNotNullOrUndefined);
    case ConvertReceiverMode::kAny:
      return builtin_handle(kCall_ReceiverIsAny);
  }
  UNREACHABLE();
}
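
// Descriptive note on the two accessors above: the ConvertReceiverMode only
// selects how much receiver conversion the returned builtin still performs at
// runtime. Callers pass the most specific mode they can prove statically;
// ConvertReceiverMode::kAny is always a correct fallback.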

Handle<Code> Builtins::NonPrimitiveToPrimitive(ToPrimitiveHint hint) {
  switch (hint) {
    case ToPrimitiveHint::kDefault:
      return builtin_handle(kNonPrimitiveToPrimitive_Default);
    case ToPrimitiveHint::kNumber:
      return builtin_handle(kNonPrimitiveToPrimitive_Number);
    case ToPrimitiveHint::kString:
      return builtin_handle(kNonPrimitiveToPrimitive_String);
  }
  UNREACHABLE();
}

Handle<Code> Builtins::OrdinaryToPrimitive(OrdinaryToPrimitiveHint hint) {
  switch (hint) {
    case OrdinaryToPrimitiveHint::kNumber:
      return builtin_handle(kOrdinaryToPrimitive_Number);
    case OrdinaryToPrimitiveHint::kString:
      return builtin_handle(kOrdinaryToPrimitive_String);
  }
  UNREACHABLE();
}

void Builtins::set_builtin(int index, Code builtin) {
  isolate_->heap()->set_builtin(index, builtin);
}

Code Builtins::builtin(int index) { return isolate_->heap()->builtin(index); }

Handle<Code> Builtins::builtin_handle(int index) {
  DCHECK(IsBuiltinId(index));
  return Handle<Code>(
      reinterpret_cast<Address*>(isolate_->heap()->builtin_address(index)));
}
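
// Note on builtin_handle() above: the returned Handle is not backed by a
// HandleScope. Its location is the slot for `index` in the isolate's builtins
// table, which lives as long as the isolate itself, so the handle stays valid
// without a scope.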

// static
int Builtins::GetStackParameterCount(Name name) {
  DCHECK(Builtins::KindOf(name) == TFJ);
  return builtin_metadata[name].data.parameter_count;
}

// static
CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Name name) {
  CallDescriptors::Key key;
  switch (name) {
// This macro is deliberately crafted so as to emit very little code,
// in order to keep binary size of this function under control.
#define CASE_OTHER(Name, ...)                          \
  case k##Name: {                                      \
    key = Builtin_##Name##_InterfaceDescriptor::key(); \
    break;                                             \
  }
    BUILTIN_LIST(IGNORE_BUILTIN, IGNORE_BUILTIN, CASE_OTHER, CASE_OTHER,
                 CASE_OTHER, IGNORE_BUILTIN, CASE_OTHER)
#undef CASE_OTHER
    default:
      Builtins::Kind kind = Builtins::KindOf(name);
      DCHECK_NE(BCH, kind);
      if (kind == TFJ || kind == CPP) {
        return JSTrampolineDescriptor{};
      }
      UNREACHABLE();
  }
  return CallInterfaceDescriptor{key};
}

// static
Callable Builtins::CallableFor(Isolate* isolate, Name name) {
  Handle<Code> code = isolate->builtins()->builtin_handle(name);
  return Callable{code, CallInterfaceDescriptorFor(name)};
}
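
// Hedged usage sketch for CallableFor(); the builtin chosen is arbitrary and
// purely illustrative:
//   Callable callable = Builtins::CallableFor(isolate, Builtins::kToNumber);
//   // callable.code() is the on-heap Code object and callable.descriptor()
//   // describes how arguments are passed when calling it.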

// static
bool Builtins::HasJSLinkage(int builtin_index) {
  Name name = static_cast<Name>(builtin_index);
  DCHECK_NE(BCH, Builtins::KindOf(name));
  return CallInterfaceDescriptorFor(name) == JSTrampolineDescriptor{};
}
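
// Descriptive note: "JS linkage" means the builtin is called like a
// JSFunction, i.e. through JSTrampolineDescriptor (receiver plus arguments and
// an argument count). For example, TFJ builtins have JS linkage, while a
// typical TFC builtin with its own interface descriptor does not.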

// static
const char* Builtins::name(int index) {
  DCHECK(IsBuiltinId(index));
  return builtin_metadata[index].name;
}

void Builtins::PrintBuiltinCode() {
  DCHECK(FLAG_print_builtin_code);
#ifdef ENABLE_DISASSEMBLER
  for (int i = 0; i < builtin_count; i++) {
    const char* builtin_name = name(i);
    Handle<Code> code = builtin_handle(i);
    if (PassesFilter(CStrVector(builtin_name),
                     CStrVector(FLAG_print_builtin_code_filter))) {
      CodeTracer::Scope trace_scope(isolate_->GetCodeTracer());
      OFStream os(trace_scope.file());
      code->Disassemble(builtin_name, os, isolate_);
      os << "\n";
    }
  }
#endif
}

void Builtins::PrintBuiltinSize() {
  DCHECK(FLAG_print_builtin_size);
  for (int i = 0; i < builtin_count; i++) {
    const char* builtin_name = name(i);
    const char* kind = KindNameOf(i);
    Code code = builtin(i);
    PrintF(stdout, "%s Builtin, %s, %d\n", kind, builtin_name,
           code.InstructionSize());
  }
}

// static
Address Builtins::CppEntryOf(int index) {
  DCHECK(Builtins::IsCpp(index));
  return builtin_metadata[index].data.cpp_entry;
}

// static
bool Builtins::IsBuiltin(const Code code) {
  return Builtins::IsBuiltinId(code.builtin_index());
}

bool Builtins::IsBuiltinHandle(Handle<HeapObject> maybe_code,
                               int* index) const {
  Heap* heap = isolate_->heap();
  Address handle_location = maybe_code.address();
  Address start = heap->builtin_address(0);
  Address end = heap->builtin_address(Builtins::builtin_count);
  if (handle_location >= end) return false;
  if (handle_location < start) return false;
  *index = static_cast<int>(handle_location - start) >> kSystemPointerSizeLog2;
  DCHECK(Builtins::IsBuiltinId(*index));
  return true;
}
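
// Note on the index computation above: it relies on the builtins table being a
// contiguous array of kSystemPointerSize-wide slots, so the handle location's
// distance from the first slot, shifted by kSystemPointerSizeLog2, is exactly
// the builtin id the handle was created from.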

// static
bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
  const int builtin_index = code.builtin_index();
  return Builtins::IsBuiltinId(builtin_index) &&
         Builtins::IsIsolateIndependent(builtin_index);
}

// static
void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
  EmbeddedData d = EmbeddedData::FromBlob();
  Address* builtin_entry_table = isolate->builtin_entry_table();
  for (int i = 0; i < builtin_count; i++) {
    // TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
    // resolved.
    CHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_index()));
    DCHECK(isolate->heap()->builtin(i).is_off_heap_trampoline());
    builtin_entry_table[i] = d.InstructionStartOfBuiltin(i);
  }
}
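
// Descriptive note: once this has run, builtin_entry_table[i] holds the raw
// entry address of builtin i inside the embedded blob, so callers can enter a
// builtin directly instead of going through its on-heap off-heap trampoline.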

// static
void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
  if (!isolate->logger()->is_listening_to_code_events() &&
      !isolate->is_profiling()) {
    return;  // No need to iterate the entire table in this case.
  }

  Address* builtins = isolate->builtins_table();
  int i = 0;
  HandleScope scope(isolate);
  for (; i < kFirstBytecodeHandler; i++) {
    Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG, code,
                                     Builtins::name(i)));
  }

  STATIC_ASSERT(kLastBytecodeHandlerPlusOne == builtin_count);
  for (; i < builtin_count; i++) {
    Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
    interpreter::Bytecode bytecode =
        builtin_metadata[i].data.bytecode_and_scale.bytecode;
    interpreter::OperandScale scale =
        builtin_metadata[i].data.bytecode_and_scale.scale;
    PROFILE(isolate,
            CodeCreateEvent(
                CodeEventListener::BYTECODE_HANDLER_TAG, code,
                interpreter::Bytecodes::ToString(bytecode, scale).c_str()));
  }
}

namespace {
enum TrampolineType { kAbort, kJump };

class OffHeapTrampolineGenerator {
 public:
  explicit OffHeapTrampolineGenerator(Isolate* isolate)
      : isolate_(isolate),
        masm_(isolate, AssemblerOptions::DefaultForOffHeapTrampoline(isolate),
              CodeObjectRequired::kYes,
              ExternalAssemblerBuffer(buffer_, kBufferSize)) {}

  CodeDesc Generate(Address off_heap_entry, TrampolineType type) {
    // Generate replacement code that simply tail-calls the off-heap code.
    DCHECK(!masm_.has_frame());
    {
      FrameScope scope(&masm_, StackFrame::NONE);
      if (type == TrampolineType::kJump) {
        masm_.CodeEntry();
        masm_.JumpToInstructionStream(off_heap_entry);
      } else {
        DCHECK_EQ(type, TrampolineType::kAbort);
        masm_.Trap();
      }
    }

    CodeDesc desc;
    masm_.GetCode(isolate_, &desc);
    return desc;
  }

  Handle<HeapObject> CodeObject() { return masm_.CodeObject(); }

 private:
  Isolate* isolate_;
  // Enough to fit the single jmp.
  static constexpr int kBufferSize = 256;
  byte buffer_[kBufferSize];
  MacroAssembler masm_;
};

constexpr int OffHeapTrampolineGenerator::kBufferSize;

}  // namespace

// static
Handle<Code> Builtins::GenerateOffHeapTrampolineFor(
    Isolate* isolate, Address off_heap_entry, int32_t kind_specific_flags,
    bool generate_jump_to_instruction_stream) {
  DCHECK_NOT_NULL(isolate->embedded_blob_code());
  DCHECK_NE(0, isolate->embedded_blob_code_size());

  OffHeapTrampolineGenerator generator(isolate);

  CodeDesc desc =
      generator.Generate(off_heap_entry, generate_jump_to_instruction_stream
                                             ? TrampolineType::kJump
                                             : TrampolineType::kAbort);

  return Factory::CodeBuilder(isolate, desc, CodeKind::BUILTIN)
      .set_read_only_data_container(kind_specific_flags)
      .set_self_reference(generator.CodeObject())
      .set_is_executable(generate_jump_to_instruction_stream)
      .Build();
}
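
// Descriptive note on the builder calls above: when
// generate_jump_to_instruction_stream is true, the result is a small
// executable trampoline whose body tail-calls the embedded-blob copy of the
// builtin. When it is false, the body is just a trap and the Code object is
// not executable; it mainly serves as the builtin's on-heap stand-in carrying
// its metadata.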

// static
Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
    Isolate* isolate) {
  OffHeapTrampolineGenerator generator(isolate);
  // Generate a jump to a dummy address as we're not actually interested in the
  // generated instruction stream.
  CodeDesc desc = generator.Generate(kNullAddress, TrampolineType::kJump);

  Handle<ByteArray> reloc_info = isolate->factory()->NewByteArray(
      desc.reloc_size, AllocationType::kReadOnly);
  Code::CopyRelocInfoToByteArray(*reloc_info, desc);

  return reloc_info;
}
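
// Descriptive note (an assumption about intent): all off-heap trampolines
// generated above share identical relocation info, so this helper produces a
// single read-only ByteArray of it that trampolines can reuse rather than
// each carrying its own copy.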

// static
Builtins::Kind Builtins::KindOf(int index) {
  DCHECK(IsBuiltinId(index));
  return builtin_metadata[index].kind;
}

// static
const char* Builtins::KindNameOf(int index) {
  Kind kind = Builtins::KindOf(index);
  // clang-format off
  switch (kind) {
    case CPP: return "CPP";
    case TFJ: return "TFJ";
    case TFC: return "TFC";
    case TFS: return "TFS";
    case TFH: return "TFH";
    case BCH: return "BCH";
    case ASM: return "ASM";
  }
  // clang-format on
  UNREACHABLE();
}

// static
bool Builtins::IsCpp(int index) { return Builtins::KindOf(index) == CPP; }

// static
bool Builtins::AllowDynamicFunction(Isolate* isolate, Handle<JSFunction> target,
                                    Handle<JSObject> target_global_proxy) {
  if (FLAG_allow_unsafe_function_constructor) return true;
  HandleScopeImplementer* impl = isolate->handle_scope_implementer();
  Handle<Context> responsible_context = impl->LastEnteredOrMicrotaskContext();
  // TODO(jochen): Remove this.
  if (responsible_context.is_null()) {
    return true;
  }
  if (*responsible_context == target->context()) return true;
  return isolate->MayAccess(responsible_context, target_global_proxy);
}
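
// Descriptive summary of the policy above: dynamic function construction is
// always allowed under --allow-unsafe-function-constructor, is allowed when
// the last entered (or microtask) context is the target's own context, and
// otherwise falls back to a MayAccess() check against the target's global
// proxy.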

// static
bool Builtins::CodeObjectIsExecutable(int builtin_index) {
  // If the runtime/optimized code always knows when executing a given builtin
  // that it is a builtin, then that builtin does not need an executable Code
  // object. Such Code objects can go in read_only_space (and can even be
  // smaller with no branch instruction), thus saving memory.

  // Builtins with JS linkage will always have executable Code objects since
  // they can be called directly from jitted code with no way of determining
  // that they are builtins at generation time. E.g.
  //   f = Array.of;
  //   f(1, 2, 3);
  // TODO(delphick): This is probably too loose but for now Wasm can call any JS
  // linkage builtin via its Code object. Once Wasm is fixed this can either be
  // tightened or removed completely.
  if (Builtins::KindOf(builtin_index) != BCH && HasJSLinkage(builtin_index)) {
    return true;
  }

  // There are some other non-TF builtins that also have JS linkage like
  // InterpreterEntryTrampoline which are explicitly allow-listed below.
  // TODO(delphick): Some of these builtins do not fit with the above, but
  // currently cause problems if they're not executable. This list should be
  // pared down as much as possible.
  switch (builtin_index) {
    case Builtins::kInterpreterEntryTrampoline:
    case Builtins::kCompileLazy:
    case Builtins::kCompileLazyDeoptimizedCode:
    case Builtins::kCallFunction_ReceiverIsNullOrUndefined:
    case Builtins::kCallFunction_ReceiverIsNotNullOrUndefined:
    case Builtins::kCallFunction_ReceiverIsAny:
    case Builtins::kCallBoundFunction:
    case Builtins::kCall_ReceiverIsNullOrUndefined:
    case Builtins::kCall_ReceiverIsNotNullOrUndefined:
    case Builtins::kCall_ReceiverIsAny:
    case Builtins::kHandleApiCall:
    case Builtins::kInstantiateAsmJs:
    case Builtins::kGenericJSToWasmWrapper:

    // TODO(delphick): Remove this when calls to it have the trampoline inlined
    // or are converted to use kCallBuiltinPointer.
    case Builtins::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit:
      return true;
    default:
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
      // TODO(Loongson): Moving non-JS linkage builtins' code objects into
      // RO_SPACE caused the MIPS platform to crash, and we need some time to
      // handle it. For now, disable this change on MIPS.
      return true;
#else
      return false;
#endif  // V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  }
}

Builtins::Name ExampleBuiltinForTorqueFunctionPointerType(
    size_t function_pointer_type_id) {
  switch (function_pointer_type_id) {
#define FUNCTION_POINTER_ID_CASE(id, name) \
  case id:                                 \
    return Builtins::k##name;
    TORQUE_FUNCTION_POINTER_TYPE_TO_BUILTIN_MAP(FUNCTION_POINTER_ID_CASE)
#undef FUNCTION_POINTER_ID_CASE
    default:
      UNREACHABLE();
  }
}
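
// Descriptive note (an assumption based on the name and the generated map):
// this returns one representative builtin for each Torque function-pointer
// type id, which callers can use as an exemplar of that type's signature.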

}  // namespace internal
}  // namespace v8