// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include_directory)
#include "v8config.h"    // NOLINT(build/include_directory)

namespace v8 {

class Context;
class Data;
class Isolate;

namespace internal {

class Isolate;

typedef uintptr_t Address;
static const Address kNullAddress = 0;

/**
 * Configuration of tagging scheme.
 */
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);
const int kApiSizetSize = sizeof(size_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
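// Illustrative note (not part of the original header): under this scheme a
// strong HeapObject reference carries 0b01 in its two low bits and a weak
// reference carries 0b11, while a Smi keeps its lowest bit clear. A check
// such as
//   (value & kHeapObjectTagMask) == kHeapObjectTag
// therefore distinguishes a strongly tagged heap pointer from a Smi (see
// Internals::HasHeapObjectTag() further below).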

// Tag information for forwarding pointers stored in object headers.
// 0b00 at the lowest 2 bits in the header indicates that the map word is a
// forwarding pointer.
const int kForwardingTag = 0;
const int kForwardingTagSize = 2;
const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);

// Smi constants for systems where a tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }
};

// Smi constants for systems where a tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return (value == static_cast<int32_t>(value));
  }
};
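// Worked example (illustrative only): with 32-bit tagged pointers the value
// 42 is encoded by shifting left by kSmiTagSize + kSmiShiftSize = 1, giving
// 84, and SmiToInt() recovers it with a sign-extending right shift; only
// 31-bit values pass IsValidSmi(). With 64-bit tagged pointers the shift is
// 32, so the payload lives in the upper half of the word and every int32_t
// value is a valid Smi.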

#ifdef V8_COMPRESS_POINTERS
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
const int kApiTaggedSize = kApiInt32Size;
#else
const int kApiTaggedSize = kApiSystemPointerSize;
#endif

constexpr bool PointerCompressionIsEnabled() {
  return kApiTaggedSize != kApiSystemPointerSize;
}
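// Illustrative note: with pointer compression enabled an on-heap tagged field
// occupies 4 bytes and holds an offset within a 4 GB cage; the full address
// is reconstructed as cage_base + offset (see
// Internals::ReadTaggedPointerField() and DecompressTaggedAnyField() further
// below). Without compression a tagged field is simply a full system pointer.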

constexpr bool HeapSandboxIsEnabled() {
#ifdef V8_HEAP_SANDBOX
  return true;
#else
  return false;
#endif
}

using ExternalPointer_t = Address;

// If the heap sandbox is enabled, these tag values will be ORed with the
// external pointers in the external pointer table to prevent use of pointers of
// the wrong type. When a pointer is loaded, it is ANDed with the inverse of the
// expected type's tag. The tags are constructed in a way that guarantees that a
// failed type check will result in one or more of the top bits of the pointer
// being set, rendering the pointer inaccessible. This construction allows
// performing the type check and removing GC marking bits from the pointer at
// the same time.
enum ExternalPointerTag : uint64_t {
  kExternalPointerNullTag = 0x0000000000000000,
  kArrayBufferBackingStoreTag = 0x00ff000000000000,      // 0b000000011111111
  kTypedArrayExternalPointerTag = 0x017f000000000000,    // 0b000000101111111
  kDataViewDataPointerTag = 0x01bf000000000000,          // 0b000000110111111
  kExternalStringResourceTag = 0x01df000000000000,       // 0b000000111011111
  kExternalStringResourceDataTag = 0x01ef000000000000,   // 0b000000111101111
  kForeignForeignAddressTag = 0x01f7000000000000,        // 0b000000111110111
  kNativeContextMicrotaskQueueTag = 0x01fb000000000000,  // 0b000000111111011
  kEmbedderDataSlotPayloadTag = 0x01fd000000000000,      // 0b000000111111101
  kCodeEntryPointTag = 0x01fe000000000000,               // 0b000000111111110
};

constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;
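// Minimal sketch of the scheme described above (illustrative only):
//   encoded = pointer | kForeignForeignAddressTag;   // store into the table
//   decoded = encoded & ~kForeignForeignAddressTag;  // load, expecting tag
// With a matching tag the tag bits cancel out; with a mismatched tag some of
// the top 16 bits (kExternalPointerTagMask) survive, leaving a non-canonical
// address that faults on use rather than being dereferenced as the wrong
// type.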

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
#endif

// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
// since it's used much more often than the individual constants.
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }

V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}

// Converts an encoded external pointer to an address.
V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate,
                                            ExternalPointer_t pointer,
                                            ExternalPointerTag tag);

// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns whether we need to throw when an error occurs. This infers the
// language mode based on the current context and the closure. This returns
// true if the language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api.  Don't
 * depend on functions and constants defined here.
 */
class Internals {
#ifdef V8_MAP_PACKING
  V8_INLINE static constexpr internal::Address UnpackMapWord(
      internal::Address mapword) {
    // TODO(wenyuzhao): Clear header metadata.
    return mapword ^ kMapWordXorMask;
  }
#endif

 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
  static const int kStringResourceOffset =
      1 * kApiTaggedSize + 2 * kApiInt32Size;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
#ifdef V8_HEAP_SANDBOX
  static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize;
#endif
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // IsolateData layout guarantees.
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kIsolateFastCCallCallerFpOffset =
      kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kIsolateFastCCallCallerPcOffset =
      kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateFastApiCallTargetOffset =
      kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
  static const int kIsolateCageBaseOffset =
      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
  static const int kIsolateLongTaskStatsCounterOffset =
      kIsolateCageBaseOffset + kApiSystemPointerSize;
  static const int kIsolateStackGuardOffset =
      kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
  static const int kIsolateRootsOffset =
      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

  static const int kExternalPointerTableBufferOffset = 0;
  static const int kExternalPointerTableLengthOffset =
      kExternalPointerTableBufferOffset + kApiSystemPointerSize;
  static const int kExternalPointerTableCapacityOffset =
      kExternalPointerTableLengthOffset + kApiInt32Size;

  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;

  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x46;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSApiObjectType = 0x420;
  static const int kJSObjectType = 0x421;

  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

#ifdef V8_MAP_PACKING
  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
  // The lowest two bits of mapwords are always `0b10`
  static const uintptr_t kMapWordSignature = 0b10;
  // XORing a (non-compressed) map with this mask ensures that the two
  // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
  // although real Smis have all lower 32 bits unset. We only rely on these
  // values passing as Smis in very few places.
  static const int kMapWordXorMask = 0b11;
#endif
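  // Illustrative note: a packed map word is restored by XORing it with
  // kMapWordXorMask (see UnpackMapWord() above), flipping the low bits from
  // the 0b10 signature back to the regular heap-object tag.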

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
#ifdef V8_MAP_PACKING
    map = UnpackMapWord(map);
#endif
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }
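  // Illustrative example: an external one-byte string carries
  // kExternalOneByteRepresentationTag (0x0a) in its low instance-type bits,
  // i.e. the two-byte tag (0x02) plus the one-byte encoding bit from
  // kStringEncodingMask (0x8), so the mask-and-compare above selects exactly
  // the external two-byte case.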

  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }
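  // Illustrative note: the byte at kNodeFlagsOffset packs the node's state
  // into its three low bits (kNodeStateMask) with independent flag bits above
  // them, so UpdateNodeState() rewrites only the state bits while
  // GetNodeFlag()/UpdateNodeFlag() touch a single flag bit selected by
  // `shift`.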

  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }

  V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateLongTaskStatsCounterOffset;
    ++(*reinterpret_cast<size_t*>(addr));
  }

  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }
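  // Illustrative usage (not part of the original header): inline API helpers
  // can hand out handles that point directly at a root slot, e.g.
  // v8::Undefined(isolate) can be backed by
  //   GetRoot(isolate, kUndefinedValueRootIndex)
  // without a call into V8.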

  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
#ifdef V8_COMPRESS_POINTERS
    if (sizeof(T) > kApiTaggedSize) {
      // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
      // fields (external pointers, doubles and BigInt data) are only
      // kTaggedSize aligned so we have to use unaligned pointer friendly way of
      // accessing them in order to avoid undefined behavior in C++ code.
      T r;
      memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
      return r;
    }
#endif
    return *reinterpret_cast<const T*>(addr);
  }

  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    internal::Address base =
        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Isolate* GetIsolateForHeapSandbox(
      internal::Address obj) {
#ifdef V8_HEAP_SANDBOX
    return internal::IsolateFromNeverReadOnlySpaceObject(obj);
#else
    // Not used in non-sandbox mode.
    return nullptr;
#endif
  }

  V8_INLINE static Address DecodeExternalPointer(
      const Isolate* isolate, ExternalPointer_t encoded_pointer,
      ExternalPointerTag tag) {
#ifdef V8_HEAP_SANDBOX
    return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
#else
    return encoded_pointer;
#endif
  }

  V8_INLINE static internal::Address ReadExternalPointerField(
      internal::Isolate* isolate, internal::Address heap_object_ptr, int offset,
      ExternalPointerTag tag) {
#ifdef V8_HEAP_SANDBOX
    internal::ExternalPointer_t encoded_value =
        ReadRawField<uint32_t>(heap_object_ptr, offset);
    // We currently have to treat zero as nullptr in embedder slots.
    return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag)
                         : 0;
#else
    return ReadRawField<Address>(heap_object_ptr, offset);
#endif
  }

#ifdef V8_COMPRESS_POINTERS
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
  static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;

  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
      internal::Address addr) {
    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
  }
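  // Illustrative example: with the 4 GB-aligned cage above, the mask clears
  // the low 32 bits of any on-heap address, e.g. 0x0000'5500'1234'5678 maps
  // to the cage base 0x0000'5500'0000'0000.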

  V8_INLINE static internal::Address DecompressTaggedAnyField(
      internal::Address heap_object_ptr, uint32_t value) {
    internal::Address base =
        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
  }

#endif  // V8_COMPRESS_POINTERS
};

// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}

template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value &&
            !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
}
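// Illustrative example: calling PerformCastCheck() on a pointer to a type
// derived from v8::Data (e.g. v8::String*) instantiates CastCheck<true> and
// invokes T::Cast(), which typically verifies the type only when
// V8_ENABLE_CHECKS is defined; for v8::Data itself or for unrelated types it
// compiles to a no-op.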

// A base class for backing stores, which is needed due to vagaries of
// how static casts work with std::shared_ptr.
class BackingStoreBase {};

}  // namespace internal
}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_