// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include_directory)
#include "v8config.h"    // NOLINT(build/include_directory)

namespace v8 {

class Array;
class Context;
class Data;
class Isolate;
template <typename T>
class Local;

namespace internal {

class Isolate;

typedef uintptr_t Address;
static const Address kNullAddress = 0;

/**
 * Configuration of tagging scheme.
 */
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);
const int kApiSizetSize = sizeof(size_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
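
// An illustrative sketch (not part of the original header): a strong
// HeapObject pointer carries 0b01 in its two low bits and a weak one 0b11,
// so a tag check is a mask-and-compare:
//
//   Address ptr = ...;
//   bool is_strong = (ptr & kHeapObjectTagMask) == kHeapObjectTag;    // 0b01
//   bool is_weak = (ptr & kHeapObjectTagMask) == kWeakHeapObjectTag;  // 0b11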

// Tag information for forwarding pointers stored in object headers.
// 0b00 in the lowest 2 bits of the header indicates that the map word is a
// forwarding pointer.
const int kForwardingTag = 0;
const int kForwardingTagSize = 2;
const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);

// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }
};
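
// An illustrative worked example (not part of the original header): with
// SmiTagging<4>, kSmiMinValue == -2^30 and kSmiMaxValue == 2^30 - 1
// (0x3fffffff). Encoding shifts the payload up past the one tag bit, so the
// Smi encoding of 42 is (42 << 1) | kSmiTag == 84, and SmiToInt(84) == 42.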

// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return (value == static_cast<int32_t>(value));
  }
};
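
// An illustrative worked example (not part of the original header): with
// SmiTagging<8>, kSmiTagSize + kSmiShiftSize == 32, so the 32-bit payload
// occupies the upper half of the word and the lower 32 bits are all zero;
// the Smi encoding of 42 is 42 << 32 == 0x0000002A'00000000.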

#ifdef V8_COMPRESS_POINTERS
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
const int kApiTaggedSize = kApiInt32Size;
#else
const int kApiTaggedSize = kApiSystemPointerSize;
#endif

constexpr bool PointerCompressionIsEnabled() {
  return kApiTaggedSize != kApiSystemPointerSize;
}

constexpr bool HeapSandboxIsEnabled() {
#ifdef V8_HEAP_SANDBOX
  return true;
#else
  return false;
#endif
}

using ExternalPointer_t = Address;

// If the heap sandbox is enabled, these tag values will be ORed with the
// external pointers in the external pointer table to prevent use of pointers of
// the wrong type. When a pointer is loaded, it is ANDed with the inverse of the
// expected type's tag. The tags are constructed in a way that guarantees that a
// failed type check will result in one or more of the top bits of the pointer
// being set, rendering the pointer inaccessible. This construction allows
// performing the type check and removing GC marking bits from the pointer at
// the same time.
enum ExternalPointerTag : uint64_t {
  kExternalPointerNullTag = 0x0000000000000000,
  kExternalStringResourceTag = 0x00ff000000000000,       // 0b000000011111111
  kExternalStringResourceDataTag = 0x017f000000000000,   // 0b000000101111111
  kForeignForeignAddressTag = 0x01bf000000000000,        // 0b000000110111111
  kNativeContextMicrotaskQueueTag = 0x01df000000000000,  // 0b000000111011111
  kEmbedderDataSlotPayloadTag = 0x01ef000000000000,      // 0b000000111101111
  kCodeEntryPointTag = 0x01f7000000000000,               // 0b000000111110111
};

constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000;
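
// An illustrative sketch (not part of the original header) of the scheme
// described above, assuming the raw pointer's top 16 bits are zero:
//
//   uint64_t entry = raw_pointer | kExternalStringResourceTag;
//   uint64_t ok = entry & ~kExternalStringResourceTag;   // == raw_pointer
//   uint64_t bad = entry & ~kForeignForeignAddressTag;   // leaves top bits set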

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
#endif

// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
// since it's used much more often than the individual constants.
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }

V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}
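
// An illustrative worked example (not part of the original header): on a
// 64-bit build with 32-bit Smis the total shift is 32, so IntToSmi(42) yields
// 0x0000002A'00000000; with 31-bit Smis the shift is 1 and IntToSmi(42) == 84.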

// Converts encoded external pointer to address.
V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate,
                                            ExternalPointer_t pointer,
                                            ExternalPointerTag tag);

// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns whether we need to throw when an error occurs. This infers the
// language mode based on the current context and the closure. This returns
// true if the language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

V8_EXPORT bool CanHaveInternalField(int instance_type);

/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api.  Don't
 * depend on functions and constants defined here.
 */
class Internals {
#ifdef V8_MAP_PACKING
  V8_INLINE static constexpr internal::Address UnpackMapWord(
      internal::Address mapword) {
    // TODO(wenyuzhao): Clear header metadata.
    return mapword ^ kMapWordXorMask;
  }
#endif

 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
  static const int kStringResourceOffset =
      1 * kApiTaggedSize + 2 * kApiInt32Size;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
#ifdef V8_HEAP_SANDBOX
  static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize;
#endif
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // IsolateData layout guarantees.
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kIsolateFastCCallCallerFpOffset =
      kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kIsolateFastCCallCallerPcOffset =
      kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateFastApiCallTargetOffset =
      kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
  static const int kIsolateCageBaseOffset =
      kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
  static const int kIsolateLongTaskStatsCounterOffset =
      kIsolateCageBaseOffset + kApiSystemPointerSize;
  static const int kIsolateStackGuardOffset =
      kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
  static const int kIsolateRootsOffset =
      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;
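
  // An illustrative layout sketch (not part of the original header), derived
  // from the offsets above for a 64-bit build (8-byte pointers and size_t):
  //
  //   +0x00  embedder data slots (4 * 8 bytes)
  //   +0x20  fast C call caller FP
  //   +0x28  fast C call caller PC
  //   +0x30  fast API call target
  //   +0x38  pointer compression cage base
  //   +0x40  long task stats counter (size_t)
  //   +0x48  stack guard (7 pointers)
  //   +0x80  roots table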

  static const int kExternalPointerTableBufferOffset = 0;
  static const int kExternalPointerTableLengthOffset =
      kExternalPointerTableBufferOffset + kApiSystemPointerSize;
  static const int kExternalPointerTableCapacityOffset =
      kExternalPointerTableLengthOffset + kApiInt32Size;

  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;

  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x46;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSObjectType = 0x421;
  static const int kFirstJSApiObjectType = 0x422;
  static const int kLastJSApiObjectType = 0x80A;

  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
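
  // An illustrative usage sketch (not part of the original header): embedders
  // report external allocations through
  // v8::Isolate::AdjustAmountOfExternalAllocatedMemory, e.g.
  //
  //   isolate->AdjustAmountOfExternalAllocatedMemory(buffer_size);   // alloc
  //   isolate->AdjustAmountOfExternalAllocatedMemory(-buffer_size);  // free
  //
  // Once the reported total crosses this 64 MB soft limit, V8 may start an
  // incremental GC.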

#ifdef V8_MAP_PACKING
  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
  // The lowest two bits of mapwords are always `0b10`.
  static const uintptr_t kMapWordSignature = 0b10;
  // XORing a (non-compressed) map with this mask ensures that the two
  // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
  // although real Smis have all lower 32 bits unset. We only rely on these
  // values passing as Smis in very few places.
  static const int kMapWordXorMask = 0b11;
#endif

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
#ifdef V8_MAP_PACKING
    map = UnpackMapWord(map);
#endif
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }
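
  // An illustrative sketch (not part of the original header): given the
  // constants above, an external two-byte string satisfies
  //   (instance_type & kFullStringRepresentationMask) == 0x02
  // while an external one-byte string additionally has the kStringEncodingMask
  // bit (0x8) set, yielding the 0x0a tag.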

  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }

  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }
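
  // An illustrative usage sketch (not part of the original header); MyState is
  // a hypothetical embedder type, and the slot index must be smaller than
  // kNumIsolateDataSlots:
  //
  //   Internals::SetEmbedderData(isolate, 0, my_state);   // store
  //   auto* s = static_cast<MyState*>(
  //       Internals::GetEmbedderData(isolate, 0));        // load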

  V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateLongTaskStatsCounterOffset;
    ++(*reinterpret_cast<size_t*>(addr));
  }

  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }

  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
#ifdef V8_COMPRESS_POINTERS
    if (sizeof(T) > kApiTaggedSize) {
      // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
      // fields (external pointers, doubles and BigInt data) are only
      // kTaggedSize aligned so we have to use unaligned pointer friendly way of
      // accessing them in order to avoid undefined behavior in C++ code.
      T r;
      memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
      return r;
    }
#endif
    return *reinterpret_cast<const T*>(addr);
  }
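
  // An illustrative note (not part of the original header): offsets are given
  // relative to the object's start, while tagged pointers carry kHeapObjectTag
  // in their low bits, hence the `- kHeapObjectTag` above; e.g. a field at
  // offset 0 of an object tagged as 0x1001 is read from address 0x1000.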

  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    internal::Address base =
        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Isolate* GetIsolateForHeapSandbox(
      internal::Address obj) {
#ifdef V8_HEAP_SANDBOX
    return internal::IsolateFromNeverReadOnlySpaceObject(obj);
#else
    // Not used in non-sandbox mode.
    return nullptr;
#endif
  }

  V8_INLINE static Address DecodeExternalPointer(
      const Isolate* isolate, ExternalPointer_t encoded_pointer,
      ExternalPointerTag tag) {
#ifdef V8_HEAP_SANDBOX
    return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
#else
    return encoded_pointer;
#endif
  }

  V8_INLINE static internal::Address ReadExternalPointerField(
      internal::Isolate* isolate, internal::Address heap_object_ptr, int offset,
      ExternalPointerTag tag) {
#ifdef V8_HEAP_SANDBOX
    internal::ExternalPointer_t encoded_value =
        ReadRawField<uint32_t>(heap_object_ptr, offset);
    // We currently have to treat zero as nullptr in embedder slots.
    return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag)
                         : 0;
#else
    return ReadRawField<Address>(heap_object_ptr, offset);
#endif
  }

#ifdef V8_COMPRESS_POINTERS
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
  static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;

  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
      internal::Address addr) {
    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
  }

  V8_INLINE static internal::Address DecompressTaggedAnyField(
      internal::Address heap_object_ptr, uint32_t value) {
    internal::Address base =
        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
  }
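
  // An illustrative worked example (not part of the original header): with the
  // 4 GB cage alignment above, decompression is base-plus-offset arithmetic.
  // For an on-heap address 0x0000123480000010 the cage base is
  // 0x0000123400000000, so the compressed value 0x80000021 decompresses to
  // 0x0000123480000021.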

#endif  // V8_COMPRESS_POINTERS
};

constexpr bool VirtualMemoryCageIsEnabled() {
#ifdef V8_VIRTUAL_MEMORY_CAGE
  return true;
#else
  return false;
#endif
}

#ifdef V8_VIRTUAL_MEMORY_CAGE
// Size of the pointer compression cage located at the start of the virtual
// memory cage.
constexpr size_t kVirtualMemoryCagePointerCageSize =
    Internals::kPtrComprCageReservationSize;

// Size of the virtual memory cage, excluding the guard regions surrounding it.
constexpr size_t kVirtualMemoryCageSize = size_t{1} << 40;  // 1 TB

static_assert(kVirtualMemoryCageSize > kVirtualMemoryCagePointerCageSize,
              "The virtual memory cage must be larger than the pointer "
              "compression cage contained within it.");

// Required alignment of the virtual memory cage. For simplicity, we require the
// size of the guard regions to be a multiple of this, so that this specifies
// the alignment of the cage including and excluding surrounding guard regions.
// The alignment requirement is due to the pointer compression cage being
// located at the start of the virtual memory cage.
constexpr size_t kVirtualMemoryCageAlignment =
    Internals::kPtrComprCageBaseAlignment;

// Size of the guard regions surrounding the virtual memory cage. This assumes a
// worst-case scenario of a 32-bit unsigned index being used to access an array
// of 64-bit values.
constexpr size_t kVirtualMemoryCageGuardRegionSize = size_t{32} << 30;  // 32 GB

static_assert((kVirtualMemoryCageGuardRegionSize %
               kVirtualMemoryCageAlignment) == 0,
              "The size of the virtual memory cage guard region must be a "
              "multiple of its required alignment.");

// Minimum possible size of the virtual memory cage, excluding the guard regions
// surrounding it. Used by unit tests.
constexpr size_t kVirtualMemoryCageMinimumSize =
    2 * kVirtualMemoryCagePointerCageSize;
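
// An illustrative layout sketch (not part of the original header), assuming
// the default constants above:
//
//   [ 32 GB guard | 4 GB pointer compression cage | rest of the 1 TB cage
//     ... | 32 GB guard ]
//
// Because the pointer compression cage sits at the very start, the whole cage
// inherits its 4 GB alignment requirement.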

// For now, even if the virtual memory cage is enabled, we still allow backing
// stores to be allocated outside of it as a fallback. This will simplify the
// initial rollout. However, if the heap sandbox is also enabled, we already use
// the "enforcing mode" of the virtual memory cage. This is useful for testing.
#ifdef V8_HEAP_SANDBOX
constexpr bool kAllowBackingStoresOutsideDataCage = false;
#else
constexpr bool kAllowBackingStoresOutsideDataCage = true;
#endif  // V8_HEAP_SANDBOX

#endif  // V8_VIRTUAL_MEMORY_CAGE

// Only perform the cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}

template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value &&
            !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
}
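
// An illustrative sketch (not part of the original header): the check compiles
// away unless T derives from v8::Data (and is not v8::Data itself), e.g.
//
//   v8::Array* arr = ...;
//   internal::PerformCastCheck(arr);  // expands to v8::Array::Cast(arr)
//
//   int* n = ...;
//   internal::PerformCastCheck(n);    // no-op: int is not derived from Data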

// A base class for backing stores, which is needed due to vagaries of
// how static casts work with std::shared_ptr.
class BackingStoreBase {};

}  // namespace internal

V8_EXPORT bool CopyAndConvertArrayToCppBufferInt32(Local<Array> src,
                                                   int32_t* dst,
                                                   uint32_t max_length);

V8_EXPORT bool CopyAndConvertArrayToCppBufferFloat64(Local<Array> src,
                                                     double* dst,
                                                     uint32_t max_length);
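
// An illustrative usage sketch (not part of the original header); the exact
// failure conditions are an assumption here, but the helpers return false when
// the copy cannot be performed:
//
//   int32_t buf[16];
//   if (CopyAndConvertArrayToCppBufferInt32(array, buf, 16)) {
//     // buf now holds the converted elements.
//   }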

}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_