Commit 42ed4928 authored by Samuel Groß, committed by V8 LUCI CQ

Use CagedPointers for ArrayBuffer backing stores

This CL turns the references from JSArrayBuffers, JSTypedArrays, and
JSDataViews to their ArrayBuffer backing stores into CagedPointers
when CagedPointers are enabled.

CagedPointers cannot generally represent nullptr, as NULL usually lies
outside the cage. As such, nullptr backing stores are replaced with a
special empty backing store value, which, in the current implementation,
points to the end of the cage, right in front of the trailing guard
regions. Due to this, it is no longer correct to compare a backing store
pointer against nullptr.
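
To make the encoding concrete, here is a minimal, self-contained sketch of
the scheme described above. It is illustrative only: the constants and
function names below are not necessarily V8's actual identifiers, and V8
derives the cage size and base from the real cage reservation.

#include <cstdint>

// Illustrative values; V8 computes these from the actual cage reservation.
constexpr uint64_t kCageSizeLog2 = 40;  // e.g. a 1 TB cage
constexpr uint64_t kCageSize = uint64_t{1} << kCageSizeLog2;
constexpr uint64_t kCagedPointerShift = 64 - kCageSizeLog2;

// Encode: store the offset from the cage base, shifted into the upper bits.
uint64_t EncodeCagedPointer(uint64_t cage_base, uint64_t pointer) {
  uint64_t offset = pointer - cage_base;
  return offset << kCagedPointerShift;
}

// Decode: shifting right first guarantees offset < kCageSize, so the result
// always lands inside [cage_base, cage_base + kCageSize), no matter what raw
// value was stored in the field.
uint64_t DecodeCagedPointer(uint64_t cage_base, uint64_t caged_pointer) {
  uint64_t offset = caged_pointer >> kCagedPointerShift;
  return cage_base + offset;
}

// nullptr lies outside the cage and is therefore not representable; an
// "empty" backing store instead decodes to the last byte of the cage, right
// in front of the trailing guard regions. Emptiness checks must compare
// against this value rather than against nullptr.
uint64_t EmptyBackingStoreBuffer(uint64_t cage_base) {
  return cage_base + kCageSize - 1;
}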

Bug: chromium:1218005
Change-Id: I4a6c7a82aabb4debcb6bb2babe4035ba2da8e79f
Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3244419
Commit-Queue: Samuel Groß <saelo@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78218}
parent 4f34cd83
@@ -494,6 +494,11 @@ constexpr bool VirtualMemoryCageIsEnabled() {
 #endif
 }
+// CagedPointers are guaranteed to point into the virtual memory cage. This is
+// achieved for example by storing them as offset from the cage base rather
+// than as raw pointers.
+using CagedPointer_t = Address;
 #ifdef V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE
 #define GB (1ULL << 30)
@@ -511,17 +516,11 @@ constexpr size_t kVirtualMemoryCageSize = 1ULL << kVirtualMemoryCageSizeLog2;
 constexpr size_t kVirtualMemoryCageAlignment =
     Internals::kPtrComprCageBaseAlignment;
-#ifdef V8_CAGED_POINTERS
-// CagedPointers are guaranteed to point into the virtual memory cage. This is
-// achieved by storing them as offset from the cage base rather than as raw
-// pointers.
-using CagedPointer_t = Address;
-// For efficiency, the offset is stored shifted to the left, so that
-// it is guaranteed that the offset is smaller than the cage size after
-// shifting it to the right again. This constant specifies the shift amount.
+// Caged pointers are stored inside the heap as offset from the cage base
+// shifted to the left. This way, it is guaranteed that the offset is smaller
+// than the cage size after shifting it to the right again. This constant
+// specifies the shift amount.
 constexpr uint64_t kCagedPointerShift = 64 - kVirtualMemoryCageSizeLog2;
-#endif
 // Size of the guard regions surrounding the virtual memory cage. This assumes a
 // worst-case scenario of a 32-bit unsigned index being used to access an array
...
@@ -65,8 +65,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
   StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kByteLengthOffset,
                                  UintPtrConstant(0));
-  StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBackingStoreOffset,
-                                 PointerConstant(nullptr));
+  StoreCagedPointerToObject(buffer, JSArrayBuffer::kBackingStoreOffset,
+                            EmptyBackingStoreBufferConstant());
   StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
                                  IntPtrConstant(0));
   for (int offset = JSArrayBuffer::kHeaderSize;
@@ -437,10 +437,10 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
     TNode<IntPtrT> ptr_compr_cage_base =
        IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
     // Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
+    // See JSTypedArray::AddExternalPointerCompensationForDeserialization().
     DCHECK_EQ(
        isolate()->cage_base(),
        JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
-    // See JSTypedArray::SetOnHeapDataPtr() for details.
     offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base));
   }
...
@@ -1539,16 +1539,21 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(TNode<Object> value,
   }
 }
-#ifdef V8_CAGED_POINTERS
-TNode<CagedPtrT> CodeStubAssembler::LoadCagedPointerFromObject(
+TNode<RawPtrT> CodeStubAssembler::LoadCagedPointerFromObject(
     TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
-  return LoadObjectField<CagedPtrT>(object, field_offset);
+#ifdef V8_CAGED_POINTERS
+  return ReinterpretCast<RawPtrT>(
+      LoadObjectField<CagedPtrT>(object, field_offset));
+#else
+  return LoadObjectField<RawPtrT>(object, field_offset);
+#endif  // V8_CAGED_POINTERS
 }
 void CodeStubAssembler::StoreCagedPointerToObject(TNode<HeapObject> object,
                                                   TNode<IntPtrT> offset,
-                                                  TNode<CagedPtrT> pointer) {
+                                                  TNode<RawPtrT> pointer) {
+#ifdef V8_CAGED_POINTERS
+  TNode<CagedPtrT> caged_pointer = ReinterpretCast<CagedPtrT>(pointer);
 #ifdef DEBUG
   // Verify pointer points into the cage.
   TNode<ExternalReference> cage_base_address =
@@ -1557,13 +1562,26 @@ void CodeStubAssembler::StoreCagedPointerToObject(TNode<HeapObject> object,
       ExternalConstant(ExternalReference::virtual_memory_cage_end_address());
   TNode<UintPtrT> cage_base = Load<UintPtrT>(cage_base_address);
   TNode<UintPtrT> cage_end = Load<UintPtrT>(cage_end_address);
-  CSA_CHECK(this, UintPtrGreaterThanOrEqual(pointer, cage_base));
-  CSA_CHECK(this, UintPtrLessThan(pointer, cage_end));
-#endif
-  StoreObjectFieldNoWriteBarrier<CagedPtrT>(object, offset, pointer);
+  CSA_DCHECK(this, UintPtrGreaterThanOrEqual(caged_pointer, cage_base));
+  CSA_DCHECK(this, UintPtrLessThan(caged_pointer, cage_end));
+#endif  // DEBUG
+  StoreObjectFieldNoWriteBarrier<CagedPtrT>(object, offset, caged_pointer);
+#else
+  StoreObjectFieldNoWriteBarrier<RawPtrT>(object, offset, pointer);
+#endif  // V8_CAGED_POINTERS
 }
+TNode<RawPtrT> CodeStubAssembler::EmptyBackingStoreBufferConstant() {
+#ifdef V8_CAGED_POINTERS
+  // TODO(chromium:1218005) consider creating a LoadCagedPointerConstant() if
+  // more of these constants are required later on.
+  TNode<ExternalReference> empty_backing_store_buffer =
+      ExternalConstant(ExternalReference::empty_backing_store_buffer());
+  return Load<RawPtrT>(empty_backing_store_buffer);
+#else
+  return ReinterpretCast<RawPtrT>(IntPtrConstant(0));
 #endif  // V8_CAGED_POINTERS
+}
 TNode<ExternalPointerT> CodeStubAssembler::ChangeUint32ToExternalPointer(
     TNode<Uint32T> value) {
@@ -13860,8 +13878,8 @@ void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
 TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr(
     TNode<JSArrayBuffer> array_buffer) {
-  return LoadObjectField<RawPtrT>(array_buffer,
-                                  JSArrayBuffer::kBackingStoreOffset);
+  return LoadCagedPointerFromObject(array_buffer,
+                                    JSArrayBuffer::kBackingStoreOffset);
 }
 TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
...
@@ -1043,32 +1043,29 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   // Works only with V8_ENABLE_FORCE_SLOW_PATH compile time flag. Nop otherwise.
   void GotoIfForceSlowPath(Label* if_true);
-#ifdef V8_CAGED_POINTERS
   //
   // Caged pointer related functionality.
   //
   // Load a caged pointer value from an object.
-  TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
-                                              int offset) {
+  TNode<RawPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
+                                            int offset) {
     return LoadCagedPointerFromObject(object, IntPtrConstant(offset));
   }
-  TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
-                                              TNode<IntPtrT> offset);
+  TNode<RawPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
+                                            TNode<IntPtrT> offset);
   // Stored a caged pointer value to an object.
   void StoreCagedPointerToObject(TNode<HeapObject> object, int offset,
-                                 TNode<CagedPtrT> pointer) {
+                                 TNode<RawPtrT> pointer) {
     StoreCagedPointerToObject(object, IntPtrConstant(offset), pointer);
   }
   void StoreCagedPointerToObject(TNode<HeapObject> object,
-                                 TNode<IntPtrT> offset,
-                                 TNode<CagedPtrT> pointer);
-#endif  // V8_CAGED_POINTERS
+                                 TNode<IntPtrT> offset, TNode<RawPtrT> pointer);
+  TNode<RawPtrT> EmptyBackingStoreBufferConstant();
   //
   // ExternalPointerT-related functionality.
@@ -1148,14 +1145,14 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   TNode<RawPtrT> LoadJSTypedArrayExternalPointerPtr(
       TNode<JSTypedArray> holder) {
-    return LoadObjectField<RawPtrT>(holder,
-                                    JSTypedArray::kExternalPointerOffset);
+    return LoadCagedPointerFromObject(holder,
+                                      JSTypedArray::kExternalPointerOffset);
   }
   void StoreJSTypedArrayExternalPointerPtr(TNode<JSTypedArray> holder,
                                            TNode<RawPtrT> value) {
-    StoreObjectFieldNoWriteBarrier<RawPtrT>(
-        holder, JSTypedArray::kExternalPointerOffset, value);
+    StoreCagedPointerToObject(holder, JSTypedArray::kExternalPointerOffset,
+                              value);
   }
   // Load value from current parent frame by given offset in bytes.
...
@@ -226,7 +226,7 @@ ExternalReference ExternalReference::handle_scope_implementer_address(
   return ExternalReference(isolate->handle_scope_implementer_address());
 }
-#ifdef V8_VIRTUAL_MEMORY_CAGE
+#ifdef V8_CAGED_POINTERS
 ExternalReference ExternalReference::virtual_memory_cage_base_address() {
   return ExternalReference(GetProcessWideVirtualMemoryCage()->base_address());
 }
@@ -234,7 +234,13 @@ ExternalReference ExternalReference::virtual_memory_cage_base_address() {
 ExternalReference ExternalReference::virtual_memory_cage_end_address() {
   return ExternalReference(GetProcessWideVirtualMemoryCage()->end_address());
 }
-#endif
+ExternalReference ExternalReference::empty_backing_store_buffer() {
+  return ExternalReference(GetProcessWideVirtualMemoryCage()
+                               ->constants()
+                               .empty_backing_store_buffer_address());
+}
+#endif  // V8_CAGED_POINTERS
 #ifdef V8_HEAP_SANDBOX
 ExternalReference ExternalReference::external_pointer_table_address(
...
@@ -318,13 +318,14 @@ class StatsCounter;
 #define EXTERNAL_REFERENCE_LIST_INTL(V)
 #endif  // V8_INTL_SUPPORT
-#ifdef V8_VIRTUAL_MEMORY_CAGE
+#ifdef V8_CAGED_POINTERS
 #define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V)              \
   V(virtual_memory_cage_base_address, "V8VirtualMemoryCage::base()") \
-  V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()")
+  V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()")   \
+  V(empty_backing_store_buffer, "EmptyBackingStoreBuffer()")
 #else
 #define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V)
-#endif  // V8_VIRTUAL_MEMORY_CAGE
+#endif  // V8_CAGED_POINTERS
 #ifdef V8_HEAP_SANDBOX
 #define EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V) \
...
@@ -40,7 +40,9 @@ enum class MachineRepresentation : uint8_t {
   kTagged,             // (uncompressed) Object (Smi or HeapObject)
   kCompressedPointer,  // (compressed) HeapObject
   kCompressed,         // (compressed) Object (Smi or HeapObject)
-  kCagedPointer,  // Guaranteed to point into the virtual memory cage.
+  // A 64-bit pointer encoded in a way (e.g. as offset) that guarantees it will
+  // point into the virtual memory cage.
+  kCagedPointer,
   // FP and SIMD representations must be last, and in order of increasing size.
   kFloat32,
   kFloat64,
...
@@ -421,8 +421,13 @@ FieldAccess AccessBuilder::ForJSTypedArrayExternalPointer() {
                         JSTypedArray::kExternalPointerOffset,
                         MaybeHandle<Name>(),
                         MaybeHandle<Map>(),
+#ifdef V8_CAGED_POINTERS
+                        Type::CagedPointer(),
+                        MachineType::CagedPointer(),
+#else
                         Type::ExternalPointer(),
                         MachineType::Pointer(),
+#endif
                         kNoWriteBarrier,
                         ConstFieldInfo::None(),
                         false,
@@ -437,8 +442,13 @@ FieldAccess AccessBuilder::ForJSDataViewDataPointer() {
                         JSDataView::kDataPointerOffset,
                         MaybeHandle<Name>(),
                         MaybeHandle<Map>(),
+#ifdef V8_CAGED_POINTERS
+                        Type::CagedPointer(),
+                        MachineType::CagedPointer(),
+#else
                         Type::ExternalPointer(),
                         MachineType::Pointer(),
+#endif
                         kNoWriteBarrier,
                         ConstFieldInfo::None(),
                         false,
...
@@ -840,13 +840,9 @@ void InstructionSelector::VisitLoad(Node* node) {
       immediate_mode = kLoadStoreImm64;
       break;
     case MachineRepresentation::kCagedPointer:
-#ifdef V8_CAGED_POINTERS
       opcode = kArm64LdrDecodeCagedPointer;
       immediate_mode = kLoadStoreImm64;
       break;
-#else
-      UNREACHABLE();
-#endif
     case MachineRepresentation::kSimd128:
       opcode = kArm64LdrQ;
       immediate_mode = kNoImmediate;
@@ -948,13 +944,9 @@ void InstructionSelector::VisitStore(Node* node) {
          COMPRESS_POINTERS_BOOL ? kLoadStoreImm32 : kLoadStoreImm64;
       break;
     case MachineRepresentation::kCagedPointer:
-#ifdef V8_CAGED_POINTERS
       opcode = kArm64StrEncodeCagedPointer;
       immediate_mode = kLoadStoreImm64;
       break;
-#else
-      UNREACHABLE();
-#endif
     case MachineRepresentation::kWord64:
       opcode = kArm64Str;
       immediate_mode = kLoadStoreImm64;
...
@@ -298,12 +298,8 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
       opcode = kX64Movq;
       break;
     case MachineRepresentation::kCagedPointer:
-#ifdef V8_CAGED_POINTERS
       opcode = kX64MovqDecodeCagedPointer;
       break;
-#else
-      UNREACHABLE();
-#endif
     case MachineRepresentation::kSimd128:
       opcode = kX64Movdqu;
       break;
@@ -341,11 +337,7 @@ ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
     case MachineRepresentation::kWord64:
       return kX64Movq;
     case MachineRepresentation::kCagedPointer:
-#ifdef V8_CAGED_POINTERS
       return kX64MovqEncodeCagedPointer;
-#else
-      UNREACHABLE();
-#endif
     case MachineRepresentation::kSimd128:
       return kX64Movdqu;
     case MachineRepresentation::kNone:  // Fall through.
...
@@ -181,12 +181,6 @@ struct MaybeBoolFlag {
 #define V8_VIRTUAL_MEMORY_CAGE_BOOL false
 #endif
-#ifdef V8_CAGED_POINTERS
-#define V8_CAGED_POINTERS_BOOL true
-#else
-#define V8_CAGED_POINTERS_BOOL false
-#endif
 // D8's MultiMappedAllocator is only available on Linux, and only if the virtual
 // memory cage is not enabled.
 #if V8_OS_LINUX && !V8_VIRTUAL_MEMORY_CAGE_BOOL
...
@@ -194,6 +194,8 @@ BackingStore::BackingStore(void* buffer_start, size_t byte_length,
   DCHECK_IMPLIES(is_resizable_, free_on_destruct_);
   DCHECK_IMPLIES(!is_wasm_memory && !is_resizable_,
                  byte_length_ == max_byte_length_);
+  DCHECK_GE(max_byte_length_, byte_length_);
+  DCHECK_GE(byte_capacity_, max_byte_length_);
 }
 BackingStore::~BackingStore() {
@@ -323,10 +325,9 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
       counters->array_buffer_new_size_failures()->AddSample(mb_length);
       return {};
     }
-    DCHECK(IsValidBackingStorePointer(buffer_start));
   }
+  DCHECK(IsValidBackingStorePointer(buffer_start));
   auto result = new BackingStore(buffer_start,  // start
                                  byte_length,   // length
                                  byte_length,   // max length
...
@@ -99,8 +99,8 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
   bool free_on_destruct() const { return free_on_destruct_; }
   bool IsEmpty() const {
-    DCHECK_GE(max_byte_length_, byte_length_);
-    return max_byte_length_ == 0;
+    DCHECK_GE(byte_capacity_, byte_length_);
+    return byte_capacity_ == 0;
   }
   enum ResizeOrGrowResult { kSuccess, kFailure, kRace };
...
@@ -36,12 +36,14 @@ void JSArrayBuffer::set_byte_length(size_t value) {
 }
 DEF_GETTER(JSArrayBuffer, backing_store, void*) {
-  return reinterpret_cast<void*>(ReadField<Address>(kBackingStoreOffset));
+  Address value = ReadCagedPointerField(kBackingStoreOffset, cage_base);
+  return reinterpret_cast<void*>(value);
 }
-void JSArrayBuffer::set_backing_store(void* value) {
+void JSArrayBuffer::set_backing_store(Isolate* isolate, void* value) {
   DCHECK(IsValidBackingStorePointer(value));
-  WriteField<Address>(kBackingStoreOffset, reinterpret_cast<Address>(value));
+  Address addr = reinterpret_cast<Address>(value);
+  WriteCagedPointerField(kBackingStoreOffset, isolate, addr);
 }
 std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() const {
@@ -249,16 +251,12 @@ void JSTypedArray::set_length(size_t value) {
 }
 DEF_GETTER(JSTypedArray, external_pointer, Address) {
-  return ReadField<Address>(kExternalPointerOffset);
-}
-DEF_GETTER(JSTypedArray, external_pointer_raw, Address) {
-  return ReadField<Address>(kExternalPointerOffset);
+  return ReadCagedPointerField(kExternalPointerOffset, cage_base);
 }
 void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
   DCHECK(IsValidBackingStorePointer(reinterpret_cast<void*>(value)));
-  WriteField<Address>(kExternalPointerOffset, value);
+  WriteCagedPointerField(kExternalPointerOffset, isolate, value);
 }
 Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
@@ -283,19 +281,17 @@ void JSTypedArray::SetExternalBackingStoreRefForSerialization(uint32_t ref) {
 void JSTypedArray::RemoveExternalPointerCompensationForSerialization(
     Isolate* isolate) {
   DCHECK(is_on_heap());
-  // TODO(v8:10391): once we have an external table, avoid the need for
-  // compensation by replacing external_pointer and base_pointer fields
-  // with one data_pointer field which can point to either external data
-  // backing store or into on-heap backing store.
   Address offset =
       external_pointer() - ExternalPointerCompensationForOnHeapArray(isolate);
-#ifdef V8_HEAP_SANDBOX
-  // Write decompensated offset directly to the external pointer field, thus
-  // allowing the offset to be propagated through serialization-deserialization.
-  WriteField<ExternalPointer_t>(kExternalPointerOffset, offset);
-#else
-  set_external_pointer(isolate, offset);
-#endif
+  WriteField<Address>(kExternalPointerOffset, offset);
+}
+void JSTypedArray::AddExternalPointerCompensationForDeserialization(
+    Isolate* isolate) {
+  DCHECK(is_on_heap());
+  Address pointer = ReadField<Address>(kExternalPointerOffset) +
+                    ExternalPointerCompensationForOnHeapArray(isolate);
+  set_external_pointer(isolate, pointer);
 }
 void* JSTypedArray::DataPtr() {
@@ -322,14 +318,6 @@ void JSTypedArray::SetOffHeapDataPtr(Isolate* isolate, void* base,
   DCHECK_EQ(address, reinterpret_cast<Address>(DataPtr()));
 }
-void JSTypedArray::SetOnHeapDataPtr(Isolate* isolate, HeapObject base,
-                                    Address offset) {
-  set_base_pointer(base);
-  set_external_pointer(
-      isolate, offset + ExternalPointerCompensationForOnHeapArray(isolate));
-  DCHECK_EQ(base.ptr() + offset, reinterpret_cast<Address>(DataPtr()));
-}
 bool JSTypedArray::is_on_heap() const {
   // Keep synced with `is_on_heap(AcquireLoadTag)`.
   DisallowGarbageCollection no_gc;
@@ -378,12 +366,14 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
 }
 DEF_GETTER(JSDataView, data_pointer, void*) {
-  return reinterpret_cast<void*>(ReadField<Address>(kDataPointerOffset));
+  Address value = ReadCagedPointerField(kDataPointerOffset, cage_base);
+  return reinterpret_cast<void*>(value);
 }
-void JSDataView::set_data_pointer(Isolate* isolate, void* value) {
-  DCHECK(IsValidBackingStorePointer(value));
-  WriteField<Address>(kDataPointerOffset, reinterpret_cast<Address>(value));
+void JSDataView::set_data_pointer(Isolate* isolate, void* ptr) {
+  DCHECK(IsValidBackingStorePointer(ptr));
+  Address value = reinterpret_cast<Address>(ptr);
+  WriteCagedPointerField(kDataPointerOffset, isolate, value);
 }
 }  // namespace internal
...
@@ -56,7 +56,7 @@ void JSArrayBuffer::Setup(SharedFlag shared, ResizableFlag resizable,
   }
   set_extension(nullptr);
   if (!backing_store) {
-    set_backing_store(nullptr);
+    set_backing_store(GetIsolate(), EmptyBackingStoreBuffer());
     set_byte_length(0);
     set_max_byte_length(0);
   } else {
@@ -76,7 +76,16 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
       !backing_store->is_wasm_memory() && !backing_store->is_resizable(),
       backing_store->byte_length() == backing_store->max_byte_length());
   DCHECK(!was_detached());
-  set_backing_store(backing_store->buffer_start());
+  DCHECK(IsValidBackingStorePointer(backing_store->buffer_start()));
+  Isolate* isolate = GetIsolate();
+  if (backing_store->IsEmpty()) {
+    set_backing_store(isolate, EmptyBackingStoreBuffer());
+  } else {
+    DCHECK_NE(nullptr, backing_store->buffer_start());
+    set_backing_store(isolate, backing_store->buffer_start());
+  }
   if (is_shared() && is_resizable()) {
     // GSABs need to read their byte_length from the BackingStore. Maintain the
     // invariant that their byte_length field is always 0.
@@ -91,7 +100,7 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
   size_t bytes = backing_store->PerIsolateAccountingLength();
   extension->set_accounting_length(bytes);
   extension->set_backing_store(std::move(backing_store));
-  GetIsolate()->heap()->AppendArrayBufferExtension(*this, extension);
+  isolate->heap()->AppendArrayBufferExtension(*this, extension);
 }
 void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
@@ -120,7 +129,7 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
   DCHECK(!is_shared());
   DCHECK(!is_asmjs_memory());
-  set_backing_store(nullptr);
+  set_backing_store(isolate, EmptyBackingStoreBuffer());
   set_byte_length(0);
   set_was_detached(true);
 }
...
@@ -39,7 +39,7 @@ class JSArrayBuffer
   // [backing_store]: backing memory for this array
+  // It should not be assumed that this will be nullptr for empty ArrayBuffers.
   DECL_GETTER(backing_store, void*)
-  inline void set_backing_store(void* value);
+  inline void set_backing_store(Isolate* isolate, void* value);
   // [extension]: extension object used for GC
   DECL_PRIMITIVE_ACCESSORS(extension, ArrayBufferExtension*)
@@ -289,8 +289,6 @@ class JSTypedArray
   inline void* DataPtr();
   inline void SetOffHeapDataPtr(Isolate* isolate, void* base, Address offset);
-  inline void SetOnHeapDataPtr(Isolate* isolate, HeapObject base,
-                               Address offset);
   // Whether the buffer's backing store is on-heap or off-heap.
   inline bool is_on_heap() const;
@@ -329,6 +327,9 @@ class JSTypedArray
   // Subtracts external pointer compensation from the external pointer value.
   inline void RemoveExternalPointerCompensationForSerialization(
       Isolate* isolate);
+  // Adds external pointer compensation to the external pointer value.
+  inline void AddExternalPointerCompensationForDeserialization(
+      Isolate* isolate);
   static inline MaybeHandle<JSTypedArray> Validate(Isolate* isolate,
                                                    Handle<Object> receiver,
@@ -365,7 +366,6 @@ class JSTypedArray
   inline size_t LengthUnchecked() const;
   DECL_GETTER(external_pointer, Address)
-  DECL_GETTER(external_pointer_raw, ExternalPointer_t)
   DECL_SETTER(base_pointer, Object)
   DECL_RELEASE_SETTER(base_pointer, Object)
...
@@ -630,7 +630,6 @@ MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
   return value;
 }
-#ifdef V8_CAGED_POINTERS
 Address Object::ReadCagedPointerField(size_t offset,
                                       PtrComprCageBase cage_base) const {
   return i::ReadCagedPointerField(field_address(offset), cage_base);
@@ -646,7 +645,6 @@ void Object::WriteCagedPointerField(size_t offset, Isolate* isolate,
   i::WriteCagedPointerField(field_address(offset), PtrComprCageBase(isolate),
                             value);
 }
-#endif  // V8_CAGED_POINTERS
 void Object::InitExternalPointerField(size_t offset, Isolate* isolate) {
   i::InitExternalPointerField(field_address(offset), isolate);
...
@@ -700,16 +700,14 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
   }
   //
-  // CagedPointer field accessors.
+  // CagedPointer_t field accessors.
   //
-#ifdef V8_CAGED_POINTERS
   inline Address ReadCagedPointerField(size_t offset,
                                        PtrComprCageBase cage_base) const;
   inline void WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base,
                                      Address value);
   inline void WriteCagedPointerField(size_t offset, Isolate* isolate,
                                      Address value);
-#endif  // V8_CAGED_POINTERS
   //
   // ExternalPointer_t field accessors.
...
@@ -12,23 +12,27 @@
 namespace v8 {
 namespace internal {
+V8_INLINE Address ReadCagedPointerField(Address field_address,
+                                        PtrComprCageBase cage_base) {
 #ifdef V8_CAGED_POINTERS
-V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
-                                               PtrComprCageBase cage_base) {
   // Caged pointers are currently only used if the sandbox is enabled.
   DCHECK(V8_HEAP_SANDBOX_BOOL);
-  Address caged_pointer = base::ReadUnalignedValue<Address>(field_address);
+  CagedPointer_t caged_pointer =
+      base::ReadUnalignedValue<CagedPointer_t>(field_address);
   Address offset = caged_pointer >> kCagedPointerShift;
   Address pointer = cage_base.address() + offset;
   return pointer;
+#else
+  return base::ReadUnalignedValue<Address>(field_address);
+#endif
 }
 V8_INLINE void WriteCagedPointerField(Address field_address,
                                       PtrComprCageBase cage_base,
-                                      CagedPointer_t pointer) {
+                                      Address pointer) {
+#ifdef V8_CAGED_POINTERS
   // Caged pointers are currently only used if the sandbox is enabled.
   DCHECK(V8_HEAP_SANDBOX_BOOL);
@@ -36,12 +40,13 @@ V8_INLINE void WriteCagedPointerField(Address field_address,
   DCHECK(GetProcessWideVirtualMemoryCage()->Contains(pointer));
   Address offset = pointer - cage_base.address();
-  Address caged_pointer = offset << kCagedPointerShift;
-  base::WriteUnalignedValue<Address>(field_address, caged_pointer);
+  CagedPointer_t caged_pointer = offset << kCagedPointerShift;
+  base::WriteUnalignedValue<CagedPointer_t>(field_address, caged_pointer);
+#else
+  base::WriteUnalignedValue<Address>(field_address, pointer);
+#endif
 }
-#endif  // V8_CAGED_POINTERS
 }  // namespace internal
 }  // namespace v8
...
@@ -10,16 +10,12 @@
 namespace v8 {
 namespace internal {
-#ifdef V8_CAGED_POINTERS
-V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
-                                               PtrComprCageBase cage_base);
+V8_INLINE Address ReadCagedPointerField(Address field_address,
+                                        PtrComprCageBase cage_base);
 V8_INLINE void WriteCagedPointerField(Address field_address,
                                       PtrComprCageBase cage_base,
-                                      CagedPointer_t value);
-#endif  // V8_CAGED_POINTERS
+                                      Address value);
 }  // namespace internal
 }  // namespace v8
...
@@ -11,6 +11,7 @@
 #include "src/base/lazy-instance.h"
 #include "src/base/utils/random-number-generator.h"
 #include "src/flags/flags.h"
+#include "src/security/caged-pointer.h"
 #include "src/utils/allocation.h"
 #if defined(V8_OS_WIN)
@@ -338,6 +339,8 @@ bool V8VirtualMemoryCage::Initialize(v8::PageAllocator* page_allocator,
   initialized_ = true;
   is_fake_cage_ = false;
+  InitializeConstants();
   return true;
 }
@@ -400,9 +403,19 @@ bool V8VirtualMemoryCage::InitializeAsFakeCage(
   cage_page_allocator_ = std::make_unique<FakeBoundedPageAllocator>(
       page_allocator_, base_, size_, reservation_size_);
+  InitializeConstants();
   return true;
 }
+void V8VirtualMemoryCage::InitializeConstants() {
+#ifdef V8_CAGED_POINTERS
+  // Place the empty backing store buffer at the end of the cage, so that any
+  // accidental access to it will most likely hit a guard page.
+  constants_.set_empty_backing_store_buffer(base_ + size_ - 1);
+#endif
+}
 void V8VirtualMemoryCage::TearDown() {
   if (initialized_) {
     cage_page_allocator_.reset();
@@ -416,6 +429,9 @@ void V8VirtualMemoryCage::TearDown() {
     initialized_ = false;
     is_fake_cage_ = false;
     page_allocator_ = nullptr;
+#ifdef V8_CAGED_POINTERS
+    constants_.Reset();
+#endif
   }
   disabled_ = false;
 }
...
@@ -92,6 +92,27 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
     return Contains(reinterpret_cast<Address>(ptr));
   }
+#ifdef V8_CAGED_POINTERS
+  class CagedPointerConstants final {
+   public:
+    Address empty_backing_store_buffer() const {
+      return empty_backing_store_buffer_;
+    }
+    Address empty_backing_store_buffer_address() const {
+      return reinterpret_cast<Address>(&empty_backing_store_buffer_);
+    }
+    void set_empty_backing_store_buffer(Address value) {
+      empty_backing_store_buffer_ = value;
+    }
+    void Reset() { empty_backing_store_buffer_ = 0; }
+
+   private:
+    Address empty_backing_store_buffer_ = 0;
+  };
+  const CagedPointerConstants& constants() const { return constants_; }
+#endif
  private:
   // The SequentialUnmapperTest calls the private Initialize method to create a
   // cage without guard regions, which would otherwise consume too much memory.
@@ -114,6 +135,10 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
   bool InitializeAsFakeCage(v8::PageAllocator* page_allocator, size_t size,
                             size_t size_to_reserve);
+  // Initialize the caged pointer constants for this cage. Called by the
+  // Initialize methods above.
+  void InitializeConstants();
   Address base_ = kNullAddress;
   Address end_ = kNullAddress;
   size_t size_ = 0;
@@ -132,6 +157,11 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
   v8::PageAllocator* page_allocator_ = nullptr;
   // The allocator to allocate pages inside the cage.
   std::unique_ptr<v8::PageAllocator> cage_page_allocator_;
+#ifdef V8_CAGED_POINTERS
+  // CagedPointer constants inside this cage.
+  CagedPointerConstants constants_;
+#endif
 };
 #endif  // V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE
@@ -151,6 +181,16 @@ V8_INLINE bool IsValidBackingStorePointer(void* ptr) {
 #endif
 }
+V8_INLINE void* EmptyBackingStoreBuffer() {
+#ifdef V8_CAGED_POINTERS
+  return reinterpret_cast<void*>(GetProcessWideVirtualMemoryCage()
+                                     ->constants()
+                                     .empty_backing_store_buffer());
+#else
+  return nullptr;
+#endif
+}
 }  // namespace internal
 }  // namespace v8
...
@@ -486,7 +486,7 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
   } else if (InstanceTypeChecker::IsJSDataView(instance_type)) {
     Handle<JSDataView> data_view = Handle<JSDataView>::cast(obj);
     JSArrayBuffer buffer = JSArrayBuffer::cast(data_view->buffer());
-    void* backing_store = nullptr;
+    void* backing_store = EmptyBackingStoreBuffer();
     uint32_t store_index = buffer.GetBackingStoreRefForDeserialization();
     if (store_index != kEmptyBackingStoreRefSentinel) {
       // The backing store of the JSArrayBuffer has not been correctly restored
@@ -501,18 +501,15 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
     Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(obj);
     // Fixup typed array pointers.
     if (typed_array->is_on_heap()) {
-      Address raw_external_pointer = typed_array->external_pointer_raw();
-      typed_array->SetOnHeapDataPtr(
-          main_thread_isolate(), HeapObject::cast(typed_array->base_pointer()),
-          raw_external_pointer);
+      typed_array->AddExternalPointerCompensationForDeserialization(
+          main_thread_isolate());
     } else {
       // Serializer writes backing store ref as a DataPtr() value.
       uint32_t store_index =
           typed_array->GetExternalBackingStoreRefForDeserialization();
       auto backing_store = backing_stores_[store_index];
-      auto start = backing_store
-                       ? reinterpret_cast<byte*>(backing_store->buffer_start())
-                       : nullptr;
+      void* start = backing_store ? backing_store->buffer_start()
+                                  : EmptyBackingStoreBuffer();
       typed_array->SetOffHeapDataPtr(main_thread_isolate(), start,
                                      typed_array->byte_offset());
     }
@@ -523,7 +520,8 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
         kEmptyBackingStoreRefSentinel) {
       new_off_heap_array_buffers_.push_back(buffer);
     } else {
-      buffer->set_backing_store(nullptr);
+      buffer->set_backing_store(main_thread_isolate(),
+                                EmptyBackingStoreBuffer());
     }
   } else if (InstanceTypeChecker::IsBytecodeArray(instance_type)) {
     // TODO(mythria): Remove these once we store the default values for these
...
@@ -545,7 +545,7 @@ void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
   SerializeObject();
-  buffer->set_backing_store(backing_store);
+  buffer->set_backing_store(isolate(), backing_store);
   buffer->set_extension(extension);
 }
...