Commit b15aa56c authored by Samuel Groß, committed by V8 LUCI CQ

[sandbox] Clean up sandbox API

This CL removes some deprecated sandbox APIs and introduces new ones, in
particular IsSandboxInitialized and GetSandboxReservationSizeInBytes. In
addition, this CL also adds comments to the various public methods of the
Sandbox class.

Bug: v8:10391
Change-Id: If5c3081a0b9f7f192966150a0d2716099357363a
Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3647362
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Samuel Groß <saelo@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80544}
parent 07a76e37
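
As a rough illustration of the API surface after this change, the following embedder-side sketch (not part of the CL) exercises the remaining and newly added methods. It assumes a build with V8_ENABLE_SANDBOX and the standard libplatform setup; only methods visible in the diff below are used.

// Illustrative embedder-side sketch (not part of this CL): exercising the
// cleaned-up sandbox API. Assumes a build with V8_ENABLE_SANDBOX and the
// standard libplatform setup.
#include <cstdio>
#include <memory>

#include "include/libplatform/libplatform.h"
#include "include/v8-initialization.h"

int main() {
  std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
  v8::V8::InitializePlatform(platform.get());

#if defined(V8_ENABLE_SANDBOX)
  // Explicitly set up the sandbox; the new IsSandboxInitialized() makes the
  // resulting state queryable.
  if (!v8::V8::InitializeSandbox()) {
    std::fprintf(stderr, "sandbox initialization failed\n");
    return 1;
  }
  if (v8::V8::IsSandboxInitialized()) {
    std::printf("sandbox size:        %zu bytes\n",
                v8::V8::GetSandboxSizeInBytes());
    std::printf("reservation size:    %zu bytes\n",
                v8::V8::GetSandboxReservationSizeInBytes());
    std::printf("configured securely: %d\n",
                static_cast<int>(v8::V8::IsSandboxConfiguredSecurely()));
  }
#endif  // V8_ENABLE_SANDBOX

  v8::V8::Initialize();
  // ... create isolates, compile and run scripts ...
  v8::V8::Dispose();
  v8::V8::DisposePlatform();
  return 0;
}

Since the deprecated VirtualMemoryCage aliases are removed by this CL, embedders have to use the Sandbox-named entry points shown above.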
@@ -186,12 +186,6 @@ class V8_EXPORT V8 {
   static void DisposePlatform();
 
 #if defined(V8_ENABLE_SANDBOX)
-  //
-  // Sandbox related API.
-  //
-  // This API is not yet stable and subject to changes in the future.
-  //
-
   /**
    * Initializes the V8 sandbox.
    *
@@ -205,8 +199,23 @@ class V8_EXPORT V8 {
    * removed.
    */
   static bool InitializeSandbox();
-  V8_DEPRECATED("Use InitializeSandbox()")
-  static bool InitializeVirtualMemoryCage() { return InitializeSandbox(); }
+
+  /**
+   * Returns true if the sandbox has been initialized, false otherwise.
+   */
+  static bool IsSandboxInitialized();
+
+  /**
+   * Returns true if the sandbox is configured securely.
+   *
+   * If V8 cannot create a regular sandbox during initialization, for example
+   * because not enough virtual address space can be reserved, it will instead
+   * create a fallback sandbox that still allows it to function normally but
+   * does not have the same security properties as a regular sandbox. This API
+   * can be used to determine if such a fallback sandbox is being used, in
+   * which case it will return false.
+   */
+  static bool IsSandboxConfiguredSecurely();
 
   /**
    * Provides access to the virtual address subspace backing the sandbox.
@@ -223,34 +232,29 @@ class V8_EXPORT V8 {
    * This function must only be called after initializing the sandbox.
    */
   static VirtualAddressSpace* GetSandboxAddressSpace();
-  V8_DEPRECATED("Use GetSandboxAddressSpace()")
-  static PageAllocator* GetVirtualMemoryCagePageAllocator();
 
   /**
    * Returns the size of the sandbox in bytes.
    *
+   * This represents the size of the address space that V8 can directly address
+   * and in which it allocates its objects.
+   *
    * If the sandbox has not been initialized, or if the initialization failed,
    * this returns zero.
    */
   static size_t GetSandboxSizeInBytes();
-  V8_DEPRECATED("Use GetSandboxSizeInBytes()")
-  static size_t GetVirtualMemoryCageSizeInBytes() {
-    return GetSandboxSizeInBytes();
-  }
 
   /**
-   * Returns whether the sandbox is configured securely.
+   * Returns the size of the address space reservation backing the sandbox.
    *
-   * If V8 cannot create a proper sandbox, it will fall back to creating a
-   * sandbox that doesn't have the desired security properties but at least
-   * still allows V8 to function. This API can be used to determine if such an
-   * insecure sandbox is being used, in which case it will return false.
+   * This may be larger than the sandbox (i.e. |GetSandboxSizeInBytes()|) due
+   * to surrounding guard regions, or may be smaller than the sandbox in case a
+   * fallback sandbox is being used, which will use a smaller virtual address
+   * space reservation. In the latter case this will also be different from
+   * |GetSandboxAddressSpace()->size()| as that will cover a larger part of the
+   * address space than what has actually been reserved.
    */
-  static bool IsSandboxConfiguredSecurely();
-  V8_DEPRECATED("Use IsSandboxConfiguredSecurely()")
-  static bool IsUsingSecureVirtualMemoryCage() {
-    return IsSandboxConfiguredSecurely();
-  }
+  static size_t GetSandboxReservationSizeInBytes();
 
 #endif  // V8_ENABLE_SANDBOX
 
   /**
......
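
The size relationships documented in the new header comments can be summarized as a few checks. The sketch below is illustrative only (it is not code from this CL) and assumes what the comments state: a regular sandbox is surrounded by guard regions, while the fallback case uses a smaller reservation backed by an emulated subspace.

// Illustrative only: the size relationships documented above, written as
// checks an embedder could perform once the sandbox has been initialized.
#include <cassert>
#include <cstddef>

#include "include/v8-initialization.h"

void CheckSandboxSizeRelations() {
  assert(v8::V8::IsSandboxInitialized());

  const std::size_t size = v8::V8::GetSandboxSizeInBytes();
  const std::size_t reservation = v8::V8::GetSandboxReservationSizeInBytes();
  const std::size_t subspace = v8::V8::GetSandboxAddressSpace()->size();

  if (v8::V8::IsSandboxConfiguredSecurely()) {
    // Regular sandbox: the reservation covers the sandbox plus its
    // surrounding guard regions.
    assert(reservation >= size);
  } else {
    // Partially-reserved fallback: less address space was actually reserved,
    // while the emulated subspace still spans the full sandbox size.
    assert(reservation < size);
    assert(subspace > reservation);
  }
}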
@@ -6213,13 +6213,6 @@ VirtualAddressSpace* v8::V8::GetSandboxAddressSpace() {
   return i::GetProcessWideSandbox()->address_space();
 }
 
-PageAllocator* v8::V8::GetVirtualMemoryCagePageAllocator() {
-  Utils::ApiCheck(i::GetProcessWideSandbox()->is_initialized(),
-                  "v8::V8::GetVirtualMemoryCagePageAllocator",
-                  "The sandbox must be initialized first.");
-  return i::GetProcessWideSandbox()->page_allocator();
-}
-
 size_t v8::V8::GetSandboxSizeInBytes() {
   if (!i::GetProcessWideSandbox()->is_initialized()) {
     return 0;
@@ -6228,14 +6221,20 @@ size_t v8::V8::GetSandboxSizeInBytes() {
   }
 }
 
+size_t v8::V8::GetSandboxReservationSizeInBytes() {
+  Utils::ApiCheck(i::GetProcessWideSandbox()->is_initialized(),
+                  "v8::V8::GetSandboxReservationSizeInBytes",
+                  "The sandbox must be initialized first.");
+  return i::GetProcessWideSandbox()->reservation_size();
+}
+
 bool v8::V8::IsSandboxConfiguredSecurely() {
   Utils::ApiCheck(i::GetProcessWideSandbox()->is_initialized(),
                   "v8::V8::IsSandoxConfiguredSecurely",
                   "The sandbox must be initialized first.");
-  // TODO(saelo) For now, we only treat a partially reserved sandbox as
-  // insecure. Once we use sandboxed pointers, which assume that the sandbox
-  // has a fixed size, we'll also treat sandboxes with a smaller size as
-  // insecure because these pointers can then access memory outside of them.
+  // The sandbox is (only) configured insecurely if it is a partially reserved
+  // sandbox, since in that case unrelated memory mappings may end up inside
+  // the sandbox address space where they could be corrupted by an attacker.
  return !i::GetProcessWideSandbox()->is_partially_reserved();
 }
 
 #endif  // V8_ENABLE_SANDBOX
......
@@ -124,10 +124,11 @@ void V8::Initialize() {
   CHECK(platform_);
 
 #ifdef V8_ENABLE_SANDBOX
-  if (!GetProcessWideSandbox()->is_initialized()) {
-    // For now, we still allow the cage to be disabled even if V8 was compiled
-    // with V8_ENABLE_SANDBOX. This will eventually be forbidden.
-    CHECK(kAllowBackingStoresOutsideSandbox);
+  if (!kAllowBackingStoresOutsideSandbox) {
+    CHECK(GetProcessWideSandbox()->is_initialized());
+  } else if (!GetProcessWideSandbox()->is_initialized()) {
+    // For now, we still allow the sandbox to be disabled even if V8 was
+    // compiled with V8_SANDBOX. This will eventually be forbidden.
     GetProcessWideSandbox()->Disable();
   }
 #endif  // V8_ENABLE_SANDBOX
......
@@ -223,10 +223,10 @@ bool Sandbox::Initialize(v8::VirtualAddressSpace* vas, size_t size,
                                        address_space_.get());
 
   initialized_ = true;
-  is_partially_reserved_ = false;
 
   InitializeConstants();
 
+  DCHECK(!is_partially_reserved());
   return true;
 }
@@ -283,7 +283,6 @@ bool Sandbox::InitializeAsPartiallyReservedSandbox(v8::VirtualAddressSpace* vas,
   end_ = base_ + size_;
   reservation_size_ = size_to_reserve;
   initialized_ = true;
-  is_partially_reserved_ = true;
   address_space_ = std::make_unique<base::EmulatedVirtualAddressSubspace>(
       vas, reservation_base_, reservation_size_, size_);
   sandbox_page_allocator_ =
@@ -292,6 +291,7 @@ bool Sandbox::InitializeAsPartiallyReservedSandbox(v8::VirtualAddressSpace* vas,
 
   InitializeConstants();
 
+  DCHECK(is_partially_reserved());
   return true;
 }
@@ -314,7 +314,6 @@ void Sandbox::TearDown() {
   reservation_base_ = kNullAddress;
   reservation_size_ = 0;
   initialized_ = false;
-  is_partially_reserved_ = false;
 #ifdef V8_SANDBOXED_POINTERS
   constants_.Reset();
 #endif
......
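
The sandbox.cc changes above, together with the sandbox.h hunk below, replace the stored is_partially_reserved_ flag with a predicate derived from fields that already encode the information. A minimal standalone sketch of that pattern follows; SandboxSketch and its members are hypothetical stand-ins rather than V8 classes, and the guard-region arithmetic mirrors the layout diagram in the header.

// Minimal sketch of deriving is_partially_reserved() instead of storing it.
// SandboxSketch and its fields are hypothetical stand-ins, not V8 code.
#include <cstddef>

class SandboxSketch {
 public:
  void InitializeFully(std::size_t size, std::size_t guard_region_size) {
    size_ = size;
    // A regular sandbox reserves the sandbox itself plus guard regions on
    // both sides, so the reservation is at least as large as the sandbox.
    reservation_size_ = size + 2 * guard_region_size;
  }

  void InitializeAsPartiallyReserved(std::size_t size,
                                     std::size_t size_to_reserve) {
    size_ = size;
    reservation_size_ = size_to_reserve;  // Smaller than |size|.
  }

  // Derived predicate: no separate boolean to keep in sync or reset during
  // tear-down; the fields that define the state answer the question.
  bool is_partially_reserved() const { return reservation_size_ < size_; }

 private:
  std::size_t size_ = 0;
  std::size_t reservation_size_ = 0;
};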
@@ -50,46 +50,127 @@ class V8_EXPORT_PRIVATE Sandbox {
   // | (front) |    Region     :   any other sandboxed objects.     | (back)  |
   // +- ~~~ -+----------------+----------------------- ~~~ -+- ~~~ -+
   //         ^                                               ^
-  //        base                                         base + size
+  //        base                                             end
+  //         < - - - - - - - - - - - size - - - - - - - - - - >
+  // < - - - - - - - - - - - - - reservation_size - - - - - - - - - - - - >
 
   Sandbox() = default;
   Sandbox(const Sandbox&) = delete;
   Sandbox& operator=(Sandbox&) = delete;
 
+  /**
+   * Initializes this sandbox.
+   *
+   * This will allocate the virtual address subspace for the sandbox inside the
+   * provided virtual address space. If a subspace of the required size cannot
+   * be allocated, this method will instead initialize this sandbox as a
+   * partially-reserved sandbox. In that case, a smaller virtual address space
+   * reservation will be used and an EmulatedVirtualAddressSubspace instance
+   * will be created on top of it to back the sandbox.
+   */
   bool Initialize(v8::VirtualAddressSpace* vas);
+
+  /**
+   * Disable this sandbox.
+   *
+   * TODO(saelo) remove this once the sandbox is mandatory when enabled at
+   * compile time.
+   */
   void Disable() {
     CHECK(!initialized_);
     disabled_ = true;
   }
 
+  /**
+   * Tear down this sandbox.
+   *
+   * This will free the virtual address subspace backing this sandbox.
+   */
   void TearDown();
 
+  /**
+   * Returns true if this sandbox has been initialized successfully.
+   */
   bool is_initialized() const { return initialized_; }
+
+  /**
+   * Whether this sandbox is enabled or not.
+   *
+   * TODO(saelo) remove these two once the sandbox is mandatory when enabled at
+   * compile time.
+   */
   bool is_disabled() const { return disabled_; }
   bool is_enabled() const { return !disabled_; }
-  bool is_partially_reserved() const { return is_partially_reserved_; }
+
+  /**
+   * Returns true if this sandbox is a partially-reserved sandbox.
+   *
+   * A partially-reserved sandbox is backed by a virtual address space
+   * reservation that is smaller than its size. It also does not have guard
+   * regions surrounding it. A partially-reserved sandbox is usually created if
+   * not enough virtual address space could be reserved for the sandbox during
+   * initialization. In such a configuration, unrelated memory mappings may end
+   * up inside the sandbox, which affects its security properties.
+   */
+  bool is_partially_reserved() const { return reservation_size_ < size_; }
 
+  /**
+   * The base address of the sandbox.
+   *
+   * This is the start of the address space region that is directly addressable
+   * by V8. In practice, this means the start of the part of the sandbox
+   * address space between the surrounding guard regions.
+   */
   Address base() const { return base_; }
+
+  /**
+   * The address right after the end of the sandbox.
+   *
+   * This is equal to |base| + |size|.
+   */
   Address end() const { return end_; }
+
+  /**
+   * The size of the sandbox in bytes.
+   */
   size_t size() const { return size_; }
 
-  Address base_address() const { return reinterpret_cast<Address>(&base_); }
-  Address end_address() const { return reinterpret_cast<Address>(&end_); }
-  Address size_address() const { return reinterpret_cast<Address>(&size_); }
+  /**
+   * The size of the virtual address space reservation backing the sandbox.
+   *
+   * This can be larger than |size| as it contains the surrounding guard
+   * regions as well, or can be smaller than |size| in the case of a
+   * partially-reserved sandbox.
+   */
+  size_t reservation_size() const { return reservation_size_; }
 
+  /**
+   * The virtual address subspace backing this sandbox.
+   *
+   * This can be used to allocate and manage memory pages inside the sandbox.
+   */
+  v8::VirtualAddressSpace* address_space() const {
+    return address_space_.get();
+  }
+
+  /**
+   * Returns a PageAllocator instance that allocates pages inside the sandbox.
+   */
   v8::PageAllocator* page_allocator() const {
     return sandbox_page_allocator_.get();
   }
 
-  v8::VirtualAddressSpace* address_space() const {
-    return address_space_.get();
-  }
-
+  /**
+   * Returns true if the given address lies within the sandbox address space.
+   */
   bool Contains(Address addr) const {
     return addr >= base_ && addr < base_ + size_;
   }
 
+  /**
+   * Returns true if the given pointer points into the sandbox address space.
+   */
   bool Contains(void* ptr) const {
     return Contains(reinterpret_cast<Address>(ptr));
   }
@@ -115,6 +196,10 @@ class V8_EXPORT_PRIVATE Sandbox {
   const SandboxedPointerConstants& constants() const { return constants_; }
 #endif
 
+  Address base_address() const { return reinterpret_cast<Address>(&base_); }
+  Address end_address() const { return reinterpret_cast<Address>(&end_); }
+  Address size_address() const { return reinterpret_cast<Address>(&size_); }
+
  private:
   // The SequentialUnmapperTest calls the private Initialize method to create a
   // sandbox without guard regions, which would consume too much memory.

@@ -153,13 +238,12 @@ class V8_EXPORT_PRIVATE Sandbox {
 
   // Base and size of the virtual memory reservation backing this sandbox.
   // These can be different from the sandbox base and size due to guard regions
-  // or when a fake sandbox is used.
+  // or when a partially-reserved sandbox is used.
   Address reservation_base_ = kNullAddress;
   size_t reservation_size_ = 0;
 
   bool initialized_ = false;
   bool disabled_ = false;
-  bool is_partially_reserved_ = false;
 
   // The virtual address subspace backing the sandbox.
   std::unique_ptr<v8::VirtualAddressSpace> address_space_;
......