Commit cda9a103 authored by Anton Bikineev, committed by V8 LUCI CQ

cppgc: Optimize compression by removing the branch and or-instruction

This is done by making sure that LSB of the cage-base is 1. This way we
know that on compression normal pointers after the shift will have the
MSB set to 1.

Bug: chromium:1325007
Change-Id: I8699aaa464f1a8c18d2092f5eb474266fb409fcb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3688399
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81039}
parent 8961f42f
...@@ -100,14 +100,11 @@ class CompressedPointer final { ...@@ -100,14 +100,11 @@ class CompressedPointer final {
(reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask)); (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
const auto uptr = reinterpret_cast<uintptr_t>(ptr); const auto uptr = reinterpret_cast<uintptr_t>(ptr);
// Truncate the pointer and shift right by one. // Shift the pointer by one and truncate.
auto compressed = static_cast<Storage>(uptr) >> 1; auto compressed = static_cast<Storage>(uptr >> 1);
// If the pointer is regular, set the most significant bit. // Normal compressed pointers must have the MSB set.
if (V8_LIKELY(compressed > 1)) { CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
CPPGC_DCHECK((reinterpret_cast<uintptr_t>(ptr) & (compressed & 0x80000000));
(api_constants::kAllocationGranularity - 1)) == 0);
compressed |= 0x80000000;
}
return compressed; return compressed;
} }
......
...@@ -47,13 +47,35 @@ VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) { ...@@ -47,13 +47,35 @@ VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
static constexpr size_t kAllocationTries = 4; static constexpr size_t kAllocationTries = 4;
for (size_t i = 0; i < kAllocationTries; ++i) { for (size_t i = 0; i < kAllocationTries; ++i) {
#if defined(CPPGC_POINTER_COMPRESSION)
// If pointer compression is enabled, reserve 2x of cage size and leave the
// half that has the least significant bit of the most significant halfword
// set. This is needed for compression to make sure that compressed normal
// pointers have the most significant bit set to 1, so that on decompression
// the bit will be sign-extended. This saves us a branch and 'or' operation
// during compression.
static constexpr size_t kTryReserveSize = 2 * kCagedHeapReservationSize;
static constexpr size_t kTryReserveAlignment =
2 * kCagedHeapReservationAlignment;
#else // !defined(CPPGC_POINTER_COMPRESSION)
static constexpr size_t kTryReserveSize = kCagedHeapReservationSize;
static constexpr size_t kTryReserveAlignment =
kCagedHeapReservationAlignment;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
void* hint = reinterpret_cast<void*>(RoundDown( void* hint = reinterpret_cast<void*>(RoundDown(
reinterpret_cast<uintptr_t>(platform_allocator.GetRandomMmapAddr()), reinterpret_cast<uintptr_t>(platform_allocator.GetRandomMmapAddr()),
kCagedHeapReservationAlignment)); kTryReserveAlignment));
VirtualMemory memory(&platform_allocator, kCagedHeapReservationSize, VirtualMemory memory(&platform_allocator, kTryReserveSize,
kCagedHeapReservationAlignment, hint); kTryReserveAlignment, hint);
if (memory.IsReserved()) return memory; if (memory.IsReserved()) {
#if defined(CPPGC_POINTER_COMPRESSION)
VirtualMemory second_half = memory.Split(kCagedHeapReservationSize);
return second_half;
#else // !defined(CPPGC_POINTER_COMPRESSION)
return memory;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
}
} }
FATAL("Fatal process out of memory: Failed to reserve memory for caged heap"); FATAL("Fatal process out of memory: Failed to reserve memory for caged heap");
......
...@@ -47,6 +47,21 @@ VirtualMemory& VirtualMemory::operator=(VirtualMemory&& other) V8_NOEXCEPT { ...@@ -47,6 +47,21 @@ VirtualMemory& VirtualMemory::operator=(VirtualMemory&& other) V8_NOEXCEPT {
return *this; return *this;
} }
// Shrinks this reservation to its first |size| bytes and hands back the tail
// as a fresh VirtualMemory object. |size| must be non-zero, strictly smaller
// than the current reservation, and commit-page aligned, so both halves remain
// valid page-allocator regions.
VirtualMemory VirtualMemory::Split(size_t size) {
  DCHECK_GT(size, 0u);
  DCHECK_LT(size, size_);
  DCHECK(IsAligned(size, page_allocator_->CommitPageSize()));
  // The residual half shares the allocator and covers [start_ + size, end).
  VirtualMemory residual;
  residual.page_allocator_ = page_allocator_;
  residual.start_ = reinterpret_cast<uint8_t*>(start_) + size;
  residual.size_ = size_ - size;
  // Truncate this reservation to the leading half.
  size_ = size;
  return residual;
}
void VirtualMemory::Reset() { void VirtualMemory::Reset() {
start_ = nullptr; start_ = nullptr;
size_ = 0; size_ = 0;
......
...@@ -32,6 +32,9 @@ class V8_EXPORT_PRIVATE VirtualMemory { ...@@ -32,6 +32,9 @@ class V8_EXPORT_PRIVATE VirtualMemory {
VirtualMemory(VirtualMemory&&) V8_NOEXCEPT; VirtualMemory(VirtualMemory&&) V8_NOEXCEPT;
VirtualMemory& operator=(VirtualMemory&&) V8_NOEXCEPT; VirtualMemory& operator=(VirtualMemory&&) V8_NOEXCEPT;
// Splits the current reservation and returns the residual one.
VirtualMemory Split(size_t size);
// Returns whether the memory has been reserved. // Returns whether the memory has been reserved.
bool IsReserved() const { return start_ != nullptr; } bool IsReserved() const { return start_ != nullptr; }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment