Commit e0f40862 authored by Michael Lippautz, committed by Commit Bot

cppgc: Add explicit Resize() call

Resize() may be used to adjust the additional trailing bytes of an object.
It is up to the embedder to ensure correctness in case of shrinking.
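
A hypothetical usage sketch (illustrative only; `InlinedString`, the
`allocation_handle` variable, and the byte counts are assumptions, not part
of this change):

  // InlinedString: a made-up GarbageCollected type that keeps its data in
  // trailing inline storage reserved via AdditionalBytes at allocation time.
  auto* str = cppgc::MakeGarbageCollected<InlinedString>(
      allocation_handle, cppgc::AdditionalBytes(16));
  // Try to grow the trailing storage to 32 bytes on top of
  // sizeof(InlinedString); returns false if the object could not be resized
  // in place.
  if (cppgc::subtle::Resize(str, cppgc::AdditionalBytes(32))) {
    // str now provides sizeof(InlinedString) + 32 bytes.
  }
  // Shrinking: the embedder must not touch the reclaimed bytes afterwards;
  // doing so is a use-after-free.
  cppgc::subtle::Resize(str, cppgc::AdditionalBytes(8));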

Bug: chromium:1056170
Change-Id: I954df6c7440b77275cd62e4b802e8f5d39c06f9d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2739652
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73277}
parent 9658615e
@@ -7,6 +7,7 @@
#include <cstddef>
#include "cppgc/allocation.h"
#include "cppgc/internal/logging.h"
#include "cppgc/type-traits.h"
@@ -14,6 +15,7 @@ namespace cppgc {
namespace internal {
V8_EXPORT void FreeUnreferencedObject(void*);
V8_EXPORT bool Resize(void*, size_t);
} // namespace internal
@@ -39,6 +41,33 @@ void FreeUnreferencedObject(T* object) {
internal::FreeUnreferencedObject(object);
}
/**
* Tries to resize `object` of type `T` with additional bytes on top of
* sizeof(T). Resizing is only useful with trailing inlined storage, see e.g.
* `MakeGarbageCollected(AllocationHandle&, AdditionalBytes)`.
*
* `Resize()` performs growing or shrinking as needed and may skip the
* operation for internal reasons; see the return value.
*
* It is up to the embedder to guarantee that, when shrinking an object, the
* reclaimed area is no longer used. Any subsequent use of the reclaimed area
* results in a use-after-free.
*
* \param object Reference to an object that is of type `GarbageCollected` and
* should be resized.
* \param additional_bytes Bytes in addition to sizeof(T) that the object should
* provide.
* \returns true when the operation was successful and the result can be relied
* on, and false otherwise.
*/
template <typename T>
bool Resize(T* object, AdditionalBytes additional_bytes) {
static_assert(IsGarbageCollectedTypeV<T>,
"Object must be of type GarbageCollected.");
if (!object) return true;
return internal::Resize(object, sizeof(T) + additional_bytes.value);
}
} // namespace subtle
} // namespace cppgc
......
@@ -9,13 +9,14 @@
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/sanitizers.h"
namespace cppgc {
namespace internal {
namespace {
-std::pair<bool, BasePage*> CanExplicitlyFree(void* object) {
+std::pair<bool, BasePage*> CanModifyObject(void* object) {
// object is guaranteed to be of type GarbageCollected, so getting the
// BasePage is okay for regular and large objects.
auto* base_page = BasePage::FromPayload(object);
@@ -32,7 +33,7 @@ std::pair<bool, BasePage*> CanExplicitlyFree(void* object) {
void FreeUnreferencedObject(void* object) {
bool can_free;
BasePage* base_page;
-  std::tie(can_free, base_page) = CanExplicitlyFree(object);
+  std::tie(can_free, base_page) = CanModifyObject(object);
if (!can_free) {
return;
}
@@ -62,5 +63,89 @@ void FreeUnreferencedObject(void* object) {
}
}
namespace {
bool Grow(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
size_t size_delta) {
DCHECK_GE(new_size, header.GetSize() + kAllocationGranularity);
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
if (lab.start() == header.PayloadEnd() && lab.size() >= size_delta) {
// LABs are considered used memory which means that no allocated size
// adjustments are needed.
Address delta_start = lab.Allocate(size_delta);
SET_MEMORY_ACCESSIBLE(delta_start, size_delta);
header.SetSize(new_size);
return true;
}
return false;
}
bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
size_t size_delta) {
DCHECK_GE(header.GetSize(), new_size + kAllocationGranularity);
DCHECK_GE(size_delta, kAllocationGranularity);
DCHECK(!base_page.is_large());
auto& normal_space = *static_cast<NormalPageSpace*>(base_page.space());
auto& lab = normal_space.linear_allocation_buffer();
Address free_start = header.PayloadEnd() - size_delta;
if (lab.start() == header.PayloadEnd()) {
DCHECK_EQ(free_start, lab.start() - size_delta);
// LABs are considered used memory which means that no allocated size
// adjustments are needed.
lab.Set(free_start, lab.size() + size_delta);
SET_MEMORY_INACCESSIBLE(lab.start(), size_delta);
header.SetSize(new_size);
return true;
}
// Heuristic: Only return memory to the free list if the block is larger than
// the smallest size class.
if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
SET_MEMORY_INACCESSIBLE(free_start, size_delta);
base_page.heap()->stats_collector()->NotifyExplicitFree(size_delta);
normal_space.free_list().Add({free_start, size_delta});
header.SetSize(new_size);
}
// Return success in any case, as we want to avoid having embedders copy
// memory because of small deltas.
return true;
}
} // namespace
bool Resize(void* object, size_t new_object_size) {
bool can_resize;
BasePage* base_page;
std::tie(can_resize, base_page) = CanModifyObject(object);
if (!can_resize) {
return false;
}
// TODO(chromium:1056170): Consider supporting large objects within certain
// restrictions.
if (base_page->is_large()) {
return false;
}
const size_t new_size = RoundUp<kAllocationGranularity>(
sizeof(HeapObjectHeader) + new_object_size);
auto& header = HeapObjectHeader::FromPayload(object);
const size_t old_size = header.GetSize();
if (new_size > old_size) {
return Grow(header, *base_page, new_size, new_size - old_size);
} else if (old_size > new_size) {
return Shrink(header, *base_page, new_size, old_size - new_size);
}
// Same size considering internal restrictions, e.g. alignment.
return true;
}
} // namespace internal
} // namespace cppgc
@@ -212,7 +212,7 @@ size_t HeapObjectHeader::GetSize() const {
void HeapObjectHeader::SetSize(size_t size) {
DCHECK(!IsMarked());
-  encoded_low_ |= EncodeSize(size);
+  encoded_low_ = EncodeSize(size);
}
template <AccessMode mode>
......
@@ -100,6 +100,8 @@ void* AllocateLargeObject(PageBackend* page_backend, LargePageSpace* space,
} // namespace
constexpr size_t ObjectAllocator::kSmallestSpaceSize;
ObjectAllocator::ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
StatsCollector* stats_collector)
: raw_heap_(heap),
......
@@ -31,6 +31,8 @@ class PageBackend;
class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
public:
static constexpr size_t kSmallestSpaceSize = 32;
ObjectAllocator(RawHeap* heap, PageBackend* page_backend,
StatsCollector* stats_collector);
@@ -85,8 +87,10 @@ void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
// static
RawHeap::RegularSpaceType ObjectAllocator::GetInitialSpaceIndexForSize(
size_t size) {
static_assert(kSmallestSpaceSize == 32,
"should be half the next larger size");
if (size < 64) {
-    if (size < 32) return RawHeap::RegularSpaceType::kNormal1;
+    if (size < kSmallestSpaceSize) return RawHeap::RegularSpaceType::kNormal1;
return RawHeap::RegularSpaceType::kNormal2;
}
if (size < 128) return RawHeap::RegularSpaceType::kNormal3;
......
@@ -115,5 +115,74 @@ TEST_F(ExplicitManagementTest, FreeNull) {
subtle::FreeUnreferencedObject(o);
}
TEST_F(ExplicitManagementTest, GrowAtLAB) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
auto& header = HeapObjectHeader::FromPayload(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
constexpr size_t kFirstDelta = 8;
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(kFirstDelta)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o + kFirstDelta),
header.ObjectSize());
constexpr size_t kSecondDelta = 9;
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(kSecondDelta)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o + kSecondDelta),
header.ObjectSize());
// Second round didn't actually grow the object because alignment restrictions
// already forced it to be large enough on the first Grow().
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o + kFirstDelta),
RoundUp<kAllocationGranularity>(size_of_o + kSecondDelta));
constexpr size_t kThirdDelta = 16;
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(kThirdDelta)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o + kThirdDelta),
header.ObjectSize());
}
TEST_F(ExplicitManagementTest, GrowShrinkAtLAB) {
auto* o =
MakeGarbageCollected<DynamicallySized>(GetHeap()->GetAllocationHandle());
auto& header = HeapObjectHeader::FromPayload(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
constexpr size_t kDelta = 27;
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(kDelta)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o + kDelta),
header.ObjectSize());
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o), header.ObjectSize());
}
TEST_F(ExplicitManagementTest, ShrinkFreeList) {
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(ObjectAllocator::kSmallestSpaceSize));
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromPayload(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(size_of_o), header.ObjectSize());
EXPECT_TRUE(space->free_list().ContainsForTesting(
{header.PayloadEnd(), ObjectAllocator::kSmallestSpaceSize}));
}
TEST_F(ExplicitManagementTest, ShrinkFreeListBailoutAvoidFragmentation) {
auto* o = MakeGarbageCollected<DynamicallySized>(
GetHeap()->GetAllocationHandle(),
AdditionalBytes(ObjectAllocator::kSmallestSpaceSize - 1));
const auto* space = NormalPageSpace::From(BasePage::FromPayload(o)->space());
// Force returning to free list by removing the LAB.
ResetLinearAllocationBuffers();
auto& header = HeapObjectHeader::FromPayload(o);
constexpr size_t size_of_o = sizeof(DynamicallySized);
EXPECT_TRUE(subtle::Resize(o, AdditionalBytes(0)));
EXPECT_EQ(RoundUp<kAllocationGranularity>(
size_of_o + ObjectAllocator::kSmallestSpaceSize - 1),
header.ObjectSize());
EXPECT_FALSE(space->free_list().ContainsForTesting(
{header.Payload() + RoundUp<kAllocationGranularity>(size_of_o),
ObjectAllocator::kSmallestSpaceSize - 1}));
}
} // namespace internal
} // namespace cppgc