Commit 05e98e56 authored by Darius Mercadier, committed by Commit Bot

Fix allocation of tiniest object in the tiny freelist

When trying to find a spot in the freelists for a tiniest object,
the tiny freelist was never searched.

This was fixed by modifying FreeList::Allocate in order to handle
that special case.
A test was added in cctest/heap/test-spaces.cc. It allocates a
Tiny object on a new page, then fills up the page, then frees the
first object, and finally tries to allocate a Tiniest
object. Before, this Tiniest object would go on a different page;
now it goes on the same one (which is what the test checks for).

Bug: v8:9329
Change-Id: Ia810726d1bfe1dae4ef2055a7f5b314b1514ee9c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1647162
Commit-Queue: Darius Mercadier <dmercadier@google.com>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62055}
parent 94ae207b
...@@ -3108,10 +3108,20 @@ FreeSpace FreeList::Allocate(size_t size_in_bytes, size_t* node_size) { ...@@ -3108,10 +3108,20 @@ FreeSpace FreeList::Allocate(size_t size_in_bytes, size_t* node_size) {
} }
if (node.is_null() && type != kHuge) { if (node.is_null() && type != kHuge) {
// We didn't find anything in the huge list. Now search the best fitting // We didn't find anything in the huge list.
// free list for a node that has at least the requested size.
type = SelectFreeListCategoryType(size_in_bytes); type = SelectFreeListCategoryType(size_in_bytes);
node = TryFindNodeIn(type, size_in_bytes, node_size);
if (type == kTiniest) {
// For this tiniest object, the tiny list hasn't been searched yet.
// Now searching the tiny list.
node = FindNodeIn(kTiny, size_in_bytes, node_size);
}
if (node.is_null()) {
// Now search the best fitting free list for a node that has at least the
// requested size.
node = TryFindNodeIn(type, size_in_bytes, node_size);
}
} }
if (!node.is_null()) { if (!node.is_null()) {
......
...@@ -1993,6 +1993,7 @@ class FreeList { ...@@ -1993,6 +1993,7 @@ class FreeList {
FreeListCategory* categories_[kNumberOfCategories]; FreeListCategory* categories_[kNumberOfCategories];
friend class FreeListCategory; friend class FreeListCategory;
friend class heap::HeapTester;
}; };
// LocalAllocationBuffer represents a linear allocation area that is created // LocalAllocationBuffer represents a linear allocation area that is created
......
...@@ -52,7 +52,8 @@ ...@@ -52,7 +52,8 @@
V(Regress791582) \ V(Regress791582) \
V(Regress845060) \ V(Regress845060) \
V(RegressMissingWriteBarrierInAllocate) \ V(RegressMissingWriteBarrierInAllocate) \
V(WriteBarriersInCopyJSObject) V(WriteBarriersInCopyJSObject) \
V(AllocateObjTinyFreeList)
#define HEAP_TEST(Name) \ #define HEAP_TEST(Name) \
CcTest register_test_##Name(v8::internal::heap::HeapTester::Test##Name, \ CcTest register_test_##Name(v8::internal::heap::HeapTester::Test##Name, \
......
...@@ -741,6 +741,59 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) { ...@@ -741,6 +741,59 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
CHECK_EQ(0u, shrunk); CHECK_EQ(0u, shrunk);
} }
// Regression test for v8:9329: allocating a "tiniest" object used to skip
// searching the kTiny free list, so the new object landed on a different
// page instead of reusing a tiny-sized hole on an existing one.
HEAP_TEST(AllocateObjTinyFreeList) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  heap::SealCurrentObjects(CcTest::heap());
  // Will hold the page that contains the tiny object.
  Page* tiny_obj_page;
  {
    // Allocate a tiny object (i.e., one that fits in the Tiny freelist).
    // It will go at the beginning of a page.
    // Note that this HandleScope is locally scoped, so the tiny object
    // becomes unreachable once the scope closes.
    {
      HandleScope tiny_scope(isolate);
      size_t tiny_obj_size =
          (FreeList::kTinyListMax - FixedArray::kHeaderSize) / kTaggedSize;
      Handle<FixedArray> tiny_obj = isolate->factory()->NewFixedArray(
          static_cast<int>(tiny_obj_size), AllocationType::kOld);
      // Remember the page of this tiny object.
      tiny_obj_page = Page::FromHeapObject(*tiny_obj);
    }
    // Fill up the rest of the page entirely with padding objects.
    PagedSpace* old_space = CcTest::heap()->old_space();
    int space_remaining =
        static_cast<int>(*old_space->allocation_limit_address() -
                         *old_space->allocation_top_address());
    std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
        old_space->heap(), space_remaining, AllocationType::kOld);
    // Checking that the padding objects were indeed allocated on the same
    // page as the tiny one.
    CHECK_EQ(tiny_obj_page, Page::FromHeapObject(*(handles.back())));
  }
  // Run a GC to reclaim the tiny object (its HandleScope went out of scope
  // above), leaving a tiny-sized hole on the otherwise-full page. The
  // explicit sweep/FreeLinearAllocationArea ensure the hole reaches the
  // free lists before the next allocation.
  CcTest::CollectAllGarbage();
  isolate->heap()->mark_compact_collector()->EnsureSweepingCompleted();
  isolate->heap()->old_space()->FreeLinearAllocationArea();
  // Now allocate a tiniest object.
  // With the fix, it should go in the same place as the previous one.
  size_t tiniest_obj_size =
      (FreeList::kTiniestListMax - FixedArray::kHeaderSize) / kTaggedSize;
  Handle<FixedArray> tiniest_obj = isolate->factory()->NewFixedArray(
      static_cast<int>(tiniest_obj_size), AllocationType::kOld);
  // Check that the new tiniest object is on the same page as the tiny one.
  Page* tiniest_obj_page = Page::FromHeapObject(*tiniest_obj);
  CHECK_EQ(tiny_obj_page, tiniest_obj_page);
}
} // namespace heap } // namespace heap
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment