Commit 72d6dc6d authored by Omer Katz, committed by V8 LUCI CQ

[heap] Do precise search in free list for new space

In case the free list fast path fails, do a precise search through the
precise category for the current allocation.

Bug: v8:12612
Change-Id: I120e64b0d09b9cf5a776188180d6e6c53c44886b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3879494
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83096}
parent 85b5fbc7
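For intuition, here is a minimal, self-contained C++ sketch of the pattern this commit adds (names such as SizeClassFreeList are hypothetical, not V8's API): the fast path pops from the first category whose blocks are all guaranteed to fit, so it can fail even while the request's own category still holds a large-enough block; the precise fallback then walks exactly that category.

// Minimal sketch of "fast path, then precise search" on a size-segregated
// free list. Hypothetical types; not V8's implementation.
#include <cstddef>
#include <iostream>
#include <vector>

struct SizeClassFreeList {
  // categories[c] holds the sizes of free blocks bucketed into category c.
  std::vector<std::vector<size_t>> categories;

  explicit SizeClassFreeList(size_t n) : categories(n) {}

  // Fast path: pop from the first category whose blocks are all large
  // enough. May fail even though the exact category has a fitting block.
  bool FastAllocate(size_t size, size_t category, size_t* out) {
    for (size_t c = category + 1; c < categories.size(); ++c) {
      if (!categories[c].empty()) {
        *out = categories[c].back();
        categories[c].pop_back();
        return true;
      }
    }
    return false;
  }

  // Precise fallback: walk the exact category and take the first block
  // that actually fits the request.
  bool PreciseAllocate(size_t size, size_t category, size_t* out) {
    auto& blocks = categories[category];
    for (size_t i = 0; i < blocks.size(); ++i) {
      if (blocks[i] >= size) {
        *out = blocks[i];
        blocks.erase(blocks.begin() + i);
        return true;
      }
    }
    return false;
  }

  bool Allocate(size_t size, size_t category, size_t* out) {
    if (FastAllocate(size, category, out)) return true;
    return PreciseAllocate(size, category, out);
  }
};

int main() {
  SizeClassFreeList list(4);
  list.categories[1] = {48, 40};  // only the request's own category has blocks
  size_t node = 0;
  // Fast path fails (categories 2..3 are empty); precise search finds 48.
  std::cout << list.Allocate(44, /*category=*/1, &node) << " " << node << "\n";
  return 0;
}

In the diff below, the fast path corresponds to FreeListManyCachedFastPath::Allocate and the fallback to the SearchForNodeInList call in the new FreeListManyCachedFastPathForNewSpace::Allocate.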
@@ -111,6 +111,10 @@ void FreeListCategory::Relink(FreeList* owner) {
 
 FreeList* FreeList::CreateFreeList() { return new FreeListManyCachedOrigin(); }
 
+FreeList* FreeList::CreateFreeListForNewSpace() {
+  return new FreeListManyCachedOriginForNewSpace();
+}
+
 FreeSpace FreeList::TryFindNodeIn(FreeListCategoryType type,
                                   size_t minimum_size, size_t* node_size) {
   FreeListCategory* category = categories_[type];
@@ -361,12 +365,14 @@ FreeSpace FreeListManyCachedFastPath::Allocate(size_t size_in_bytes,
 
   // Fast path part 2: searching the medium categories for tiny objects
   if (node.is_null()) {
     if (size_in_bytes <= kTinyObjectMaxSize) {
+      DCHECK_EQ(kFastPathFirstCategory, first_category);
       for (type = next_nonempty_category[kFastPathFallBackTiny];
            type < kFastPathFirstCategory;
            type = next_nonempty_category[type + 1]) {
         node = TryFindNodeIn(type, size_in_bytes, node_size);
         if (!node.is_null()) break;
       }
+      first_category = kFastPathFallBackTiny;
     }
   }
@@ -387,19 +393,41 @@ FreeSpace FreeListManyCachedFastPath::Allocate(size_t size_in_bytes,
     }
   }
 
   // Updating cache
-  if (!node.is_null() && categories_[type] == nullptr) {
-    UpdateCacheAfterRemoval(type);
+  if (!node.is_null()) {
+    if (categories_[type] == nullptr) UpdateCacheAfterRemoval(type);
+    Page::FromHeapObject(node)->IncreaseAllocatedBytes(*node_size);
   }
 
 #ifdef DEBUG
   CheckCacheIntegrity();
 #endif
 
   DCHECK(IsVeryLong() || Available() == SumFreeLists());
   return node;
 }
 
+// ------------------------------------------------
+// FreeListManyCachedFastPathForNewSpace implementation
+
+FreeSpace FreeListManyCachedFastPathForNewSpace::Allocate(
+    size_t size_in_bytes, size_t* node_size, AllocationOrigin origin) {
+  FreeSpace node =
+      FreeListManyCachedFastPath::Allocate(size_in_bytes, node_size, origin);
+  if (!node.is_null()) return node;
+
+  // Search through the precise category for a fit.
+  FreeListCategoryType type = SelectFreeListCategoryType(size_in_bytes);
+  node = SearchForNodeInList(type, size_in_bytes, node_size);
+
+  if (!node.is_null()) {
+    if (categories_[type] == nullptr) UpdateCacheAfterRemoval(type);
+    Page::FromHeapObject(node)->IncreaseAllocatedBytes(*node_size);
+  }
+
+#ifdef DEBUG
+  CheckCacheIntegrity();
+#endif
+
+  DCHECK(IsVeryLong() || Available() == SumFreeLists());
+  return node;
+}
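Both Allocate variants call UpdateCacheAfterRemoval once a successful allocation leaves its category empty. As a rough illustration of what such a cache provides, here is a standalone sketch of a "next non-empty category" table in the spirit of the next_nonempty_category array consulted by the fast path above; the full rebuild is a simplification (V8 updates the cache incrementally) and all names are hypothetical.

// Sketch of a "next non-empty category" cache for skipping empty buckets.
// Hypothetical; not V8's implementation.
#include <array>
#include <cstddef>
#include <iostream>

constexpr size_t kNumCategories = 8;

struct CategoryCache {
  // counts[c] is the number of free blocks currently in category c.
  std::array<size_t, kNumCategories> counts{};
  // next_nonempty[c] is the smallest category >= c with blocks available,
  // or kNumCategories if there is none.
  std::array<size_t, kNumCategories + 1> next_nonempty{};

  // Rebuild the table in one right-to-left pass, e.g. after a removal.
  void Rebuild() {
    next_nonempty[kNumCategories] = kNumCategories;
    for (size_t c = kNumCategories; c-- > 0;) {
      next_nonempty[c] = counts[c] > 0 ? c : next_nonempty[c + 1];
    }
  }
};

int main() {
  CategoryCache cache;
  cache.counts[2] = 1;
  cache.counts[5] = 3;
  cache.Rebuild();
  std::cout << cache.next_nonempty[0] << "\n";  // 2
  std::cout << cache.next_nonempty[3] << "\n";  // 5
  cache.counts[2] = 0;  // category drained by an allocation
  cache.Rebuild();      // analogous to UpdateCacheAfterRemoval
  std::cout << cache.next_nonempty[0] << "\n";  // 5
  return 0;
}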
@@ -418,6 +446,19 @@ FreeSpace FreeListManyCachedOrigin::Allocate(size_t size_in_bytes,
   }
 }
 
+// ------------------------------------------------
+// FreeListManyCachedOriginForNewSpace implementation
+
+FreeSpace FreeListManyCachedOriginForNewSpace::Allocate(
+    size_t size_in_bytes, size_t* node_size, AllocationOrigin origin) {
+  if (origin == AllocationOrigin::kGC) {
+    return FreeListManyCached::Allocate(size_in_bytes, node_size, origin);
+  } else {
+    return FreeListManyCachedFastPathForNewSpace::Allocate(size_in_bytes,
+                                                           node_size, origin);
+  }
+}
+
 // ------------------------------------------------
 // Generic FreeList methods (non alloc/free related)
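The dispatch mirrors the existing FreeListManyCachedOrigin: allocations from within the GC take the cheaper cached search, while mutator allocations get the fast path plus the new precise fallback. A toy sketch of that policy split, with hypothetical names:

// Toy sketch of origin-based dispatch between two allocation strategies.
// Only the selection logic mirrors the commit; everything else is made up.
#include <iostream>

enum class AllocationOrigin { kRuntime, kGC };

const char* Allocate(AllocationOrigin origin) {
  if (origin == AllocationOrigin::kGC) {
    // The GC runs in parallel; a cheap cached lookup is preferred there.
    return "cheap cached search";
  }
  // Outside the GC, spend more effort so a failed allocation does not
  // immediately force another GC.
  return "fast path + precise fallback";
}

int main() {
  std::cout << Allocate(AllocationOrigin::kGC) << "\n";
  std::cout << Allocate(AllocationOrigin::kRuntime) << "\n";
  return 0;
}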
@@ -122,8 +122,10 @@ class FreeListCategory {
 // categories would scatter allocation more.
 class FreeList {
  public:
-  // Creates a Freelist of the default class (FreeListLegacy for now).
+  // Creates a Freelist of the default class.
   V8_EXPORT_PRIVATE static FreeList* CreateFreeList();
 
+  // Creates a Freelist for new space.
+  V8_EXPORT_PRIVATE static FreeList* CreateFreeListForNewSpace();
+
   virtual ~FreeList() = default;
@@ -473,6 +475,21 @@ class V8_EXPORT_PRIVATE FreeListManyCachedFastPath : public FreeListManyCached {
       FreeListManyCachedFastPathSelectFastAllocationFreeListCategoryType);
 };
 
+// Same as FreeListManyCachedFastPath but falls back to a precise search of the
+// precise category in case allocation fails. Because new space is relatively
+// small, the free list is also relatively small and larger categories are more
+// likely to be empty. The precise search is meant to avoid an allocation
+// failure and thus avoid GCs.
+class V8_EXPORT_PRIVATE FreeListManyCachedFastPathForNewSpace
+    : public FreeListManyCachedFastPath {
+ public:
+  V8_WARN_UNUSED_RESULT FreeSpace Allocate(size_t size_in_bytes,
+                                           size_t* node_size,
+                                           AllocationOrigin origin) override;
+
+ protected:
+};
+
 // Uses FreeListManyCached if in the GC; FreeListManyCachedFastPath otherwise.
 // The reasoning behind this FreeList is the following: the GC runs in
 // parallel, and therefore, more expensive allocations there are less
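The comment's reasoning hinges on the "precise category": every request maps to the bucket whose size range contains it, and blocks within that bucket still vary in size, which is why a precise search must compare each block against the request. A small sketch of such a mapping, with made-up boundaries (V8's FreeListMany defines its own table):

// Sketch of mapping a request size to its free-list category.
// Hypothetical boundaries; not V8's table.
#include <cstddef>
#include <iostream>

constexpr size_t kCategoryBounds[] = {32, 64, 128, 256, 1024, 4096};
constexpr size_t kNumCats = sizeof(kCategoryBounds) / sizeof(size_t);

// The "precise" category for a size: the bucket whose range contains it.
size_t SelectCategory(size_t size) {
  for (size_t c = 0; c < kNumCats; ++c) {
    if (size <= kCategoryBounds[c]) return c;
  }
  return kNumCats - 1;  // oversized requests share the last category
}

int main() {
  std::cout << SelectCategory(44) << "\n";   // 1 (the 33..64 bucket)
  std::cout << SelectCategory(500) << "\n";  // 4 (the 257..1024 bucket)
  return 0;
}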
@@ -489,6 +506,16 @@ class V8_EXPORT_PRIVATE FreeListManyCachedOrigin
                                            AllocationOrigin origin) override;
 };
 
+// Similar to FreeListManyCachedOrigin but uses
+// FreeListManyCachedFastPathForNewSpace for allocations outside the GC.
+class V8_EXPORT_PRIVATE FreeListManyCachedOriginForNewSpace
+    : public FreeListManyCachedFastPathForNewSpace {
+ public:
+  V8_WARN_UNUSED_RESULT FreeSpace Allocate(size_t size_in_bytes,
+                                           size_t* node_size,
+                                           AllocationOrigin origin) override;
+};
+
 }  // namespace internal
 }  // namespace v8
@@ -902,7 +902,7 @@ PagedSpaceForNewSpace::PagedSpaceForNewSpace(
     LinearAllocationArea& allocation_info,
     LinearAreaOriginalData& linear_area_original_data)
     : PagedSpaceBase(heap, NEW_SPACE, NOT_EXECUTABLE,
-                     FreeList::CreateFreeList(), allocation_counter,
+                     FreeList::CreateFreeListForNewSpace(), allocation_counter,
                      allocation_info, linear_area_original_data,
                      CompactionSpaceKind::kNone),
       initial_capacity_(RoundDown(initial_capacity, Page::kPageSize)),