Commit f74d6931 authored by Nikolaos Papaspyrou, committed by V8 LUCI CQ

heap: Refactor MarkCompactCollector::FindBasePtrForMarking

This CL refactors the implementation of inner pointer resolution, based
on the marking bitmap. MarkCompactCollector::FindBasePtrForMarking has
most of its code that processes the marking bitmap moved to a utility
function FindPreviousObjectForConservativeMarking, which iterates
backwards to find the closest previous object on the page that has been
marked.

Bug: v8:12851
Change-Id: I980ac5712d8b1df792196d77edb9526ca2e13e2c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3758227
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81750}
parent 5e227beb
......@@ -54,15 +54,6 @@ bool ConservativeStackVisitor::CheckPage(Address address, MemoryChunk* page) {
void ConservativeStackVisitor::VisitConservativelyIfPointer(
const void* pointer) {
auto address = reinterpret_cast<Address>(pointer);
// TODO(v8:12851): Let's figure out what this meant to do...
// This condition is always true, as the LAB invariant requires
// start <= top <= limit
#if 0
if (address > isolate_->heap()->old_space()->top() ||
address < isolate_->heap()->old_space()->limit()) {
return;
}
#endif
for (Page* page : *isolate_->heap()->old_space()) {
if (CheckPage(address, page)) {
......
......@@ -2073,82 +2073,115 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
}
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
// TODO(v8:12851): If this implementation is kept:
// 1. This function will have to be refactored. Most of the bit hacking
// belongs to some reverse-iterator abstraction for bitmaps.
// 2. Unit tests will have to be added.
const Page* page = Page::FromAddress(maybe_inner_ptr);
Bitmap* bitmap = page->marking_bitmap<AccessMode::NON_ATOMIC>();
MarkBit::CellType* cells = bitmap->cells();
uint32_t index = page->AddressToMarkbitIndex(maybe_inner_ptr);
unsigned int cell_index = Bitmap::IndexToCell(index);
MarkBit::CellType mask = 1u << Bitmap::IndexInCell(index);
namespace {
// This utility function returns the highest address in the page that is lower
// than maybe_inner_ptr, has its markbit set, and whose previous address (if it
// exists) does not have its markbit set. This address is guaranteed to be the
// start of a valid object in the page. In case the markbit corresponding to
// maybe_inner_ptr is set, the function bails out and returns kNullAddress.
Address FindPreviousObjectForConservativeMarking(const Page* page,
Address maybe_inner_ptr) {
auto* bitmap = page->marking_bitmap<AccessMode::NON_ATOMIC>();
const MarkBit::CellType* cells = bitmap->cells();
// The first actual bit of the bitmap, corresponding to page->area_start(),
// is at start_index which is somewhere in (not necessarily at the start of)
// start_cell_index.
const uint32_t start_index = page->AddressToMarkbitIndex(page->area_start());
const uint32_t start_cell_index = Bitmap::IndexToCell(start_index);
// We assume that all markbits before start_index are clear:
// SLOW_DCHECK(bitmap->AllBitsClearInRange(0, start_index));
// This has already been checked for the entire bitmap before starting marking
// by MarkCompactCollector::VerifyMarkbitsAreClean.
const uint32_t index = page->AddressToMarkbitIndex(maybe_inner_ptr);
uint32_t cell_index = Bitmap::IndexToCell(index);
const MarkBit::CellType mask = 1u << Bitmap::IndexInCell(index);
MarkBit::CellType cell = cells[cell_index];
// If the markbit is set, then we have an object that does not need be marked.
// If the markbit is already set, bail out.
if ((cell & mask) != 0) return kNullAddress;
// Clear the bits corresponding to higher addresses in the cell.
cell &= ((~static_cast<MarkBit::CellType>(0)) >>
(Bitmap::kBitsPerCell - Bitmap::IndexInCell(index) - 1));
// Find the start of a valid object by traversing the bitmap backwards, until
// we find a markbit that is set and whose previous markbit (if it exists) is
// unset.
uint32_t object_index;
// Iterate backwards to find a cell with any set markbit.
while (cell == 0 && cell_index > 0) cell = cells[--cell_index];
// Traverse the bitmap backwards, until we find a markbit that is set and
// whose previous markbit (if it exists) is unset.
// First, iterate backwards to find a cell with any set markbit.
while (cell == 0 && cell_index > start_cell_index) cell = cells[--cell_index];
if (cell == 0) {
// There is no cell with a set markbit, we reached the start of the page.
object_index = 0;
} else {
uint32_t leading_zeros = base::bits::CountLeadingZeros(cell);
uint32_t leftmost_ones =
base::bits::CountLeadingZeros(~(cell << leading_zeros));
uint32_t index_of_last_leftmost_one =
Bitmap::kBitsPerCell - leading_zeros - leftmost_ones;
if (index_of_last_leftmost_one > 0) {
// The leftmost contiguous sequence of set bits does not reach the start
// of the cell.
object_index =
cell_index * Bitmap::kBitsPerCell + index_of_last_leftmost_one;
} else {
// The leftmost contiguous sequence of set bits reaches the start of the
// cell. We must keep traversing backwards until we find the first unset
// markbit.
if (cell_index == 0) {
object_index = 0;
} else {
// Iterate backwards to find a cell with any unset markbit.
do {
cell = cells[--cell_index];
} while (~cell == 0 && cell_index > 0);
if (~cell == 0) {
// There is no cell with a clear markbit, we reached the start of the
// page.
object_index = 0;
} else {
uint32_t leading_ones = base::bits::CountLeadingZeros(~cell);
uint32_t index_of_last_leading_one =
Bitmap::kBitsPerCell - leading_ones;
DCHECK_LT(0, index_of_last_leading_one);
object_index =
cell_index * Bitmap::kBitsPerCell + index_of_last_leading_one;
}
}
}
DCHECK_EQ(start_cell_index, cell_index);
// We have reached the start of the page.
return page->area_start();
}
// We have found such a cell.
const uint32_t leading_zeros = base::bits::CountLeadingZeros(cell);
const uint32_t leftmost_ones =
base::bits::CountLeadingZeros(~(cell << leading_zeros));
const uint32_t index_of_last_leftmost_one =
Bitmap::kBitsPerCell - leading_zeros - leftmost_ones;
// If the leftmost sequence of set bits does not reach the start of the cell,
// we found it.
if (index_of_last_leftmost_one > 0) {
return page->MarkbitIndexToAddress(cell_index * Bitmap::kBitsPerCell +
index_of_last_leftmost_one);
}
// The leftmost sequence of set bits reaches the start of the cell. We must
// keep traversing backwards until we find the first unset markbit.
if (cell_index == start_cell_index) {
// We have reached the start of the page.
return page->area_start();
}
// Iterate backwards to find a cell with any unset markbit.
do {
cell = cells[--cell_index];
} while (~cell == 0 && cell_index > start_cell_index);
if (~cell == 0) {
DCHECK_EQ(start_cell_index, cell_index);
// We have reached the start of the page.
return page->area_start();
}
// We have found such a cell.
const uint32_t leading_ones = base::bits::CountLeadingZeros(~cell);
const uint32_t index_of_last_leading_one =
Bitmap::kBitsPerCell - leading_ones;
DCHECK_LT(0, index_of_last_leading_one);
return page->MarkbitIndexToAddress(cell_index * Bitmap::kBitsPerCell +
index_of_last_leading_one);
}
} // namespace
Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
const Page* page = Page::FromAddress(maybe_inner_ptr);
// TODO(v8:12851): We need a mechanism for checking that this is a valid page,
// otherwise return kNullAddress.
DCHECK_LT(maybe_inner_ptr, page->area_end());
if (maybe_inner_ptr < page->area_start()) return kNullAddress;
Address base_ptr =
FindPreviousObjectForConservativeMarking(page, maybe_inner_ptr);
// If the markbit is set, then we have an object that does not need be marked.
if (base_ptr == kNullAddress) return kNullAddress;
// Iterate through the objects in the page forwards, until we find the object
// containing maybe_inner_pointer.
Address base_ptr = page->MarkbitIndexToAddress(object_index);
const Address limit = page->area_end();
// containing maybe_inner_ptr.
DCHECK_LE(base_ptr, maybe_inner_ptr);
PtrComprCageBase cage_base{page->heap()->isolate()};
while (base_ptr < limit) {
if (maybe_inner_ptr < base_ptr) break;
const int size = HeapObject::FromAddress(base_ptr).Size(cage_base);
if (maybe_inner_ptr < base_ptr + size) return base_ptr;
while (true) {
HeapObject obj(HeapObject::FromAddress(base_ptr));
const int size = obj.Size(cage_base);
DCHECK_LT(0, size);
if (maybe_inner_ptr < base_ptr + size)
return obj.IsFreeSpaceOrFiller(cage_base) ? kNullAddress : base_ptr;
base_ptr += size;
DCHECK_LE(base_ptr, limit);
DCHECK_LT(base_ptr, page->area_end());
}
return kNullAddress;
}
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
......@@ -2805,7 +2838,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_FLUSHABLE_BYTECODE);
// `ProcessFlusheBaselineCandidates()` must be called after
// `ProcessFlushedBaselineCandidates()` must be called after
// `ProcessOldCodeCandidates()` so that we correctly set the code object on
// the JSFunction after flushing.
ProcessOldCodeCandidates();
......@@ -4684,7 +4717,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
chunk_->invalidated_slots<OLD_TO_OLD>() != nullptr) {
// The invalidated slots are not needed after old-to-old slots were
// processsed.
// processed.
chunk_->ReleaseInvalidatedSlots<OLD_TO_OLD>();
}
if (V8_EXTERNAL_CODE_SPACE_BOOL) {
......@@ -4711,7 +4744,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
chunk_->ReleaseSlotSet<OLD_TO_CODE>();
}
// The invalidated slots are not needed after old-to-code slots were
// processsed, but since there are no invalidated OLD_TO_CODE slots,
// processed, but since there are no invalidated OLD_TO_CODE slots,
// there's nothing to clear.
}
if (updating_mode_ == RememberedSetUpdatingMode::ALL) {
......
......@@ -129,6 +129,10 @@ class V8_EXPORT_PRIVATE Bitmap {
return reinterpret_cast<MarkBit::CellType*>(this);
}
V8_INLINE const MarkBit::CellType* cells() const {
return reinterpret_cast<const MarkBit::CellType*>(this);
}
V8_INLINE static Bitmap* FromAddress(Address addr) {
return reinterpret_cast<Bitmap*>(addr);
}
......
......@@ -378,6 +378,7 @@ v8_source_set("unittests_sources") {
"heap/list-unittest.cc",
"heap/local-factory-unittest.cc",
"heap/local-heap-unittest.cc",
"heap/marking-inner-pointer-resolution-unittest.cc",
"heap/marking-unittest.cc",
"heap/marking-worklist-unittest.cc",
"heap/memory-reducer-unittest.cc",
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/mark-compact.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB
namespace {
// Test fixture for inner-pointer resolution via the marking bitmap.
// It allocates one fresh old-space page; each test lays out a sequence of
// objects on that page (optionally setting markbits for them) and then checks
// what MarkCompactCollector::FindBasePtrForMarking returns for addresses
// inside and outside those objects.
class InnerPointerResolutionTest : public TestWithIsolate {
 public:
  // Describes one object to be placed on the test page.
  struct ObjectRequest {
    int size;  // The only required field; must be a multiple of kTaggedSize.
    // REGULAR objects are created as fixed arrays; FREE ones as fillers.
    enum { REGULAR, FREE } type = REGULAR;
    // How the object's markbits should be set (BLACK_AREA sets the whole
    // range of markbits covered by the object).
    enum { WHITE, GREY, BLACK, BLACK_AREA } marked = WHITE;
    // If index_in_cell >= 0, the object is placed at the lowest address s.t.
    // Bitmap::IndexInCell(AddressToMarkbitIndex(address)) == index_in_cell.
    // To achieve this, padding (i.e., introducing a free-space object of the
    // appropriate size) may be necessary. If padding == CONSECUTIVE, no such
    // padding is allowed and it is just checked that object layout is as
    // intended.
    int index_in_cell = -1;
    enum { CONSECUTIVE, PAD_WHITE, PAD_BLACK } padding = CONSECUTIVE;
    Address address = kNullAddress;  // The object's address is stored here.
  };

  InnerPointerResolutionTest() {
    // Allocate a dedicated old-space page for the test's objects.
    OldSpace* old_space = heap()->old_space();
    EXPECT_NE(nullptr, old_space);
    page_ = allocator()->AllocatePage(MemoryAllocator::AllocationMode::kRegular,
                                      old_space, NOT_EXECUTABLE);
    EXPECT_NE(nullptr, page_);
  }

  ~InnerPointerResolutionTest() override {
    allocator()->Free(MemoryAllocator::FreeMode::kImmediately, page_);
  }

  InnerPointerResolutionTest(const InnerPointerResolutionTest&) = delete;
  InnerPointerResolutionTest& operator=(const InnerPointerResolutionTest&) =
      delete;

  Heap* heap() { return isolate()->heap(); }
  Page* page() { return page_; }
  MemoryAllocator* allocator() { return heap()->memory_allocator(); }
  MarkCompactCollector* collector() { return heap()->mark_compact_collector(); }

  // Creates a list of objects in the page and ensures that the page is
  // iterable. May insert free-space padding objects into the vector (to
  // honor index_in_cell requests) and appends one final free-space object
  // covering the rest of the page; each request's address field is filled in.
  void CreateObjects(std::vector<ObjectRequest>& objects) {
    Address ptr = page()->area_start();
    for (size_t i = 0; i < objects.size(); ++i) {
      CHECK_EQ(0, objects[i].size % kTaggedSize);
      // Check if padding is needed.
      const uint32_t index = page()->AddressToMarkbitIndex(ptr);
      const int index_in_cell = Bitmap::IndexInCell(index);
      if (objects[i].index_in_cell < 0) {
        objects[i].index_in_cell = index_in_cell;
      } else if (objects[i].padding != ObjectRequest::CONSECUTIVE) {
        DCHECK_LE(0, objects[i].index_in_cell);
        DCHECK_GT(Bitmap::kBitsPerCell, objects[i].index_in_cell);
        const bool black = objects[i].padding == ObjectRequest::PAD_BLACK;
        objects[i].padding = ObjectRequest::CONSECUTIVE;
        // Distance (in markbits, hence kBytesPerCell bytes each) from the
        // current position to the requested index in the cell, modulo the
        // cell size.
        const int needed_padding_size =
            ((Bitmap::kBitsPerCell + objects[i].index_in_cell - index_in_cell) %
             Bitmap::kBitsPerCell) *
            Bitmap::kBytesPerCell;
        if (needed_padding_size > 0) {
          ObjectRequest pad{
              needed_padding_size,
              ObjectRequest::FREE,
              black ? ObjectRequest::BLACK_AREA : ObjectRequest::WHITE,
              index_in_cell,
              ObjectRequest::CONSECUTIVE,
              ptr};
          objects.insert(objects.begin() + i, pad);
          CreateObject(pad);
          ptr += needed_padding_size;
          // Re-process the original request at its (now shifted) position.
          continue;
        }
      }
      // This will fail if the marking bitmap's implementation parameters
      // change (e.g., Bitmap::kBitsPerCell) or the size of the page header
      // changes. In this case, the tests will need to be revised accordingly.
      EXPECT_EQ(index_in_cell, objects[i].index_in_cell);
      objects[i].address = ptr;
      CreateObject(objects[i]);
      ptr += objects[i].size;
    }
    // Create one last object that uses the remaining space on the page; this
    // simulates freeing the page's LAB.
    const int remaining_size = static_cast<int>(page_->area_end() - ptr);
    const uint32_t index = page()->AddressToMarkbitIndex(ptr);
    const int index_in_cell = Bitmap::IndexInCell(index);
    ObjectRequest last{
        remaining_size, ObjectRequest::FREE,        ObjectRequest::WHITE,
        index_in_cell,  ObjectRequest::CONSECUTIVE, ptr};
    objects.push_back(last);
    CreateObject(last);
  }

  // "Allocates" (i.e., manually places) one object at object.address, sets
  // its map and size, and sets its markbits as requested.
  void CreateObject(const ObjectRequest& object) {
    // "Allocate" (i.e., manually place) the object in the page, set the map
    // and the size.
    switch (object.type) {
      case ObjectRequest::REGULAR: {
        CHECK_LE(2 * kTaggedSize, object.size);
        ReadOnlyRoots roots(heap());
        HeapObject heap_object(HeapObject::FromAddress(object.address));
        heap_object.set_map_after_allocation(roots.unchecked_fixed_array_map(),
                                             SKIP_WRITE_BARRIER);
        FixedArray arr(FixedArray::cast(heap_object));
        arr.set_length((object.size - FixedArray::SizeFor(0)) / kTaggedSize);
        CHECK_EQ(object.size, arr.AllocatedSize());
        break;
      }
      case ObjectRequest::FREE:
        heap()->CreateFillerObjectAt(object.address, object.size);
        break;
    }
    // Mark the object in the bitmap, if necessary.
    switch (object.marked) {
      case ObjectRequest::WHITE:
        break;
      case ObjectRequest::GREY:
        collector()->marking_state()->WhiteToGrey(
            HeapObject::FromAddress(object.address));
        break;
      case ObjectRequest::BLACK:
        // Black marking needs at least two markbits, hence the size check.
        CHECK_LE(2 * kTaggedSize, object.size);
        collector()->marking_state()->WhiteToBlack(
            HeapObject::FromAddress(object.address));
        break;
      case ObjectRequest::BLACK_AREA:
        collector()->marking_state()->bitmap(page_)->SetRange(
            page_->AddressToMarkbitIndex(object.address),
            page_->AddressToMarkbitIndex(object.address + object.size));
        break;
    }
  }

  // This must be called with a created object and an offset inside it.
  // Expects resolution to the object's base address, except for fillers,
  // black areas, and offsets whose own markbit is set (BLACK sets two
  // markbits, GREY one), where kNullAddress is expected.
  void RunTestInside(const ObjectRequest& object, int offset) {
    CHECK_LE(0, offset);
    CHECK_GT(object.size, offset);
    Address base_ptr =
        collector()->FindBasePtrForMarking(object.address + offset);
    if (object.type == ObjectRequest::FREE ||
        object.marked == ObjectRequest::BLACK_AREA ||
        (object.marked == ObjectRequest::BLACK && offset < 2 * kTaggedSize) ||
        (object.marked == ObjectRequest::GREY && offset < kTaggedSize))
      EXPECT_EQ(kNullAddress, base_ptr);
    else
      EXPECT_EQ(object.address, base_ptr);
  }

  // This must be called with an address not contained in any created object.
  void RunTestOutside(Address ptr) {
    CHECK(!page()->Contains(ptr));
    Address base_ptr = collector()->FindBasePtrForMarking(ptr);
    EXPECT_EQ(kNullAddress, base_ptr);
  }

  // Lays out the given objects on the page, then probes several offsets
  // inside every object and one address before the page's usable area.
  void TestWith(std::vector<ObjectRequest> objects) {
    CreateObjects(objects);
    for (auto object : objects) {
      RunTestInside(object, 0);
      RunTestInside(object, 1);
      RunTestInside(object, object.size / 2);
      RunTestInside(object, object.size - 1);
    }
    const Address outside_ptr = page()->area_start() - 42;
    CHECK_LE(page()->address(), outside_ptr);
    RunTestOutside(outside_ptr);
  }

 private:
  Page* page_;  // The dedicated test page; freed in the destructor.
};
} // namespace
// A page with no objects: every probed address resolves to kNullAddress.
TEST_F(InnerPointerResolutionTest, EmptyPage) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({});
}
// Tests with some objects laid out randomly.

// Unmarked objects only: inner pointers into regular objects still resolve,
// pointers into fillers do not.
TEST_F(InnerPointerResolutionTest, NothingMarked) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {64},
      {48},
      {52},
      {512},
      {4, ObjectRequest::FREE},
      {60},
      {8, ObjectRequest::FREE},
      {8},
      {42176},
  });
}
// Every object carries a markbit (black or grey).
TEST_F(InnerPointerResolutionTest, AllMarked) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {64, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {48, ObjectRequest::REGULAR, ObjectRequest::GREY},
      {52, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {512, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {4, ObjectRequest::FREE, ObjectRequest::GREY},
      {60, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {8, ObjectRequest::FREE, ObjectRequest::GREY},
      {8, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {42176, ObjectRequest::REGULAR, ObjectRequest::BLACK},
  });
}
// A mix of white, grey and black objects.
TEST_F(InnerPointerResolutionTest, SomeMarked) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {48, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {52, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {512, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {4, ObjectRequest::FREE, ObjectRequest::GREY},
      {60, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {8, ObjectRequest::FREE, ObjectRequest::GREY},
      {8, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {42176, ObjectRequest::REGULAR, ObjectRequest::GREY},
  });
}
// Like SomeMarked, but with black areas (whole markbit ranges set) mixed in.
TEST_F(InnerPointerResolutionTest, BlackAreas) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA},
      {52, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {512, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA},
      {4, ObjectRequest::FREE, ObjectRequest::GREY},
      {60, ObjectRequest::REGULAR, ObjectRequest::BLACK},
      {8, ObjectRequest::FREE, ObjectRequest::GREY},
      {8, ObjectRequest::REGULAR, ObjectRequest::WHITE},
      {42176, ObjectRequest::REGULAR, ObjectRequest::GREY},
  });
}
// Tests with specific object layout, to cover interesting and corner cases.

// Several black objects whose markbits fall in the same bitmap cell.
TEST_F(InnerPointerResolutionTest, ThreeMarkedObjectsInSameCell) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      // Some initial large unmarked object, followed by a small marked object
      // towards the end of the cell.
      {512},
      {20, ObjectRequest::REGULAR, ObjectRequest::BLACK, 20,
       ObjectRequest::PAD_WHITE},
      // Then three marked objects in the same cell.
      {32, ObjectRequest::REGULAR, ObjectRequest::BLACK, 3,
       ObjectRequest::PAD_WHITE},
      {48, ObjectRequest::REGULAR, ObjectRequest::BLACK, 11},
      {20, ObjectRequest::REGULAR, ObjectRequest::BLACK, 23},
      // This marked object is in the next cell.
      {64, ObjectRequest::REGULAR, ObjectRequest::BLACK, 17,
       ObjectRequest::PAD_WHITE},
  });
}
// Several black areas whose markbits fall in the same bitmap cell.
TEST_F(InnerPointerResolutionTest, ThreeBlackAreasInSameCell) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      // Some initial large unmarked object, followed by a small black area
      // towards the end of the cell.
      {512},
      {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 20,
       ObjectRequest::PAD_WHITE},
      // Then three black areas in the same cell.
      {32, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 3,
       ObjectRequest::PAD_WHITE},
      {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 11},
      {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 23},
      // This black area is in the next cell.
      {64, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 17,
       ObjectRequest::PAD_WHITE},
  });
}
// A black area (created as padding) right at the start of the page.
TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStart) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30,
       ObjectRequest::PAD_BLACK},
  });
}
// A black area at the start of the page that ends exactly at a cell boundary.
TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStartUntilCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0,
       ObjectRequest::PAD_BLACK},
  });
}
// A black area spanning many bitmap cells, starting at the page start.
TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStart) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell,
       ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30,
       ObjectRequest::PAD_BLACK},
  });
}
// A multi-cell black area at the page start, ending exactly at a cell
// boundary.
TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStartUntilCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell,
       ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0,
       ObjectRequest::PAD_BLACK},
  });
}
// A small black area whose first markbit is the first bit of a cell.
TEST_F(InnerPointerResolutionTest, SmallBlackAreaStartingAtCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0,
       ObjectRequest::PAD_WHITE},
  });
}
// A multi-cell black area whose first markbit is the first bit of a cell.
TEST_F(InnerPointerResolutionTest, LargeBlackAreaStartingAtCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell + 64,
       ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0,
       ObjectRequest::PAD_WHITE},
  });
}
// A small black area whose last markbit is the last bit of a cell.
TEST_F(InnerPointerResolutionTest, SmallBlackAreaEndingAtCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 13,
       ObjectRequest::PAD_WHITE},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0,
       ObjectRequest::PAD_BLACK},
  });
}
// A multi-cell black area whose last markbit is the last bit of a cell.
TEST_F(InnerPointerResolutionTest, LargeBlackAreaEndingAtCellBoundary) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell + 64,
       ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0,
       ObjectRequest::PAD_WHITE},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0,
       ObjectRequest::PAD_BLACK},
  });
}
// Two small black areas aligned to cell boundaries, separated by white space.
TEST_F(InnerPointerResolutionTest, TwoSmallBlackAreasAtCellBoundaries) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {24, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0,
       ObjectRequest::PAD_WHITE},
      {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 25,
       ObjectRequest::PAD_WHITE},
      {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0,
       ObjectRequest::PAD_BLACK},
  });
}
// A black area covering exactly one full bitmap cell.
TEST_F(InnerPointerResolutionTest, BlackAreaOfOneCell) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {Bitmap::kBitsPerCell * Bitmap::kBytesPerCell, ObjectRequest::REGULAR,
       ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE},
  });
}
// A black area covering many full bitmap cells.
TEST_F(InnerPointerResolutionTest, BlackAreaOfManyCells) {
  if (FLAG_enable_third_party_heap) return;
  TestWith({
      {512},
      {17 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell,
       ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0,
       ObjectRequest::PAD_WHITE},
  });
}
#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment