Commit 954c19c4 authored by Michael Lippautz, committed by V8 LUCI CQ

cppgc: Pass PageAllocator as reference when expecting non-null ref

Change-Id: Id807e5e09fff59f4aedfca67461ffe3af3ffbea3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3114144
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Auto-Submit: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/main@{#76458}
parent 22cd8032
...@@ -62,9 +62,9 @@ HeapBase::HeapBase( ...@@ -62,9 +62,9 @@ HeapBase::HeapBase(
#endif // LEAK_SANITIZER #endif // LEAK_SANITIZER
#if defined(CPPGC_CAGED_HEAP) #if defined(CPPGC_CAGED_HEAP)
caged_heap_(this, page_allocator()), caged_heap_(this, page_allocator()),
page_backend_(std::make_unique<PageBackend>(&caged_heap_.allocator())), page_backend_(std::make_unique<PageBackend>(caged_heap_.allocator())),
#else // !CPPGC_CAGED_HEAP #else // !CPPGC_CAGED_HEAP
page_backend_(std::make_unique<PageBackend>(page_allocator())), page_backend_(std::make_unique<PageBackend>(*page_allocator())),
#endif // !CPPGC_CAGED_HEAP #endif // !CPPGC_CAGED_HEAP
stats_collector_(std::make_unique<StatsCollector>(platform_.get())), stats_collector_(std::make_unique<StatsCollector>(platform_.get())),
stack_(std::make_unique<heap::base::Stack>( stack_(std::make_unique<heap::base::Stack>(
......
...@@ -12,11 +12,11 @@ namespace internal { ...@@ -12,11 +12,11 @@ namespace internal {
namespace { namespace {
void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) { void Unprotect(PageAllocator& allocator, const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) { if (SupportsCommittingGuardPages(allocator)) {
CHECK(allocator->SetPermissions(page_memory.writeable_region().base(), CHECK(allocator.SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(), page_memory.writeable_region().size(),
PageAllocator::Permission::kReadWrite)); PageAllocator::Permission::kReadWrite));
} else { } else {
// No protection in case the allocator cannot commit at the required // No protection in case the allocator cannot commit at the required
// granularity. Only protect if the allocator supports committing at that // granularity. Only protect if the allocator supports committing at that
...@@ -24,51 +24,51 @@ void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) { ...@@ -24,51 +24,51 @@ void Unprotect(PageAllocator* allocator, const PageMemory& page_memory) {
// //
// The allocator needs to support committing the overall range. // The allocator needs to support committing the overall range.
CHECK_EQ(0u, CHECK_EQ(0u,
page_memory.overall_region().size() % allocator->CommitPageSize()); page_memory.overall_region().size() % allocator.CommitPageSize());
CHECK(allocator->SetPermissions(page_memory.overall_region().base(), CHECK(allocator.SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(), page_memory.overall_region().size(),
PageAllocator::Permission::kReadWrite)); PageAllocator::Permission::kReadWrite));
} }
} }
void Protect(PageAllocator* allocator, const PageMemory& page_memory) { void Protect(PageAllocator& allocator, const PageMemory& page_memory) {
if (SupportsCommittingGuardPages(allocator)) { if (SupportsCommittingGuardPages(allocator)) {
// Swap the same region, providing the OS with a chance for fast lookup and // Swap the same region, providing the OS with a chance for fast lookup and
// change. // change.
CHECK(allocator->SetPermissions(page_memory.writeable_region().base(), CHECK(allocator.SetPermissions(page_memory.writeable_region().base(),
page_memory.writeable_region().size(), page_memory.writeable_region().size(),
PageAllocator::Permission::kNoAccess)); PageAllocator::Permission::kNoAccess));
} else { } else {
// See Unprotect(). // See Unprotect().
CHECK_EQ(0u, CHECK_EQ(0u,
page_memory.overall_region().size() % allocator->CommitPageSize()); page_memory.overall_region().size() % allocator.CommitPageSize());
CHECK(allocator->SetPermissions(page_memory.overall_region().base(), CHECK(allocator.SetPermissions(page_memory.overall_region().base(),
page_memory.overall_region().size(), page_memory.overall_region().size(),
PageAllocator::Permission::kNoAccess)); PageAllocator::Permission::kNoAccess));
} }
} }
MemoryRegion ReserveMemoryRegion(PageAllocator* allocator, MemoryRegion ReserveMemoryRegion(PageAllocator& allocator,
size_t allocation_size) { size_t allocation_size) {
void* region_memory = void* region_memory =
allocator->AllocatePages(nullptr, allocation_size, kPageSize, allocator.AllocatePages(nullptr, allocation_size, kPageSize,
PageAllocator::Permission::kNoAccess); PageAllocator::Permission::kNoAccess);
const MemoryRegion reserved_region(static_cast<Address>(region_memory), const MemoryRegion reserved_region(static_cast<Address>(region_memory),
allocation_size); allocation_size);
DCHECK_EQ(reserved_region.base() + allocation_size, reserved_region.end()); DCHECK_EQ(reserved_region.base() + allocation_size, reserved_region.end());
return reserved_region; return reserved_region;
} }
void FreeMemoryRegion(PageAllocator* allocator, void FreeMemoryRegion(PageAllocator& allocator,
const MemoryRegion& reserved_region) { const MemoryRegion& reserved_region) {
// Make sure pages returned to OS are unpoisoned. // Make sure pages returned to OS are unpoisoned.
ASAN_UNPOISON_MEMORY_REGION(reserved_region.base(), reserved_region.size()); ASAN_UNPOISON_MEMORY_REGION(reserved_region.base(), reserved_region.size());
allocator->FreePages(reserved_region.base(), reserved_region.size()); allocator.FreePages(reserved_region.base(), reserved_region.size());
} }
} // namespace } // namespace
PageMemoryRegion::PageMemoryRegion(PageAllocator* allocator, PageMemoryRegion::PageMemoryRegion(PageAllocator& allocator,
MemoryRegion reserved_region, bool is_large) MemoryRegion reserved_region, bool is_large)
: allocator_(allocator), : allocator_(allocator),
reserved_region_(reserved_region), reserved_region_(reserved_region),
...@@ -81,12 +81,12 @@ PageMemoryRegion::~PageMemoryRegion() { ...@@ -81,12 +81,12 @@ PageMemoryRegion::~PageMemoryRegion() {
// static // static
constexpr size_t NormalPageMemoryRegion::kNumPageRegions; constexpr size_t NormalPageMemoryRegion::kNumPageRegions;
NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator* allocator) NormalPageMemoryRegion::NormalPageMemoryRegion(PageAllocator& allocator)
: PageMemoryRegion(allocator, : PageMemoryRegion(
ReserveMemoryRegion( allocator,
allocator, RoundUp(kPageSize * kNumPageRegions, ReserveMemoryRegion(allocator, RoundUp(kPageSize * kNumPageRegions,
allocator->AllocatePageSize())), allocator.AllocatePageSize())),
false) { false) {
#ifdef DEBUG #ifdef DEBUG
for (size_t i = 0; i < kNumPageRegions; ++i) { for (size_t i = 0; i < kNumPageRegions; ++i) {
DCHECK_EQ(false, page_memories_in_use_[i]); DCHECK_EQ(false, page_memories_in_use_[i]);
...@@ -114,13 +114,13 @@ void NormalPageMemoryRegion::UnprotectForTesting() { ...@@ -114,13 +114,13 @@ void NormalPageMemoryRegion::UnprotectForTesting() {
} }
} }
LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator* allocator, LargePageMemoryRegion::LargePageMemoryRegion(PageAllocator& allocator,
size_t length) size_t length)
: PageMemoryRegion(allocator, : PageMemoryRegion(
ReserveMemoryRegion( allocator,
allocator, RoundUp(length + 2 * kGuardPageSize, ReserveMemoryRegion(allocator, RoundUp(length + 2 * kGuardPageSize,
allocator->AllocatePageSize())), allocator.AllocatePageSize())),
true) {} true) {}
LargePageMemoryRegion::~LargePageMemoryRegion() = default; LargePageMemoryRegion::~LargePageMemoryRegion() = default;
...@@ -165,7 +165,7 @@ std::pair<NormalPageMemoryRegion*, Address> NormalPageMemoryPool::Take( ...@@ -165,7 +165,7 @@ std::pair<NormalPageMemoryRegion*, Address> NormalPageMemoryPool::Take(
return pair; return pair;
} }
PageBackend::PageBackend(PageAllocator* allocator) : allocator_(allocator) {} PageBackend::PageBackend(PageAllocator& allocator) : allocator_(allocator) {}
PageBackend::~PageBackend() = default; PageBackend::~PageBackend() = default;
......
...@@ -79,9 +79,9 @@ class V8_EXPORT_PRIVATE PageMemoryRegion { ...@@ -79,9 +79,9 @@ class V8_EXPORT_PRIVATE PageMemoryRegion {
virtual void UnprotectForTesting() = 0; virtual void UnprotectForTesting() = 0;
protected: protected:
PageMemoryRegion(PageAllocator*, MemoryRegion, bool); PageMemoryRegion(PageAllocator&, MemoryRegion, bool);
PageAllocator* const allocator_; PageAllocator& allocator_;
const MemoryRegion reserved_region_; const MemoryRegion reserved_region_;
const bool is_large_; const bool is_large_;
}; };
...@@ -91,7 +91,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion { ...@@ -91,7 +91,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
public: public:
static constexpr size_t kNumPageRegions = 10; static constexpr size_t kNumPageRegions = 10;
explicit NormalPageMemoryRegion(PageAllocator*); explicit NormalPageMemoryRegion(PageAllocator&);
~NormalPageMemoryRegion() override; ~NormalPageMemoryRegion() override;
const PageMemory GetPageMemory(size_t index) const { const PageMemory GetPageMemory(size_t index) const {
...@@ -133,7 +133,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion { ...@@ -133,7 +133,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
// LargePageMemoryRegion serves a single large PageMemory object. // LargePageMemoryRegion serves a single large PageMemory object.
class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion { class V8_EXPORT_PRIVATE LargePageMemoryRegion final : public PageMemoryRegion {
public: public:
LargePageMemoryRegion(PageAllocator*, size_t); LargePageMemoryRegion(PageAllocator&, size_t);
~LargePageMemoryRegion() override; ~LargePageMemoryRegion() override;
const PageMemory GetPageMemory() const { const PageMemory GetPageMemory() const {
...@@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryPool final { ...@@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NormalPageMemoryPool final {
// regions alive. // regions alive.
class V8_EXPORT_PRIVATE PageBackend final { class V8_EXPORT_PRIVATE PageBackend final {
public: public:
explicit PageBackend(PageAllocator*); explicit PageBackend(PageAllocator&);
~PageBackend(); ~PageBackend();
// Allocates a normal page from the backend. // Allocates a normal page from the backend.
...@@ -223,7 +223,7 @@ class V8_EXPORT_PRIVATE PageBackend final { ...@@ -223,7 +223,7 @@ class V8_EXPORT_PRIVATE PageBackend final {
PageBackend& operator=(const PageBackend&) = delete; PageBackend& operator=(const PageBackend&) = delete;
private: private:
PageAllocator* allocator_; PageAllocator& allocator_;
NormalPageMemoryPool page_pool_; NormalPageMemoryPool page_pool_;
PageMemoryRegionTree page_memory_region_tree_; PageMemoryRegionTree page_memory_region_tree_;
std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_; std::vector<std::unique_ptr<PageMemoryRegion>> normal_page_memory_regions_;
...@@ -233,8 +233,8 @@ class V8_EXPORT_PRIVATE PageBackend final { ...@@ -233,8 +233,8 @@ class V8_EXPORT_PRIVATE PageBackend final {
// Returns true if the provided allocator supports committing at the required // Returns true if the provided allocator supports committing at the required
// granularity. // granularity.
inline bool SupportsCommittingGuardPages(PageAllocator* allocator) { inline bool SupportsCommittingGuardPages(PageAllocator& allocator) {
return kGuardPageSize % allocator->CommitPageSize() == 0; return kGuardPageSize % allocator.CommitPageSize() == 0;
} }
Address NormalPageMemoryRegion::Lookup(ConstAddress address) const { Address NormalPageMemoryRegion::Lookup(ConstAddress address) const {
......
...@@ -77,7 +77,7 @@ TEST(PageMemoryDeathTest, ConstructNonContainedRegions) { ...@@ -77,7 +77,7 @@ TEST(PageMemoryDeathTest, ConstructNonContainedRegions) {
TEST(PageMemoryRegionTest, NormalPageMemoryRegion) { TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
pmr->UnprotectForTesting(); pmr->UnprotectForTesting();
MemoryRegion prev_overall; MemoryRegion prev_overall;
for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) { for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
...@@ -103,7 +103,7 @@ TEST(PageMemoryRegionTest, NormalPageMemoryRegion) { ...@@ -103,7 +103,7 @@ TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
TEST(PageMemoryRegionTest, LargePageMemoryRegion) { TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, 1024); auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, 1024);
pmr->UnprotectForTesting(); pmr->UnprotectForTesting();
const PageMemory pm = pmr->GetPageMemory(); const PageMemory pm = pmr->GetPageMemory();
EXPECT_LE(1024u, pm.writeable_region().size()); EXPECT_LE(1024u, pm.writeable_region().size());
...@@ -116,16 +116,16 @@ TEST(PageMemoryRegionTest, PlatformUsesGuardPages) { ...@@ -116,16 +116,16 @@ TEST(PageMemoryRegionTest, PlatformUsesGuardPages) {
// regions. // regions.
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
#if defined(V8_HOST_ARCH_PPC64) && !defined(_AIX) #if defined(V8_HOST_ARCH_PPC64) && !defined(_AIX)
EXPECT_FALSE(SupportsCommittingGuardPages(&allocator)); EXPECT_FALSE(SupportsCommittingGuardPages(allocator));
#elif defined(V8_HOST_ARCH_ARM64) #elif defined(V8_HOST_ARCH_ARM64)
if (allocator.CommitPageSize() == 4096) { if (allocator.CommitPageSize() == 4096) {
EXPECT_TRUE(SupportsCommittingGuardPages(&allocator)); EXPECT_TRUE(SupportsCommittingGuardPages(allocator));
} else { } else {
// Arm64 supports both 16k and 64k OS pages. // Arm64 supports both 16k and 64k OS pages.
EXPECT_FALSE(SupportsCommittingGuardPages(&allocator)); EXPECT_FALSE(SupportsCommittingGuardPages(allocator));
} }
#else // Regular case. #else // Regular case.
EXPECT_TRUE(SupportsCommittingGuardPages(&allocator)); EXPECT_TRUE(SupportsCommittingGuardPages(allocator));
#endif #endif
} }
...@@ -141,7 +141,7 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) { ...@@ -141,7 +141,7 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
// and thus not crash. // and thus not crash.
EXPECT_DEATH_IF_SUPPORTED( EXPECT_DEATH_IF_SUPPORTED(
v8::base::PageAllocator allocator; Address base; { v8::base::PageAllocator allocator; Address base; {
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, 1024); auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, 1024);
base = pmr->reserved_region().base(); base = pmr->reserved_region().base();
} access(base[0]); } access(base[0]);
, ""); , "");
...@@ -149,8 +149,8 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) { ...@@ -149,8 +149,8 @@ TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) { TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
if (SupportsCommittingGuardPages(&allocator)) { if (SupportsCommittingGuardPages(allocator)) {
EXPECT_DEATH_IF_SUPPORTED( EXPECT_DEATH_IF_SUPPORTED(
access(pmr->GetPageMemory(0).overall_region().base()[0]), ""); access(pmr->GetPageMemory(0).overall_region().base()[0]), "");
} }
...@@ -158,8 +158,8 @@ TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) { ...@@ -158,8 +158,8 @@ TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) { TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
if (SupportsCommittingGuardPages(&allocator)) { if (SupportsCommittingGuardPages(allocator)) {
EXPECT_DEATH_IF_SUPPORTED( EXPECT_DEATH_IF_SUPPORTED(
access(pmr->GetPageMemory(0).writeable_region().end()[0]), ""); access(pmr->GetPageMemory(0).writeable_region().end()[0]), "");
} }
...@@ -167,7 +167,7 @@ TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) { ...@@ -167,7 +167,7 @@ TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) { TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
PageMemoryRegionTree tree; PageMemoryRegionTree tree;
tree.Add(pmr.get()); tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base())); ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
...@@ -182,7 +182,7 @@ TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) { ...@@ -182,7 +182,7 @@ TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) { TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
constexpr size_t kLargeSize = 5012; constexpr size_t kLargeSize = 5012;
auto pmr = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize); auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, kLargeSize);
PageMemoryRegionTree tree; PageMemoryRegionTree tree;
tree.Add(pmr.get()); tree.Add(pmr.get());
ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base())); ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
...@@ -196,9 +196,9 @@ TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) { ...@@ -196,9 +196,9 @@ TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) { TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr1 = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr1 = std::make_unique<NormalPageMemoryRegion>(allocator);
constexpr size_t kLargeSize = 3127; constexpr size_t kLargeSize = 3127;
auto pmr2 = std::make_unique<LargePageMemoryRegion>(&allocator, kLargeSize); auto pmr2 = std::make_unique<LargePageMemoryRegion>(allocator, kLargeSize);
PageMemoryRegionTree tree; PageMemoryRegionTree tree;
tree.Add(pmr1.get()); tree.Add(pmr1.get());
tree.Add(pmr2.get()); tree.Add(pmr2.get());
...@@ -223,7 +223,7 @@ TEST(NormalPageMemoryPool, ConstructorEmpty) { ...@@ -223,7 +223,7 @@ TEST(NormalPageMemoryPool, ConstructorEmpty) {
TEST(NormalPageMemoryPool, AddTakeSameBucket) { TEST(NormalPageMemoryPool, AddTakeSameBucket) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
const PageMemory pm = pmr->GetPageMemory(0); const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool; NormalPageMemoryPool pool;
constexpr size_t kBucket = 0; constexpr size_t kBucket = 0;
...@@ -235,7 +235,7 @@ TEST(NormalPageMemoryPool, AddTakeSameBucket) { ...@@ -235,7 +235,7 @@ TEST(NormalPageMemoryPool, AddTakeSameBucket) {
TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) { TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
auto pmr = std::make_unique<NormalPageMemoryRegion>(&allocator); auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator);
const PageMemory pm = pmr->GetPageMemory(0); const PageMemory pm = pmr->GetPageMemory(0);
NormalPageMemoryPool pool; NormalPageMemoryPool pool;
constexpr size_t kFirstBucket = 0; constexpr size_t kFirstBucket = 0;
...@@ -250,7 +250,7 @@ TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) { ...@@ -250,7 +250,7 @@ TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) {
TEST(PageBackendTest, AllocateNormalUsesPool) { TEST(PageBackendTest, AllocateNormalUsesPool) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
PageBackend backend(&allocator); PageBackend backend(allocator);
constexpr size_t kBucket = 0; constexpr size_t kBucket = 0;
Address writeable_base1 = backend.AllocateNormalPageMemory(kBucket); Address writeable_base1 = backend.AllocateNormalPageMemory(kBucket);
EXPECT_NE(nullptr, writeable_base1); EXPECT_NE(nullptr, writeable_base1);
...@@ -262,7 +262,7 @@ TEST(PageBackendTest, AllocateNormalUsesPool) { ...@@ -262,7 +262,7 @@ TEST(PageBackendTest, AllocateNormalUsesPool) {
TEST(PageBackendTest, AllocateLarge) { TEST(PageBackendTest, AllocateLarge) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
PageBackend backend(&allocator); PageBackend backend(allocator);
Address writeable_base1 = backend.AllocateLargePageMemory(13731); Address writeable_base1 = backend.AllocateLargePageMemory(13731);
EXPECT_NE(nullptr, writeable_base1); EXPECT_NE(nullptr, writeable_base1);
Address writeable_base2 = backend.AllocateLargePageMemory(9478); Address writeable_base2 = backend.AllocateLargePageMemory(9478);
...@@ -274,7 +274,7 @@ TEST(PageBackendTest, AllocateLarge) { ...@@ -274,7 +274,7 @@ TEST(PageBackendTest, AllocateLarge) {
TEST(PageBackendTest, LookupNormal) { TEST(PageBackendTest, LookupNormal) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
PageBackend backend(&allocator); PageBackend backend(allocator);
constexpr size_t kBucket = 0; constexpr size_t kBucket = 0;
Address writeable_base = backend.AllocateNormalPageMemory(kBucket); Address writeable_base = backend.AllocateNormalPageMemory(kBucket);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize)); EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
...@@ -290,7 +290,7 @@ TEST(PageBackendTest, LookupNormal) { ...@@ -290,7 +290,7 @@ TEST(PageBackendTest, LookupNormal) {
TEST(PageBackendTest, LookupLarge) { TEST(PageBackendTest, LookupLarge) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
PageBackend backend(&allocator); PageBackend backend(allocator);
constexpr size_t kSize = 7934; constexpr size_t kSize = 7934;
Address writeable_base = backend.AllocateLargePageMemory(kSize); Address writeable_base = backend.AllocateLargePageMemory(kSize);
EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize)); EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
...@@ -303,7 +303,7 @@ TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) { ...@@ -303,7 +303,7 @@ TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
v8::base::PageAllocator allocator; v8::base::PageAllocator allocator;
Address base; Address base;
{ {
PageBackend backend(&allocator); PageBackend backend(allocator);
constexpr size_t kBucket = 0; constexpr size_t kBucket = 0;
base = backend.AllocateNormalPageMemory(kBucket); base = backend.AllocateNormalPageMemory(kBucket);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment