Commit 881fc049 authored by Michael Lippautz, committed by V8 LUCI CQ

[cppgc, cppgc-js] Implement GC on allocation failure

So far, Oilpan garbage collection was only ever triggered by heap-growing
strategies, either in V8 or in the stand-alone heap. This CL implements a
fallback that triggers a GC when an allocation fails.

- The stand-alone implementation defers to GCInvoker, which is aware of
  stack support.
- The CppHeap implementation just triggers a full V8 GC.

Bug: chromium:1352649
Change-Id: If92f705b4e272290ca7022864fd7b90f0fcb809e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3865148
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#82844}
parent 6229eee3
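
The change boils down to one control-flow pattern, visible twice in
ObjectAllocator::OutOfLineAllocateImpl below: try to allocate, and on failure
run a conservative atomic GC and retry exactly once before escalating to the
fatal OOM handler. A minimal stand-alone sketch of that pattern follows; the
Collector, FatalOutOfMemory, and TryAllocateRaw names are hypothetical
stand-ins, not the cppgc API:

#include <cstddef>
#include <cstdio>
#include <cstdlib>

// Hypothetical stand-ins for cppgc's GarbageCollector interface and
// FatalOutOfMemoryHandler; the real types live in src/heap/cppgc/.
struct Collector {
  void CollectGarbage() { /* a full mark-sweep cycle would run here */ }
};

[[noreturn]] void FatalOutOfMemory(const char* reason) {
  std::fprintf(stderr, "Fatal OOM: %s\n", reason);
  std::abort();
}

// Fallible allocation primitive, mirroring the Create -> TryCreate renames
// in this CL: it reports failure instead of crashing.
void* TryAllocateRaw(std::size_t size) { return std::malloc(size); }

void* AllocateWithGCFallback(Collector& gc, std::size_t size) {
  void* result = TryAllocateRaw(size);
  if (!result) {
    // Last-resort collection: reclaim memory, then retry exactly once.
    gc.CollectGarbage();
    result = TryAllocateRaw(size);
    if (!result) FatalOutOfMemory("allocation failed even after GC");
  }
  return result;
}

The same shape appears in the large-object path and the linear-allocation-buffer
refill path of the allocator, and CppHeap::CollectGarbage maps the collection
onto a full V8 GC.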
@@ -855,7 +855,7 @@ class BackingStoreBase {};
 // The maximum value in enum GarbageCollectionReason, defined in heap.h.
 // This is needed for histograms sampling garbage collection reasons.
-constexpr int kGarbageCollectionReasonMaxValue = 25;
+constexpr int kGarbageCollectionReasonMaxValue = 27;
 }  // namespace internal
...
@@ -47,6 +47,7 @@
 #include "src/heap/embedder-tracing.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/global-handle-marking-visitor.h"
+#include "src/heap/heap.h"
 #include "src/heap/marking-worklist.h"
 #include "src/heap/sweeper.h"
 #include "src/init/v8.h"
@@ -485,7 +486,7 @@ CppHeap::CppHeap(
           std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
           cppgc::internal::HeapBase::StackSupport::
               kSupportsConservativeStackScan,
-          marking_support, sweeping_support),
+          marking_support, sweeping_support, *this),
       wrapper_descriptor_(wrapper_descriptor) {
   CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
            wrapper_descriptor_.embedder_id_for_garbage_collected);
@@ -1004,5 +1005,24 @@ CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope(
   }
 }
 
+void CppHeap::CollectGarbage(Config config) {
+  if (in_no_gc_scope() || !isolate_) return;
+
+  // TODO(mlippautz): Respect full config.
+  const int flags = (config.free_memory_handling ==
+                     Config::FreeMemoryHandling::kDiscardWherePossible)
+                        ? Heap::kReduceMemoryFootprintMask
+                        : Heap::kNoGCFlags;
+  isolate_->heap()->CollectAllGarbage(
+      flags, GarbageCollectionReason::kCppHeapAllocationFailure);
+}
+
+const cppgc::EmbedderStackState* CppHeap::override_stack_state() const {
+  return HeapBase::override_stack_state();
+}
+
+void CppHeap::StartIncrementalGarbageCollection(Config) { UNIMPLEMENTED(); }
+
+size_t CppHeap::epoch() const { UNIMPLEMENTED(); }
+
 }  // namespace internal
 }  // namespace v8
@@ -33,7 +33,8 @@ class CppMarkingState;
 class V8_EXPORT_PRIVATE CppHeap final
     : public cppgc::internal::HeapBase,
       public v8::CppHeap,
-      public cppgc::internal::StatsCollector::AllocationObserver {
+      public cppgc::internal::StatsCollector::AllocationObserver,
+      public cppgc::internal::GarbageCollector {
  public:
   enum GarbageCollectionFlagValues : uint8_t {
     kNoFlags = 0,
@@ -166,6 +167,12 @@ class V8_EXPORT_PRIVATE CppHeap final
   std::unique_ptr<CppMarkingState> CreateCppMarkingState();
   std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread();
 
+  // cppgc::internal::GarbageCollector interface.
+  void CollectGarbage(Config) override;
+  const cppgc::EmbedderStackState* override_stack_state() const override;
+  void StartIncrementalGarbageCollection(Config) override;
+  size_t epoch() const override;
+
  private:
   void ReduceGCCapabilititesFromFlags();
...
@@ -96,7 +96,7 @@ HeapBase::HeapBase(
     std::shared_ptr<cppgc::Platform> platform,
     const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
     StackSupport stack_support, MarkingType marking_support,
-    SweepingType sweeping_support)
+    SweepingType sweeping_support, GarbageCollector& garbage_collector)
     : raw_heap_(this, custom_spaces),
       platform_(std::move(platform)),
       oom_handler_(std::make_unique<FatalOutOfMemoryHandler>(this)),
@@ -111,7 +111,8 @@ HeapBase::HeapBase(
       prefinalizer_handler_(std::make_unique<PreFinalizerHandler>(*this)),
       compactor_(raw_heap_),
       object_allocator_(raw_heap_, *page_backend_, *stats_collector_,
-                        *prefinalizer_handler_),
+                        *prefinalizer_handler_, *oom_handler_,
+                        garbage_collector),
       sweeper_(*this),
       strong_persistent_region_(*oom_handler_.get()),
       weak_persistent_region_(*oom_handler_.get()),
...
@@ -15,7 +15,6 @@
 #include "include/cppgc/macros.h"
 #include "src/base/macros.h"
 #include "src/heap/cppgc/compactor.h"
-#include "src/heap/cppgc/garbage-collector.h"
 #include "src/heap/cppgc/heap-object-header.h"
 #include "src/heap/cppgc/marker.h"
 #include "src/heap/cppgc/metric-recorder.h"
@@ -60,6 +59,7 @@ class Platform;
 namespace internal {
 
 class FatalOutOfMemoryHandler;
+class GarbageCollector;
 class PageBackend;
 class PreFinalizerHandler;
 class StatsCollector;
@@ -83,7 +83,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
   HeapBase(std::shared_ptr<cppgc::Platform> platform,
           const std::vector<std::unique_ptr<CustomSpaceBase>>& custom_spaces,
           StackSupport stack_support, MarkingType marking_support,
-          SweepingType sweeping_support);
+          SweepingType sweeping_support, GarbageCollector& garbage_collector);
   virtual ~HeapBase();
 
   HeapBase(const HeapBase&) = delete;
...
@@ -5,6 +5,7 @@
 #include "src/heap/cppgc/heap-page.h"
 
 #include <algorithm>
+#include <cstddef>
 
 #include "include/cppgc/internal/api-constants.h"
 #include "src/base/logging.h"
@@ -132,9 +133,11 @@ BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type)
 }
 
 // static
-NormalPage* NormalPage::Create(PageBackend& page_backend,
-                               NormalPageSpace& space) {
-  void* memory = page_backend.AllocateNormalPageMemory();
+NormalPage* NormalPage::TryCreate(PageBackend& page_backend,
+                                  NormalPageSpace& space) {
+  void* memory = page_backend.TryAllocateNormalPageMemory();
+  if (!memory) return nullptr;
+
   auto* normal_page = new (memory) NormalPage(*space.raw_heap()->heap(), space);
   normal_page->SynchronizedStore();
   normal_page->heap().stats_collector()->NotifyAllocatedMemory(kPageSize);
@@ -226,8 +229,8 @@ size_t LargePage::AllocationSize(size_t payload_size) {
 }
 
 // static
-LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
-                             size_t size) {
+LargePage* LargePage::TryCreate(PageBackend& page_backend,
+                                LargePageSpace& space, size_t size) {
   // Ensure that the API-provided alignment guarantees does not violate the
   // internally guaranteed alignment of large page allocations.
   static_assert(kGuaranteedObjectAlignment <=
@@ -239,7 +242,9 @@ LargePage* LargePage::Create(PageBackend& page_backend, LargePageSpace& space,
   const size_t allocation_size = AllocationSize(size);
 
   auto* heap = space.raw_heap()->heap();
-  void* memory = page_backend.AllocateLargePageMemory(allocation_size);
+  void* memory = page_backend.TryAllocateLargePageMemory(allocation_size);
+  if (!memory) return nullptr;
+
   LargePage* page = new (memory) LargePage(*heap, space, size);
   page->SynchronizedStore();
 #if defined(CPPGC_CAGED_HEAP)
...
@@ -151,7 +151,7 @@ class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
   using const_iterator = IteratorImpl<const HeapObjectHeader>;
 
   // Allocates a new page in the detached state.
-  static NormalPage* Create(PageBackend&, NormalPageSpace&);
+  static NormalPage* TryCreate(PageBackend&, NormalPageSpace&);
 
   // Destroys and frees the page. The page must be detached from the
   // corresponding space (i.e. be swept when called).
   static void Destroy(NormalPage*);
@@ -221,7 +221,7 @@ class V8_EXPORT_PRIVATE LargePage final : public BasePage {
   // Returns the allocation size required for a payload of size |size|.
   static size_t AllocationSize(size_t size);
 
   // Allocates a new page in the detached state.
-  static LargePage* Create(PageBackend&, LargePageSpace&, size_t);
+  static LargePage* TryCreate(PageBackend&, LargePageSpace&, size_t);
 
   // Destroys and frees the page. The page must be detached from the
   // corresponding space (i.e. be swept when called).
   static void Destroy(LargePage*);
...
@@ -79,7 +79,7 @@ void CheckConfig(Heap::Config config, HeapBase::MarkingType marking_support,
 Heap::Heap(std::shared_ptr<cppgc::Platform> platform,
            cppgc::Heap::HeapOptions options)
     : HeapBase(platform, options.custom_spaces, options.stack_support,
-               options.marking_support, options.sweeping_support),
+               options.marking_support, options.sweeping_support, gc_invoker_),
       gc_invoker_(this, platform_.get(), options.stack_support),
       growing_(&gc_invoker_, stats_collector_.get(),
                options.resource_constraints, options.marking_support,
...
@@ -17,6 +17,7 @@
 #include "src/heap/cppgc/memory.h"
 #include "src/heap/cppgc/object-start-bitmap.h"
 #include "src/heap/cppgc/page-memory.h"
+#include "src/heap/cppgc/platform.h"
 #include "src/heap/cppgc/prefinalizer-handler.h"
 #include "src/heap/cppgc/stats-collector.h"
 #include "src/heap/cppgc/sweeper.h"
@@ -79,10 +80,12 @@ void ReplaceLinearAllocationBuffer(NormalPageSpace& space,
   }
 }
 
-void* AllocateLargeObject(PageBackend& page_backend, LargePageSpace& space,
-                          StatsCollector& stats_collector, size_t size,
-                          GCInfoIndex gcinfo) {
-  LargePage* page = LargePage::Create(page_backend, space, size);
+void* TryAllocateLargeObject(PageBackend& page_backend, LargePageSpace& space,
+                             StatsCollector& stats_collector, size_t size,
+                             GCInfoIndex gcinfo) {
+  LargePage* page = LargePage::TryCreate(page_backend, space, size);
+  if (!page) return nullptr;
+
   space.AddPage(page);
 
   auto* header = new (page->ObjectHeader())
@@ -100,11 +103,15 @@ constexpr size_t ObjectAllocator::kSmallestSpaceSize;
 
 ObjectAllocator::ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
                                  StatsCollector& stats_collector,
-                                 PreFinalizerHandler& prefinalizer_handler)
+                                 PreFinalizerHandler& prefinalizer_handler,
+                                 FatalOutOfMemoryHandler& oom_handler,
+                                 GarbageCollector& garbage_collector)
     : raw_heap_(heap),
       page_backend_(page_backend),
       stats_collector_(stats_collector),
-      prefinalizer_handler_(prefinalizer_handler) {}
+      prefinalizer_handler_(prefinalizer_handler),
+      oom_handler_(oom_handler),
+      garbage_collector_(garbage_collector) {}
 
 void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
                                          AlignVal alignment,
@@ -138,8 +145,20 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
         *raw_heap_.Space(RawHeap::RegularSpaceType::kLarge));
     // LargePage has a natural alignment that already satisfies
     // `kMaxSupportedAlignment`.
-    return AllocateLargeObject(page_backend_, large_space, stats_collector_,
-                               size, gcinfo);
+    void* result = TryAllocateLargeObject(page_backend_, large_space,
+                                          stats_collector_, size, gcinfo);
+    if (!result) {
+      auto config = GarbageCollector::Config::ConservativeAtomicConfig();
+      config.free_memory_handling =
+          GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible;
+      garbage_collector_.CollectGarbage(config);
+      result = TryAllocateLargeObject(page_backend_, large_space,
+                                      stats_collector_, size, gcinfo);
+      if (!result) {
+        oom_handler_("Oilpan: Large allocation.");
+      }
+    }
+    return result;
   }
 
   size_t request_size = size;
@@ -150,7 +169,15 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
     request_size += kAllocationGranularity;
   }
 
-  RefillLinearAllocationBuffer(space, request_size);
+  if (!TryRefillLinearAllocationBuffer(space, request_size)) {
+    auto config = GarbageCollector::Config::ConservativeAtomicConfig();
+    config.free_memory_handling =
+        GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible;
+    garbage_collector_.CollectGarbage(config);
+    if (!TryRefillLinearAllocationBuffer(space, request_size)) {
+      oom_handler_("Oilpan: Normal allocation.");
+    }
+  }
 
   // The allocation must succeed, as we just refilled the LAB.
   void* result = (dynamic_alignment == kAllocationGranularity)
@@ -160,10 +187,10 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
   return result;
 }
-void ObjectAllocator::RefillLinearAllocationBuffer(NormalPageSpace& space,
-                                                   size_t size) {
+bool ObjectAllocator::TryRefillLinearAllocationBuffer(NormalPageSpace& space,
+                                                      size_t size) {
   // Try to allocate from the freelist.
-  if (RefillLinearAllocationBufferFromFreeList(space, size)) return;
+  if (TryRefillLinearAllocationBufferFromFreeList(space, size)) return true;
 
   // Lazily sweep pages of this heap until we find a freed area for this
   // allocation or we finish sweeping all pages of this heap.
@@ -179,22 +206,26 @@ void ObjectAllocator::RefillLinearAllocationBuffer(NormalPageSpace& space,
       // may only potentially fit the block. For the bucket that may exactly fit
       // the allocation of `size` bytes (no overallocation), only the first
      // entry is checked.
-      if (RefillLinearAllocationBufferFromFreeList(space, size)) return;
+      if (TryRefillLinearAllocationBufferFromFreeList(space, size)) return true;
    }
 
  sweeper.FinishIfRunning();
 
  // TODO(chromium:1056170): Make use of the synchronously freed memory.
-  auto* new_page = NormalPage::Create(page_backend_, space);
-  space.AddPage(new_page);
+  auto* new_page = NormalPage::TryCreate(page_backend_, space);
+  if (!new_page) {
+    return false;
+  }
+
+  space.AddPage(new_page);
 
   // Set linear allocation buffer to new page.
   ReplaceLinearAllocationBuffer(space, stats_collector_,
                                 new_page->PayloadStart(),
                                 new_page->PayloadSize());
+  return true;
 }
 
-bool ObjectAllocator::RefillLinearAllocationBufferFromFreeList(
+bool ObjectAllocator::TryRefillLinearAllocationBufferFromFreeList(
     NormalPageSpace& space, size_t size) {
   const FreeList::Block entry = space.free_list().Allocate(size);
   if (!entry.address) return false;
...
@@ -34,14 +34,14 @@ namespace internal {
 class StatsCollector;
 class PageBackend;
+class GarbageCollector;
 
 class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
  public:
   static constexpr size_t kSmallestSpaceSize = 32;
 
-  ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
-                  StatsCollector& stats_collector,
-                  PreFinalizerHandler& prefinalizer_handler);
+  ObjectAllocator(RawHeap&, PageBackend&, StatsCollector&, PreFinalizerHandler&,
+                  FatalOutOfMemoryHandler&, GarbageCollector&);
 
   inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
   inline void* AllocateObject(size_t size, AlignVal alignment,
@@ -71,13 +71,15 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
   void* OutOfLineAllocate(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);
   void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);
 
-  void RefillLinearAllocationBuffer(NormalPageSpace&, size_t);
-  bool RefillLinearAllocationBufferFromFreeList(NormalPageSpace&, size_t);
+  bool TryRefillLinearAllocationBuffer(NormalPageSpace&, size_t);
+  bool TryRefillLinearAllocationBufferFromFreeList(NormalPageSpace&, size_t);
 
   RawHeap& raw_heap_;
   PageBackend& page_backend_;
   StatsCollector& stats_collector_;
   PreFinalizerHandler& prefinalizer_handler_;
+  FatalOutOfMemoryHandler& oom_handler_;
+  GarbageCollector& garbage_collector_;
 };
 
 void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
...
@@ -4,6 +4,8 @@
 #include "src/heap/cppgc/page-memory.h"
 
+#include <cstddef>
+
 #include "src/base/macros.h"
 #include "src/base/sanitizer/asan.h"
 #include "src/heap/cppgc/platform.h"
@@ -13,50 +15,40 @@ namespace internal {
 
 namespace {
 
-void Unprotect(PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
-               const PageMemory& page_memory) {
+V8_WARN_UNUSED_RESULT bool TryUnprotect(PageAllocator& allocator,
+                                        const PageMemory& page_memory) {
   if (SupportsCommittingGuardPages(allocator)) {
-    if (!allocator.SetPermissions(page_memory.writeable_region().base(),
-                                  page_memory.writeable_region().size(),
-                                  PageAllocator::Permission::kReadWrite)) {
-      oom_handler("Oilpan: Unprotecting memory.");
-    }
-  } else {
-    // No protection in case the allocator cannot commit at the required
-    // granularity. Only protect if the allocator supports committing at that
-    // granularity.
-    //
-    // The allocator needs to support committing the overall range.
-    CHECK_EQ(0u,
-             page_memory.overall_region().size() % allocator.CommitPageSize());
-    if (!allocator.SetPermissions(page_memory.overall_region().base(),
-                                  page_memory.overall_region().size(),
-                                  PageAllocator::Permission::kReadWrite)) {
-      oom_handler("Oilpan: Unprotecting memory.");
-    }
+    return allocator.SetPermissions(page_memory.writeable_region().base(),
+                                    page_memory.writeable_region().size(),
+                                    PageAllocator::Permission::kReadWrite);
   }
+  // No protection using guard pages in case the allocator cannot commit at
+  // the required granularity. Only protect if the allocator supports
+  // committing at that granularity.
+  //
+  // The allocator needs to support committing the overall range.
+  CHECK_EQ(0u,
+           page_memory.overall_region().size() % allocator.CommitPageSize());
+  return allocator.SetPermissions(page_memory.overall_region().base(),
+                                  page_memory.overall_region().size(),
+                                  PageAllocator::Permission::kReadWrite);
 }
 
-void Protect(PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
-             const PageMemory& page_memory) {
+V8_WARN_UNUSED_RESULT bool TryProtect(PageAllocator& allocator,
                                      const PageMemory& page_memory) {
   if (SupportsCommittingGuardPages(allocator)) {
     // Swap the same region, providing the OS with a chance for fast lookup and
     // change.
-    if (!allocator.SetPermissions(page_memory.writeable_region().base(),
-                                  page_memory.writeable_region().size(),
-                                  PageAllocator::Permission::kNoAccess)) {
-      oom_handler("Oilpan: Protecting memory.");
-    }
-  } else {
-    // See Unprotect().
-    CHECK_EQ(0u,
-             page_memory.overall_region().size() % allocator.CommitPageSize());
-    if (!allocator.SetPermissions(page_memory.overall_region().base(),
-                                  page_memory.overall_region().size(),
-                                  PageAllocator::Permission::kNoAccess)) {
-      oom_handler("Oilpan: Protecting memory.");
-    }
+    return allocator.SetPermissions(page_memory.writeable_region().base(),
+                                    page_memory.writeable_region().size(),
+                                    PageAllocator::Permission::kNoAccess);
   }
+  // See Unprotect().
+  CHECK_EQ(0u,
+           page_memory.overall_region().size() % allocator.CommitPageSize());
+  return allocator.SetPermissions(page_memory.overall_region().base(),
+                                  page_memory.overall_region().size(),
+                                  PageAllocator::Permission::kNoAccess);
 }
 
 MemoryRegion ReserveMemoryRegion(PageAllocator& allocator,
@@ -84,10 +76,8 @@ void FreeMemoryRegion(PageAllocator& allocator,
 }  // namespace
 
 PageMemoryRegion::PageMemoryRegion(PageAllocator& allocator,
-                                   FatalOutOfMemoryHandler& oom_handler,
                                    MemoryRegion reserved_region, bool is_large)
     : allocator_(allocator),
-      oom_handler_(oom_handler),
       reserved_region_(reserved_region),
       is_large_(is_large) {}
 
@@ -101,7 +91,7 @@ constexpr size_t NormalPageMemoryRegion::kNumPageRegions;
 NormalPageMemoryRegion::NormalPageMemoryRegion(
     PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler)
     : PageMemoryRegion(
-          allocator, oom_handler,
+          allocator,
           ReserveMemoryRegion(allocator, oom_handler,
                               RoundUp(kPageSize * kNumPageRegions,
                                       allocator.AllocatePageSize())),
@@ -115,21 +105,24 @@ NormalPageMemoryRegion::NormalPageMemoryRegion(
 NormalPageMemoryRegion::~NormalPageMemoryRegion() = default;
 
-void NormalPageMemoryRegion::Allocate(Address writeable_base) {
+bool NormalPageMemoryRegion::TryAllocate(Address writeable_base) {
   const size_t index = GetIndex(writeable_base);
-  ChangeUsed(index, true);
-  Unprotect(allocator_, oom_handler_, GetPageMemory(index));
+  if (TryUnprotect(allocator_, GetPageMemory(index))) {
+    ChangeUsed(index, true);
+    return true;
+  }
+  return false;
 }
 
 void NormalPageMemoryRegion::Free(Address writeable_base) {
   const size_t index = GetIndex(writeable_base);
   ChangeUsed(index, false);
-  Protect(allocator_, oom_handler_, GetPageMemory(index));
+  CHECK(TryProtect(allocator_, GetPageMemory(index)));
 }
 
 void NormalPageMemoryRegion::UnprotectForTesting() {
   for (size_t i = 0; i < kNumPageRegions; ++i) {
-    Unprotect(allocator_, oom_handler_, GetPageMemory(i));
+    CHECK(TryUnprotect(allocator_, GetPageMemory(i)));
   }
 }
@@ -137,7 +130,7 @@ LargePageMemoryRegion::LargePageMemoryRegion(
     PageAllocator& allocator, FatalOutOfMemoryHandler& oom_handler,
     size_t length)
     : PageMemoryRegion(
-          allocator, oom_handler,
+          allocator,
           ReserveMemoryRegion(allocator, oom_handler,
                               RoundUp(length + 2 * kGuardPageSize,
                                       allocator.AllocatePageSize())),
@@ -146,7 +139,7 @@ LargePageMemoryRegion::LargePageMemoryRegion(
 LargePageMemoryRegion::~LargePageMemoryRegion() = default;
 
 void LargePageMemoryRegion::UnprotectForTesting() {
-  Unprotect(allocator_, oom_handler_, GetPageMemory());
+  CHECK(TryUnprotect(allocator_, GetPageMemory()));
 }
 
 PageMemoryRegionTree::PageMemoryRegionTree() = default;
@@ -192,7 +185,7 @@ PageBackend::PageBackend(PageAllocator& normal_page_allocator,
 PageBackend::~PageBackend() = default;
 
-Address PageBackend::AllocateNormalPageMemory() {
+Address PageBackend::TryAllocateNormalPageMemory() {
   v8::base::MutexGuard guard(&mutex_);
   std::pair<NormalPageMemoryRegion*, Address> result = page_pool_.Take();
   if (!result.first) {
@@ -207,8 +200,11 @@ Address PageBackend::AllocateNormalPageMemory() {
     result = page_pool_.Take();
     DCHECK(result.first);
   }
-  result.first->Allocate(result.second);
-  return result.second;
+  if (V8_LIKELY(result.first->TryAllocate(result.second))) {
+    return result.second;
+  }
+  page_pool_.Add(result.first, result.second);
+  return nullptr;
 }
 
 void PageBackend::FreeNormalPageMemory(size_t bucket, Address writeable_base) {
@@ -219,15 +215,18 @@ void PageBackend::FreeNormalPageMemory(size_t bucket, Address writeable_base) {
   page_pool_.Add(pmr, writeable_base);
 }
 
-Address PageBackend::AllocateLargePageMemory(size_t size) {
+Address PageBackend::TryAllocateLargePageMemory(size_t size) {
   v8::base::MutexGuard guard(&mutex_);
   auto pmr = std::make_unique<LargePageMemoryRegion>(large_page_allocator_,
                                                      oom_handler_, size);
   const PageMemory pm = pmr->GetPageMemory();
-  Unprotect(large_page_allocator_, oom_handler_, pm);
-  page_memory_region_tree_.Add(pmr.get());
-  large_page_memory_regions_.insert(std::make_pair(pmr.get(), std::move(pmr)));
-  return pm.writeable_region().base();
+  if (V8_LIKELY(TryUnprotect(large_page_allocator_, pm))) {
+    page_memory_region_tree_.Add(pmr.get());
+    large_page_memory_regions_.insert(
+        std::make_pair(pmr.get(), std::move(pmr)));
+    return pm.writeable_region().base();
+  }
+  return nullptr;
 }
 
 void PageBackend::FreeLargePageMemory(Address writeable_base) {
...
@@ -82,11 +82,9 @@ class V8_EXPORT_PRIVATE PageMemoryRegion {
   virtual void UnprotectForTesting() = 0;
 
  protected:
-  PageMemoryRegion(PageAllocator&, FatalOutOfMemoryHandler&, MemoryRegion,
-                   bool);
+  PageMemoryRegion(PageAllocator&, MemoryRegion, bool);
 
   PageAllocator& allocator_;
-  FatalOutOfMemoryHandler& oom_handler_;
   const MemoryRegion reserved_region_;
   const bool is_large_;
 };
@@ -110,7 +108,8 @@ class V8_EXPORT_PRIVATE NormalPageMemoryRegion final : public PageMemoryRegion {
   // Allocates a normal page at |writeable_base| address. Changes page
   // protection.
-  void Allocate(Address writeable_base);
+  // Returns true when the allocation was successful and false otherwise.
+  V8_WARN_UNUSED_RESULT bool TryAllocate(Address writeable_base);
 
   // Frees a normal page at at |writeable_base| address. Changes page
   // protection.
@@ -203,7 +202,7 @@ class V8_EXPORT_PRIVATE PageBackend final {
   // Allocates a normal page from the backend.
   //
   // Returns the writeable base of the region.
-  Address AllocateNormalPageMemory();
+  Address TryAllocateNormalPageMemory();
 
   // Returns normal page memory back to the backend. Expects the
   // |writeable_base| returned by |AllocateNormalMemory()|.
@@ -212,7 +211,7 @@ class V8_EXPORT_PRIVATE PageBackend final {
   // Allocates a large page from the backend.
   //
   // Returns the writeable base of the region.
-  Address AllocateLargePageMemory(size_t size);
+  Address TryAllocateLargePageMemory(size_t size);
 
   // Returns large page memory back to the backend. Expects the |writeable_base|
   // returned by |AllocateLargePageMemory()|.
...
@@ -4284,8 +4284,9 @@ const char* Heap::GarbageCollectionReasonToString(
       return "background allocation failure";
     case GarbageCollectionReason::kFinalizeMinorMC:
       return "finalize MinorMC";
+    case GarbageCollectionReason::kCppHeapAllocationFailure:
+      return "CppHeap allocation failure";
   }
-  UNREACHABLE();
 }
 
 bool Heap::Contains(HeapObject value) const {
...
@@ -178,8 +178,9 @@ enum class GarbageCollectionReason : int {
   kMeasureMemory = 24,
   kBackgroundAllocationFailure = 25,
   kFinalizeMinorMC = 26,
+  kCppHeapAllocationFailure = 27,
 
-  kLastReason = kBackgroundAllocationFailure,
+  kLastReason = kCppHeapAllocationFailure,
 };
 
 static_assert(kGarbageCollectionReasonMaxValue ==
...
@@ -188,7 +188,8 @@ TEST_F(PageTest, NormalPageCreationDestruction) {
   const PageBackend* backend = Heap::From(GetHeap())->page_backend();
   auto* space = static_cast<NormalPageSpace*>(
       heap.Space(RawHeap::RegularSpaceType::kNormal1));
-  auto* page = NormalPage::Create(GetPageBackend(), *space);
+  auto* page = NormalPage::TryCreate(GetPageBackend(), *space);
+  EXPECT_NE(nullptr, page);
   EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
 
   space->AddPage(page);
@@ -213,7 +214,8 @@ TEST_F(PageTest, LargePageCreationDestruction) {
   const PageBackend* backend = Heap::From(GetHeap())->page_backend();
   auto* space = static_cast<LargePageSpace*>(
       heap.Space(RawHeap::RegularSpaceType::kLarge));
-  auto* page = LargePage::Create(GetPageBackend(), *space, kObjectSize);
+  auto* page = LargePage::TryCreate(GetPageBackend(), *space, kObjectSize);
+  EXPECT_NE(nullptr, page);
   EXPECT_NE(nullptr, backend->Lookup(page->PayloadStart()));
 
   space->AddPage(page);
@@ -231,15 +233,17 @@ TEST_F(PageTest, UnsweptPageDestruction) {
   {
     auto* space = static_cast<NormalPageSpace*>(
         heap.Space(RawHeap::RegularSpaceType::kNormal1));
-    auto* page = NormalPage::Create(GetPageBackend(), *space);
+    auto* page = NormalPage::TryCreate(GetPageBackend(), *space);
+    EXPECT_NE(nullptr, page);
     space->AddPage(page);
     EXPECT_DEATH_IF_SUPPORTED(NormalPage::Destroy(page), "");
   }
   {
     auto* space = static_cast<LargePageSpace*>(
         heap.Space(RawHeap::RegularSpaceType::kLarge));
-    auto* page = LargePage::Create(GetPageBackend(), *space,
-                                   2 * kLargeObjectSizeThreshold);
+    auto* page = LargePage::TryCreate(GetPageBackend(), *space,
+                                      2 * kLargeObjectSizeThreshold);
+    EXPECT_NE(nullptr, page);
     space->AddPage(page);
     EXPECT_DEATH_IF_SUPPORTED(LargePage::Destroy(page), "");
     // Detach page and really destroy page in the parent process so that sweeper
...
@@ -253,10 +253,10 @@ TEST(PageBackendTest, AllocateNormalUsesPool) {
   FatalOutOfMemoryHandler oom_handler;
   PageBackend backend(allocator, allocator, oom_handler);
   constexpr size_t kBucket = 0;
-  Address writeable_base1 = backend.AllocateNormalPageMemory();
+  Address writeable_base1 = backend.TryAllocateNormalPageMemory();
   EXPECT_NE(nullptr, writeable_base1);
   backend.FreeNormalPageMemory(kBucket, writeable_base1);
-  Address writeable_base2 = backend.AllocateNormalPageMemory();
+  Address writeable_base2 = backend.TryAllocateNormalPageMemory();
   EXPECT_NE(nullptr, writeable_base2);
   EXPECT_EQ(writeable_base1, writeable_base2);
 }
@@ -265,9 +265,9 @@ TEST(PageBackendTest, AllocateLarge) {
   v8::base::PageAllocator allocator;
   FatalOutOfMemoryHandler oom_handler;
   PageBackend backend(allocator, allocator, oom_handler);
-  Address writeable_base1 = backend.AllocateLargePageMemory(13731);
+  Address writeable_base1 = backend.TryAllocateLargePageMemory(13731);
   EXPECT_NE(nullptr, writeable_base1);
-  Address writeable_base2 = backend.AllocateLargePageMemory(9478);
+  Address writeable_base2 = backend.TryAllocateLargePageMemory(9478);
   EXPECT_NE(nullptr, writeable_base2);
   EXPECT_NE(writeable_base1, writeable_base2);
   backend.FreeLargePageMemory(writeable_base1);
@@ -278,7 +278,7 @@ TEST(PageBackendTest, LookupNormal) {
   v8::base::PageAllocator allocator;
   FatalOutOfMemoryHandler oom_handler;
   PageBackend backend(allocator, allocator, oom_handler);
-  Address writeable_base = backend.AllocateNormalPageMemory();
+  Address writeable_base = backend.TryAllocateNormalPageMemory();
   if (kGuardPageSize) {
     EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
   }
@@ -299,7 +299,7 @@ TEST(PageBackendTest, LookupLarge) {
   FatalOutOfMemoryHandler oom_handler;
   PageBackend backend(allocator, allocator, oom_handler);
   constexpr size_t kSize = 7934;
-  Address writeable_base = backend.AllocateLargePageMemory(kSize);
+  Address writeable_base = backend.TryAllocateLargePageMemory(kSize);
   if (kGuardPageSize) {
     EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
   }
@@ -314,7 +314,7 @@ TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
   Address base;
   {
     PageBackend backend(allocator, allocator, oom_handler);
-    base = backend.AllocateNormalPageMemory();
+    base = backend.TryAllocateNormalPageMemory();
   }
   EXPECT_DEATH_IF_SUPPORTED(access(base[0]), "");
 }
...