Commit 3276083a authored by Eric Holk, committed by Commit Bot

[wasm] Collect garbage when address space limit is reached

Because the GC is not aware of address space usage, this CL causes Wasm to
explicitly trigger a GC when its address space limit is reached in hopes of
being able to successfully allocate memory.

R=mlippautz@chromium.org
R=gdeepti@chromium.org

Change-Id: I2dcc560dd3d351dbfc4dda2f7c321c470a4d9fff
Reviewed-on: https://chromium-review.googlesource.com/985103
Reviewed-by: Deepti Gandluri <gdeepti@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Eric Holk <eholk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52305}
parent ab6830c0
......@@ -13,8 +13,8 @@ namespace internal {
namespace wasm {
namespace {
void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, size_t size,
bool require_guard_regions,
void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, Heap* heap,
size_t size, bool require_guard_regions,
void** allocation_base,
size_t* allocation_length) {
#if V8_TARGET_ARCH_32_BIT
......@@ -33,10 +33,14 @@ void* TryAllocateBackingStore(WasmMemoryTracker* memory_tracker, size_t size,
// Let the WasmMemoryTracker know we are going to reserve a bunch of
// address space.
if (!memory_tracker->ReserveAddressSpace(*allocation_length)) {
// If we fail the first time, collect garbage and retry.
heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
if (!memory_tracker->ReserveAddressSpace(*allocation_length)) {
// If we are over the address space limit, fail.
return nullptr;
}
}
// The Reserve makes the whole region inaccessible by default.
*allocation_base = AllocatePages(nullptr, *allocation_length, kWasmPageSize,
......@@ -161,15 +165,16 @@ bool WasmMemoryTracker::IsWasmMemory(const void* buffer_start) {
}
void* WasmMemoryTracker::GetEmptyBackingStore(void** allocation_base,
size_t* allocation_length) {
size_t* allocation_length,
Heap* heap) {
if (empty_backing_store_.allocation_base == nullptr) {
constexpr size_t buffer_length = 0;
const bool require_guard_regions = trap_handler::IsTrapHandlerEnabled();
void* local_allocation_base;
size_t local_allocation_length;
void* buffer_start = TryAllocateBackingStore(
this, buffer_length, require_guard_regions, &local_allocation_base,
&local_allocation_length);
this, heap, buffer_length, require_guard_regions,
&local_allocation_base, &local_allocation_length);
empty_backing_store_ =
AllocationData(local_allocation_base, local_allocation_length,
......@@ -231,11 +236,13 @@ Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size,
void* allocation_base = nullptr;
size_t allocation_length = 0;
void* memory = (size == 0) ? memory_tracker->GetEmptyBackingStore(
&allocation_base, &allocation_length)
: TryAllocateBackingStore(
memory_tracker, size, require_guard_regions,
&allocation_base, &allocation_length);
void* memory =
(size == 0)
? memory_tracker->GetEmptyBackingStore(
&allocation_base, &allocation_length, isolate->heap())
: TryAllocateBackingStore(memory_tracker, isolate->heap(), size,
require_guard_regions, &allocation_base,
&allocation_length);
if (size > 0 && memory == nullptr) {
return Handle<JSArrayBuffer>::null();
......
......@@ -72,7 +72,8 @@ class WasmMemoryTracker {
// Empty WebAssembly memories are all backed by a shared inaccessible
// reservation. This method creates this store or returns the existing one if
// already created.
void* GetEmptyBackingStore(void** allocation_base, size_t* allocation_length);
void* GetEmptyBackingStore(void** allocation_base, size_t* allocation_length,
Heap* heap);
bool IsEmptyBackingStore(const void* buffer_start) const;
......
......@@ -1149,6 +1149,21 @@ TEST(Run_WasmModule_Buffer_Externalized_Regression_UseAfterFree) {
int_buffer[0] = 0;
}
#if V8_TARGET_ARCH_64_BIT
// Regression test for the change that triggers a GC (via a critical memory-
// pressure notification) when the Wasm address-space limit is hit, so that
// dead memories are reclaimed and a subsequent reservation can succeed.
TEST(Run_WasmModule_Reclaim_Memory) {
// Make sure we can allocate memories without running out of address space.
Isolate* isolate = CcTest::InitIsolateOnce();
Handle<JSArrayBuffer> buffer;
for (int i = 0; i < 256; ++i) {
// A fresh HandleScope per iteration drops the handle to the previous
// buffer, making it collectible; without the GC-on-limit behavior the
// accumulated guard-region reservations would exhaust address space.
// NOTE(review): 256 iterations presumably exceeds the address-space
// budget when each guard-region allocation reserves a large region —
// confirm against the WasmMemoryTracker limit.
HandleScope scope(isolate);
// Guard regions force the large (multi-GiB) reservation per memory that
// this test is exercising (64-bit only, per the surrounding #if).
constexpr bool require_guard_regions = true;
buffer = NewArrayBuffer(isolate, kWasmPageSize, require_guard_regions,
SharedFlag::kNotShared);
// Allocation must succeed every time; a null buffer would mean the
// GC fallback in TryAllocateBackingStore failed to free enough space.
CHECK(!buffer.is_null());
}
}
#endif
TEST(AtomicOpDisassembly) {
{
EXPERIMENTAL_FLAG_SCOPE(threads);
......
......@@ -785,12 +785,6 @@
'whitespaces': [SKIP],
}], # variant == wasm_traps
['variant == wasm_traps and gc_stress == True', {
# TODO(eholk): these tests are disabled due to address space exhaustion.
# Re-enable them once Wasm address space limits are in place.
'*': [SKIP],
}], # variant == wasm_traps and gc_stress == True
##############################################################################
['no_harness', {
# skip assertion tests since the stack trace is broken if mjsunit is
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.