Commit 10fce9c8 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Do eager unmapping in CollectAllAvailableGarbage.

The memory metric samples memory usage immediately after forcing GC via
LowMemoryNotification. This makes the metric sensitive to the unmapper
tasks timing.

This patch forces eager unmapping in CollectAllAvailableGarbage.

It also forces eager unmapping of non-regular chunks at the beginning
of Mark-Compact to avoid accumulation of non-regular chunks.

Bug: chromium:833291, chromium:826384
Change-Id: Iddf02cd4ab8613385d033899d29525fe6ee47fdd
Reviewed-on: https://chromium-review.googlesource.com/1017102
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52696}
parent 9cb8ad14
......@@ -1257,6 +1257,7 @@ void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
set_current_gc_flags(kNoGCFlags);
new_space_->Shrink();
UncommitFromSpace();
memory_allocator()->unmapper()->EnsureUnmappingCompleted();
if (FLAG_trace_duplicate_threshold_kb) {
std::map<int, std::vector<HeapObject*>> objects_by_size;
......
......@@ -816,9 +816,7 @@ void MarkCompactCollector::Prepare() {
heap()->incremental_marking()->Stop();
}
// If concurrent unmapping tasks are still running, we should wait for
// them here.
heap()->memory_allocator()->unmapper()->WaitUntilCompleted();
heap()->memory_allocator()->unmapper()->PrepareForMarkCompact();
// Clear marking bits if incremental marking is aborted.
if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
......
......@@ -373,7 +373,7 @@ void MemoryAllocator::Unmapper::FreeQueuedChunks() {
}
}
void MemoryAllocator::Unmapper::WaitUntilCompleted() {
void MemoryAllocator::Unmapper::CancelAndWaitForPendingTasks() {
for (int i = 0; i < pending_unmapping_tasks_; i++) {
if (heap_->isolate()->cancelable_task_manager()->TryAbort(task_ids_[i]) !=
CancelableTaskManager::kTaskAborted) {
......@@ -384,22 +384,41 @@ void MemoryAllocator::Unmapper::WaitUntilCompleted() {
active_unmapping_tasks_.SetValue(0);
if (FLAG_trace_unmapper) {
PrintIsolate(heap_->isolate(),
"Unmapper::WaitUntilCompleted: no tasks remaining\n");
PrintIsolate(
heap_->isolate(),
"Unmapper::CancelAndWaitForPendingTasks: no tasks remaining\n");
}
}
// Called at the start of a Mark-Compact GC: synchronously cancels or waits
// for any in-flight background unmapping tasks, then eagerly frees the
// queued non-regular chunks so they do not accumulate across GCs.
void MemoryAllocator::Unmapper::PrepareForMarkCompact() {
  CancelAndWaitForPendingTasks();
  // Free non-regular chunks because they cannot be re-used.
  PerformFreeMemoryOnQueuedNonRegularChunks();
}
// Synchronously completes all outstanding unmapping work: cancels/waits for
// pending background tasks, then frees every queued chunk, releasing pooled
// chunks as well (FreeMode::kReleasePooled). Used where memory must be fully
// returned right away, e.g. after CollectAllAvailableGarbage and during
// isolate teardown.
void MemoryAllocator::Unmapper::EnsureUnmappingCompleted() {
  CancelAndWaitForPendingTasks();
  PerformFreeMemoryOnQueuedChunks<FreeMode::kReleasePooled>();
}
// Returns true if a new unmapping task may be scheduled, i.e. fewer than
// kMaxUnmapperTasks are pending. If every previously started task has
// already run to completion, finalizes them first so their slots free up.
bool MemoryAllocator::Unmapper::MakeRoomForNewTasks() {
  DCHECK_LE(pending_unmapping_tasks_, kMaxUnmapperTasks);
  if (active_unmapping_tasks_.Value() == 0 && pending_unmapping_tasks_ > 0) {
    // All previous unmapping tasks have been run to completion.
    // Finalize those tasks to make room for new ones.
    // NOTE(review): the next two lines appear to be pre-/post-image lines of
    // the same diff (WaitUntilCompleted was renamed to
    // CancelAndWaitForPendingTasks); only one call should remain — confirm
    // against the applied patch.
    WaitUntilCompleted();
    CancelAndWaitForPendingTasks();
  }
  return pending_unmapping_tasks_ != kMaxUnmapperTasks;
}
// Drains the queue of non-regular chunks, releasing each one's memory
// outright. Non-regular chunks cannot be re-used, so none of them are
// pooled.
void MemoryAllocator::Unmapper::PerformFreeMemoryOnQueuedNonRegularChunks() {
  for (MemoryChunk* chunk = GetMemoryChunkSafe<kNonRegular>();
       chunk != nullptr; chunk = GetMemoryChunkSafe<kNonRegular>()) {
    allocator_->PerformFreeMemory(chunk);
  }
}
template <MemoryAllocator::Unmapper::FreeMode mode>
void MemoryAllocator::Unmapper::PerformFreeMemoryOnQueuedChunks() {
MemoryChunk* chunk = nullptr;
......@@ -423,10 +442,7 @@ void MemoryAllocator::Unmapper::PerformFreeMemoryOnQueuedChunks() {
allocator_->Free<MemoryAllocator::kAlreadyPooled>(chunk);
}
}
// Non-regular chunks.
while ((chunk = GetMemoryChunkSafe<kNonRegular>()) != nullptr) {
allocator_->PerformFreeMemory(chunk);
}
PerformFreeMemoryOnQueuedNonRegularChunks();
}
void MemoryAllocator::Unmapper::TearDown() {
......
......@@ -1208,7 +1208,9 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
}
void FreeQueuedChunks();
void WaitUntilCompleted();
void CancelAndWaitForPendingTasks();
void PrepareForMarkCompact();
void EnsureUnmappingCompleted();
void TearDown();
int NumberOfChunks();
......@@ -1249,6 +1251,8 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
template <FreeMode mode>
void PerformFreeMemoryOnQueuedChunks();
void PerformFreeMemoryOnQueuedNonRegularChunks();
Heap* const heap_;
MemoryAllocator* const allocator_;
base::Mutex mutex_;
......
......@@ -2662,7 +2662,7 @@ void Isolate::Deinit() {
wasm_engine()->TearDown();
heap_.mark_compact_collector()->EnsureSweepingCompleted();
heap_.memory_allocator()->unmapper()->WaitUntilCompleted();
heap_.memory_allocator()->unmapper()->EnsureUnmappingCompleted();
DumpAndResetStats();
......
......@@ -124,6 +124,7 @@ v8_source_set("cctest_sources") {
"heap/test-mark-compact.cc",
"heap/test-page-promotion.cc",
"heap/test-spaces.cc",
"heap/test-unmapper.cc",
"heap/test-weak-references.cc",
"interpreter/bytecode-expectations-printer.cc",
"interpreter/bytecode-expectations-printer.h",
......
......@@ -190,7 +190,7 @@ UNINITIALIZED_HEAP_TEST(Regress658718) {
}
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
heap->new_space()->Shrink();
heap->memory_allocator()->unmapper()->WaitUntilCompleted();
heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
heap->delay_sweeper_tasks_for_testing_ = false;
heap->mark_compact_collector()->sweeper()->StartSweeperTasks();
heap->mark_compact_collector()->EnsureSweepingCompleted();
......
......@@ -264,7 +264,7 @@ TEST(NewSpace) {
}
new_space.TearDown();
memory_allocator->unmapper()->WaitUntilCompleted();
memory_allocator->unmapper()->EnsureUnmappingCompleted();
memory_allocator->TearDown();
delete memory_allocator;
}
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#include "src/heap/spaces.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
using v8::IdleTask;
using v8::Task;
using v8::Isolate;
namespace v8 {
namespace internal {
namespace heap {
class MockPlatformForUnmapper : public TestPlatform {
public:
MockPlatformForUnmapper()
: task_(nullptr), old_platform_(i::V8::GetCurrentPlatform()) {
// Now that it's completely constructed, make this the current platform.
i::V8::SetPlatformForTesting(this);
}
virtual ~MockPlatformForUnmapper() {
delete task_;
i::V8::SetPlatformForTesting(old_platform_);
for (auto& task : worker_tasks_) {
old_platform_->CallOnWorkerThread(std::move(task));
}
worker_tasks_.clear();
}
void CallOnForegroundThread(v8::Isolate* isolate, Task* task) override {
task_ = task;
}
void CallOnWorkerThread(std::unique_ptr<Task> task) override {
worker_tasks_.push_back(std::move(task));
}
bool IdleTasksEnabled(v8::Isolate* isolate) override { return false; }
int NumberOfWorkerThreads() override {
return old_platform_->NumberOfWorkerThreads();
}
size_t NumberOfAvailableBackgroundThreads() override {
return old_platform_->NumberOfAvailableBackgroundThreads();
}
private:
Task* task_;
std::vector<std::unique_ptr<Task>> worker_tasks_;
v8::Platform* old_platform_;
};
// Regression test for chromium:833291: CollectAllAvailableGarbage must unmap
// queued chunks eagerly, leaving the unmapper queue empty — even though the
// mock platform prevents background unmapper tasks from ever running.
TEST(EagerUnmappingInCollectAllAvailableGarbage) {
  CcTest::InitializeVM();
  MockPlatformForUnmapper platform;
  Heap* heap = CcTest::heap();
  // Simulate a full old space so the GC has chunks to free.
  i::heap::SimulateFullSpace(heap->old_space());
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, heap->memory_allocator()->unmapper()->NumberOfChunks());
}
} // namespace heap
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment