Commit 46f9d5a2 authored by Michael Lippautz, committed by Commit Bot

Reland "[heap] ArrayBufferTracker: Only consider committed size"

This is a reland of 6488c9e5
Original change's description:
> [heap] ArrayBufferTracker: Only consider committed size
> 
> - Only consider committed size of ABs.
> - Compute freed memory from retained sizes, as the byte length might be
>   a HeapNumber and thus prohibited from accessing (as it may be already
>   collected).
> 
> CQ_INCLUDE_TRYBOTS=master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel
> 
> Bug: chromium:775896
> Change-Id: Ia0bed66afac5e4d5ed58194950a55156e19cec72
> Reviewed-on: https://chromium-review.googlesource.com/725722
> Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#48699}

Tbr: ulan@chromium.org
Bug: chromium:775896
Change-Id: Ibbec1ffa8fe90d3668f0fe0c1b8b9997b5fd644e
Cq-Include-Trybots: master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel
Reviewed-on: https://chromium-review.googlesource.com/726579
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48707}
parent 8bacd848
......@@ -14,10 +14,9 @@ namespace v8 {
namespace internal {
void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;
if (buffer->backing_store() == nullptr) return;
size_t length = buffer->allocation_length();
const size_t length = NumberToSize(buffer->byte_length());
Page* page = Page::FromAddress(buffer->address());
{
base::LockGuard<base::RecursiveMutex> guard(page->mutex());
......@@ -36,11 +35,10 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
}
void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;
if (buffer->backing_store() == nullptr) return;
Page* page = Page::FromAddress(buffer->address());
size_t length = buffer->allocation_length();
const size_t length = NumberToSize(buffer->byte_length());
{
base::LockGuard<base::RecursiveMutex> guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
......@@ -52,26 +50,25 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) {
size_t freed_memory = 0;
size_t retained_size = 0;
size_t new_retained_size = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(*it);
const size_t length = buffer->allocation_length();
if (should_free(buffer)) {
freed_memory += length;
buffer->FreeBackingStore();
it = array_buffers_.erase(it);
} else {
retained_size += length;
new_retained_size += length;
++it;
}
}
retained_size_ = retained_size;
const size_t freed_memory = retained_size_ - new_retained_size;
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
retained_size_ = new_retained_size;
}
template <typename MarkingState>
......
......@@ -18,15 +18,14 @@ template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
JSArrayBuffer* old_buffer = nullptr;
size_t freed_memory = 0;
size_t retained_size = 0;
size_t new_retained_size = 0;
size_t moved_size = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
old_buffer = reinterpret_cast<JSArrayBuffer*>(*it);
const size_t length = old_buffer->allocation_length();
const CallbackResult result = callback(old_buffer, &new_buffer);
if (result == kKeepEntry) {
retained_size += length;
new_retained_size += NumberToSize(old_buffer->byte_length());
++it;
} else if (result == kUpdateEntry) {
DCHECK_NOT_NULL(new_buffer);
......@@ -39,23 +38,25 @@ void LocalArrayBufferTracker::Process(Callback callback) {
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
DCHECK_EQ(length, new_buffer->allocation_length());
tracker->Add(new_buffer, length);
const size_t size = NumberToSize(new_buffer->byte_length());
moved_size += size;
tracker->Add(new_buffer, size);
}
it = array_buffers_.erase(it);
} else if (result == kRemoveEntry) {
freed_memory += length;
// Size of freed memory is computed to avoid looking at dead objects.
old_buffer->FreeBackingStore();
it = array_buffers_.erase(it);
} else {
UNREACHABLE();
}
}
retained_size_ = retained_size;
const size_t freed_memory = retained_size_ - new_retained_size - moved_size;
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
retained_size_ = new_retained_size;
}
void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
......
......@@ -4107,32 +4107,47 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
return pages;
}
void MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(p));
}
}
return pages;
}
void MarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
int MarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(p));
}
}
return pages;
}
void MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
int MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : old_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsOldToOld &&
p->IsEvacuationCandidate()) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(p));
}
}
return pages;
}
void MarkCompactCollector::UpdatePointersAfterEvacuation() {
......@@ -4152,9 +4167,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
&page_parallel_job_semaphore_);
CollectNewSpaceArrayBufferTrackerItems(&updating_job);
CollectOldSpaceArrayBufferTrackerItems(&updating_job);
int remembered_set_pages = 0;
remembered_set_pages += CollectRememberedSetUpdatingItems(
&updating_job, heap()->old_space(), RememberedSetUpdatingMode::ALL);
......@@ -4176,20 +4188,28 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
}
{
// Update pointers in map space in a separate phase to avoid data races
// with Map->LayoutDescriptor edge.
// - Update pointers in map space in a separate phase to avoid data races
// with Map->LayoutDescriptor edge.
// - Update array buffer trackers in the second phase to have access to
// byte length which is potentially a HeapNumber.
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE);
ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
&page_parallel_job_semaphore_);
int array_buffer_pages = 0;
array_buffer_pages += CollectNewSpaceArrayBufferTrackerItems(&updating_job);
array_buffer_pages += CollectOldSpaceArrayBufferTrackerItems(&updating_job);
int remembered_set_pages = 0;
remembered_set_pages += CollectRememberedSetUpdatingItems(
&updating_job, heap()->map_space(), RememberedSetUpdatingMode::ALL);
const int num_tasks = remembered_set_pages == 0
? 0
: NumberOfParallelPointerUpdateTasks(
remembered_set_pages, old_to_new_slots_);
const int remembered_set_tasks =
remembered_set_pages == 0
? 0
: NumberOfParallelPointerUpdateTasks(remembered_set_pages,
old_to_new_slots_);
const int num_tasks = Max(array_buffer_pages, remembered_set_tasks);
if (num_tasks > 0) {
for (int i = 0; i < num_tasks; i++) {
updating_job.AddTask(new PointersUpdatingTask(isolate()));
......
......@@ -405,7 +405,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
void CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int NumberOfParallelMarkingTasks(int pages);
......@@ -897,8 +897,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
void CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
void CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
void ReleaseEvacuationCandidates();
void PostProcessEvacuationCandidates();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment