Commit 15a0ace5 authored by fedor, committed by Commit bot

heap: make array buffer maps disjoint

Remove the intersection from the `std::map`s representing the currently live
ArrayBuffers. While simpler to understand, the overlapping maps posed a
significant performance issue for heavy ArrayBuffer users (like node.js).

Store the buffers separately, and process them together during the mark-sweep
phase.

BUG=
R=mlippautz@chromium.org

Review URL: https://codereview.chromium.org/1326613002

Cr-Commit-Position: refs/heads/master@{#30539}
parent 85f6e168
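To make the new scheme concrete, here is a minimal standalone sketch of the disjoint-map bookkeeping the commit message describes. BufferTracker and all of its method names are invented for illustration and are not V8's actual API; std::free stands in for the embedder's array buffer allocator.

#include <cstddef>
#include <cstdlib>
#include <map>

// Illustrative sketch only: two disjoint registries, one per space,
// plus matching "not yet discovered" maps used during GC.
class BufferTracker {
 public:
  void RegisterNew(bool in_new_space, void* data, std::size_t length) {
    if (data == nullptr) return;
    // Each backing store lives in exactly one map; the maps never overlap.
    (in_new_space ? new_space_ : old_space_)[data] = length;
  }

  void BeginGC() {
    // Assume every buffer is dead until a GC pass discovers it.
    undiscovered_new_ = new_space_;
    undiscovered_old_ = old_space_;
  }

  void Discover(bool in_new_space, void* data) {
    (in_new_space ? undiscovered_new_ : undiscovered_old_).erase(data);
  }

  void Promote(void* data) {
    // A new-space buffer survived a scavenge (assumes it was registered
    // in new space): move its entry, keeping the maps disjoint.
    old_space_[data] = new_space_[data];
    new_space_.erase(data);
    undiscovered_new_.erase(data);
  }

  std::size_t FreeDead(bool full_gc) {
    std::size_t freed = 0;
    for (auto& entry : undiscovered_new_) {
      freed += entry.second;
      std::free(entry.first);  // stand-in for the embedder's allocator
      new_space_.erase(entry.first);
    }
    undiscovered_new_.clear();
    if (full_gc) {
      for (auto& entry : undiscovered_old_) {
        freed += entry.second;
        std::free(entry.first);
        old_space_.erase(entry.first);
      }
      undiscovered_old_.clear();
    }
    return freed;
  }

 private:
  std::map<void*, std::size_t> old_space_, new_space_;
  std::map<void*, std::size_t> undiscovered_old_, undiscovered_new_;
};

A scavenge corresponds to FreeDead(false), which touches only the new-space maps; a full mark-sweep runs FreeDead(true) over both. Because the maps are disjoint, a scavenge never has to walk the potentially large old-space map, which is the performance problem the commit addresses.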
@@ -1744,61 +1744,13 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
 void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
                                   size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(live_array_buffers_, data, length);
   if (in_new_space) {
-    RegisterNewArrayBufferHelper(live_array_buffers_for_scavenge_, data,
-                                 length);
+    live_array_buffers_for_scavenge_[data] = length;
+  } else {
+    live_array_buffers_[data] = length;
   }
   // We may go over the limit of externally allocated memory here. We call the
@@ -1810,54 +1762,79 @@ void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
 void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(live_array_buffers_,
-                              not_yet_discovered_array_buffers_, data);
-  if (in_new_space) {
-    UnregisterArrayBufferHelper(live_array_buffers_for_scavenge_,
-                                not_yet_discovered_array_buffers_for_scavenge_,
-                                data);
-  }
+
+  std::map<void*, size_t>* live_buffers =
+      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
+  std::map<void*, size_t>* not_yet_discovered_buffers =
+      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
+                   : &not_yet_discovered_array_buffers_;
+
+  DCHECK(live_buffers->count(data) > 0);
+
+  size_t length = (*live_buffers)[data];
+  live_buffers->erase(data);
+  not_yet_discovered_buffers->erase(data);
+
+  amount_of_external_allocated_memory_ -= length;
 }
 
-void Heap::RegisterLiveArrayBuffer(bool from_scavenge, void* data) {
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_,
-      data);
+  if (in_new_space) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  } else {
+    not_yet_discovered_array_buffers_.erase(data);
+  }
 }
 void Heap::FreeDeadArrayBuffers(bool from_scavenge) {
-  if (from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-      not_yet_discovered_array_buffers_.erase(buffer.first);
-      live_array_buffers_.erase(buffer.first);
-    }
-  } else {
-    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      // Scavenge can't happen during evacuation, so we only need to update
-      // live_array_buffers_for_scavenge_.
-      // not_yet_discovered_array_buffers_for_scavenge_ will be reset before
-      // the next scavenge run in PrepareArrayBufferDiscoveryInNewSpace.
-      live_array_buffers_for_scavenge_.erase(buffer.first);
-    }
-  }
+  size_t freed_memory = 0;
+  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+    live_array_buffers_for_scavenge_.erase(buffer.first);
+  }
+
+  if (!from_scavenge) {
+    for (auto& buffer : not_yet_discovered_array_buffers_) {
+      isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+      freed_memory += buffer.second;
+      live_array_buffers_.erase(buffer.first);
+    }
+  }
+
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
+
   // Do not call through the api as this code is triggered while doing a GC.
-  amount_of_external_allocated_memory_ -= FreeDeadArrayBuffersHelper(
-      isolate_,
-      from_scavenge ? live_array_buffers_for_scavenge_ : live_array_buffers_,
-      from_scavenge ? not_yet_discovered_array_buffers_for_scavenge_
-                    : not_yet_discovered_array_buffers_);
+  amount_of_external_allocated_memory_ -= freed_memory;
 }
 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
+  size_t freed_memory = 0;
+  for (auto& buffer : live_array_buffers_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  for (auto& buffer : live_array_buffers_for_scavenge_) {
+    isolate()->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+  }
+  live_array_buffers_.clear();
+  live_array_buffers_for_scavenge_.clear();
+  not_yet_discovered_array_buffers_.clear();
+  not_yet_discovered_array_buffers_for_scavenge_.clear();
+
+  if (freed_memory > 0) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
+  }
 }
@@ -1875,7 +1852,7 @@ void Heap::PromoteArrayBuffer(Object* obj) {
   // ArrayBuffer might be in the middle of being constructed.
   if (data == undefined_value()) return;
   DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
   live_array_buffers_for_scavenge_.erase(data);
   not_yet_discovered_array_buffers_for_scavenge_.erase(data);
 }
@@ -1070,7 +1070,7 @@ class Heap {
   void UnregisterArrayBuffer(bool in_new_space, void* data);
 
   // A live ArrayBuffer was discovered during marking/scavenge.
-  void RegisterLiveArrayBuffer(bool from_scavenge, void* data);
+  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
 
   // Frees all backing store pointers that weren't discovered in the previous
   // marking or scavenge phase.
@@ -1790,21 +1790,6 @@ class Heap {
   // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
   void TearDownArrayBuffers();
 
-  // These correspond to the non-Helper versions.
-  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                    void* data, size_t length);
-  void UnregisterArrayBufferHelper(
-      std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  void RegisterLiveArrayBufferHelper(
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  size_t FreeDeadArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-  void TearDownArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
@@ -1867,6 +1867,10 @@ int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(
       Object* target = allocation.ToObjectChecked();
 
       MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE);
+      if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+        heap()->RegisterLiveArrayBuffer(
+            true, JSArrayBuffer::cast(target)->backing_store());
+      }
       heap()->IncrementSemiSpaceCopiedObjectSize(size);
     }
     *cells = 0;
@@ -4431,10 +4435,13 @@ void MarkCompactCollector::SweepSpaces() {
   // buffer entries are already filtered out. We can just release the memory.
   heap()->FreeQueuedChunks();
 
-  heap()->FreeDeadArrayBuffers(false);
-
   EvacuateNewSpaceAndCandidates();
 
+  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
+  // ArrayBuffers either re-registers them as live or promotes them. This is
+  // needed to properly free them.
+  heap()->FreeDeadArrayBuffers(false);
+
   // Clear the marking state of live large objects.
   heap_->lo_space()->ClearMarkingStateOfLiveObjects();
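The reordering in SweepSpaces matters: a surviving new-space buffer is only removed from the not-yet-discovered map while it is being evacuated or promoted, so sweeping first would free live buffers. A toy demonstration of that hazard (names invented for illustration):

#include <cassert>
#include <cstddef>
#include <map>

int main() {
  std::map<void*, std::size_t> live_new, undiscovered_new;
  int survivor = 0;
  live_new[&survivor] = 128;
  undiscovered_new = live_new;  // GC starts: nothing discovered yet

  // (1) Sweeping at this point would treat the survivor as dead:
  assert(undiscovered_new.count(&survivor) == 1);

  // (2) Evacuation visits the survivor and re-registers it as live...
  undiscovered_new.erase(&survivor);

  // ...so the sweep that follows frees nothing.
  assert(undiscovered_new.empty());
}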
@@ -532,7 +532,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
       heap, object,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
-  if (!JSArrayBuffer::cast(object)->is_external()) {
+  if (!JSArrayBuffer::cast(object)->is_external() &&
+      !heap->InNewSpace(object)) {
     heap->RegisterLiveArrayBuffer(false,
                                   JSArrayBuffer::cast(object)->backing_store());
   }