Commit 279e274e authored by mlippautz, committed by Commit bot

Track array buffers based on JSArrayBuffer addresses on pages instead of the attached backing store.

Details of tracking:
- Scavenge: New space pages are processed in bulk on the main thread, as
  sketched below.
- MC: Unswept pages are processed in bulk in parallel. All other pages are
  processed concurrently by the sweeper.
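
The core idea, as a simplified and self-contained C++ sketch (PageTracker, Buffer, and the malloc-backed stores are illustrative stand-ins, not V8 types; the real implementation is LocalArrayBufferTracker/ArrayBufferTracker in the diff below): each page owns a small map from array buffer object to its backing store, and GC passes walk that map with a callback that keeps, retargets, or frees each entry.

// Illustrative sketch only -- not V8 code.
#include <cstdio>
#include <cstdlib>
#include <map>
#include <utility>

struct Buffer {
  Buffer* forwarded = nullptr;  // set when the GC moved the object
};

enum class CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };

class PageTracker {
 public:
  using Value = std::pair<void*, size_t>;  // backing store pointer and length

  void Add(Buffer* key, Value value) { entries_[key] = value; }
  bool IsTracked(Buffer* key) const { return entries_.count(key) != 0; }

  // Walks every buffer tracked on this page. The callback decides whether an
  // entry stays, moves to the tracker of the page the buffer was evacuated
  // to (|target| here), or is dropped and its backing store freed.
  template <typename Callback>
  void Process(PageTracker* target, Callback callback) {
    for (auto it = entries_.begin(); it != entries_.end();) {
      Buffer* new_buffer = nullptr;
      switch (callback(it->first, &new_buffer)) {
        case CallbackResult::kKeepEntry:
          ++it;
          break;
        case CallbackResult::kUpdateEntry:
          target->Add(new_buffer, it->second);
          it = entries_.erase(it);
          break;
        case CallbackResult::kRemoveEntry:
          std::free(it->second.first);  // dead buffer: release backing store
          it = entries_.erase(it);
          break;
      }
    }
  }

 private:
  std::map<Buffer*, Value> entries_;
};

int main() {
  PageTracker from_page, to_page;
  Buffer live, dead;
  void* live_store = std::malloc(16);
  from_page.Add(&live, {live_store, 16});
  from_page.Add(&dead, {std::malloc(32), 32});
  live.forwarded = &live;  // pretend the GC evacuated |live| to |to_page|

  // Scavenge-style pass over the from-space page: forwarded buffers are
  // re-registered on their new page, everything else is dead and freed.
  from_page.Process(&to_page, [](Buffer* old_buffer, Buffer** new_buffer) {
    if (old_buffer->forwarded != nullptr) {
      *new_buffer = old_buffer->forwarded;
      return CallbackResult::kUpdateEntry;
    }
    return CallbackResult::kRemoveEntry;
  });

  std::printf("live tracked on target page: %d\n", to_page.IsTracked(&live));
  std::free(live_store);
  return 0;
}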

BUG=chromium:611688
LOG=N
TEST=cctest/test-array-buffer-tracker/*
CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel

Review-Url: https://codereview.chromium.org/2026633003
Cr-Commit-Position: refs/heads/master@{#36653}
parent 9fa206e1
@@ -7417,7 +7417,7 @@ class Internals {
       kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size;
   static const int kIsolateRootsOffset =
       kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size +
-      kApiPointerSize;
+      kApiPointerSize + kApiPointerSize;
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
   static const int kNullValueRootIndex = 6;
...
@@ -5,136 +5,172 @@
 #include "src/heap/array-buffer-tracker.h"
 #include "src/heap/heap.h"
 #include "src/isolate.h"
-#include "src/objects.h"
 #include "src/objects-inl.h"
+#include "src/objects.h"
 #include "src/v8.h"

 namespace v8 {
 namespace internal {

-ArrayBufferTracker::~ArrayBufferTracker() {
-  Isolate* isolate = heap()->isolate();
-  size_t freed_memory = 0;
-  for (auto& buffer : live_array_buffers_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  for (auto& buffer : live_array_buffers_for_scavenge_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-  }
-  live_array_buffers_.clear();
-  live_array_buffers_for_scavenge_.clear();
-  not_yet_discovered_array_buffers_.clear();
-  not_yet_discovered_array_buffers_for_scavenge_.clear();
-
-  if (freed_memory > 0) {
-    heap()->update_amount_of_external_allocated_memory(
-        -static_cast<int64_t>(freed_memory));
-  }
-}
-
-void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
-  void* data = buffer->backing_store();
-  if (!data) return;
-
-  bool in_new_space = heap()->InNewSpace(buffer);
-  size_t length = NumberToSize(heap()->isolate(), buffer->byte_length());
-  if (in_new_space) {
-    live_array_buffers_for_scavenge_[data] = length;
-  } else {
-    live_array_buffers_[data] = length;
-  }
-
-  // We may go over the limit of externally allocated memory here. We call the
-  // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(heap()->isolate())
-      ->AdjustAmountOfExternalAllocatedMemory(length);
-}
-
-void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
-  void* data = buffer->backing_store();
-  if (!data) return;
-
-  bool in_new_space = heap()->InNewSpace(buffer);
-  std::map<void*, size_t>* live_buffers =
-      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
-  std::map<void*, size_t>* not_yet_discovered_buffers =
-      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
-                   : &not_yet_discovered_array_buffers_;
-
-  DCHECK(live_buffers->count(data) > 0);
-
-  size_t length = (*live_buffers)[data];
-  live_buffers->erase(data);
-  not_yet_discovered_buffers->erase(data);
-
-  heap()->update_amount_of_external_allocated_memory(
-      -static_cast<int64_t>(length));
-}
-
-void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
-  base::LockGuard<base::Mutex> guard(&mutex_);
-  void* data = buffer->backing_store();
-
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == heap()->undefined_value()) return;
-  if (heap()->InNewSpace(buffer)) {
-    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-  } else {
-    not_yet_discovered_array_buffers_.erase(data);
-  }
-}
-
-void ArrayBufferTracker::FreeDead(bool from_scavenge) {
-  size_t freed_memory = 0;
-  Isolate* isolate = heap()->isolate();
-  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-    live_array_buffers_for_scavenge_.erase(buffer.first);
-  }
-
-  if (!from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-      freed_memory += buffer.second;
-      live_array_buffers_.erase(buffer.first);
-    }
-  }
-
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
-
-  // Do not call through the api as this code is triggered while doing a GC.
-  heap()->update_amount_of_external_allocated_memory(
-      -static_cast<int64_t>(freed_memory));
-}
-
-void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-}
-
-void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
-  base::LockGuard<base::Mutex> guard(&mutex_);
-
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == heap()->undefined_value()) return;
-  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
-  live_array_buffers_for_scavenge_.erase(data);
-  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-}
+LocalArrayBufferTracker::~LocalArrayBufferTracker() {
+  size_t freed_memory = 0;
+  for (auto& buffer : array_buffers_) {
+    heap_->isolate()->array_buffer_allocator()->Free(buffer.second.first,
+                                                     buffer.second.second);
+    freed_memory += buffer.second.second;
+  }
+  if (freed_memory > 0) {
+    heap_->update_amount_of_external_allocated_freed_memory(
+        static_cast<intptr_t>(freed_memory));
+  }
+  array_buffers_.clear();
+}
+
+void LocalArrayBufferTracker::Add(Key key, const Value& value) {
+  array_buffers_[key] = value;
+}
+
+LocalArrayBufferTracker::Value LocalArrayBufferTracker::Remove(Key key) {
+  DCHECK_EQ(array_buffers_.count(key), 1);
+  Value value = array_buffers_[key];
+  array_buffers_.erase(key);
+  return value;
+}
+
+void LocalArrayBufferTracker::FreeDead() {
+  size_t freed_memory = 0;
+  for (TrackingMap::iterator it = array_buffers_.begin();
+       it != array_buffers_.end();) {
+    if (Marking::IsWhite(Marking::MarkBitFrom(it->first))) {
+      heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
+                                                       it->second.second);
+      freed_memory += it->second.second;
+      array_buffers_.erase(it++);
+    } else {
+      it++;
+    }
+  }
+  if (freed_memory > 0) {
+    heap_->update_amount_of_external_allocated_freed_memory(
+        static_cast<intptr_t>(freed_memory));
+  }
+}
+
+template <typename Callback>
+void LocalArrayBufferTracker::Process(Callback callback) {
+  JSArrayBuffer* new_buffer = nullptr;
+  size_t freed_memory = 0;
+  for (TrackingMap::iterator it = array_buffers_.begin();
+       it != array_buffers_.end();) {
+    switch (callback(it->first, &new_buffer)) {
+      case kKeepEntry:
+        it++;
+        break;
+      case kUpdateEntry:
+        DCHECK_NOT_NULL(new_buffer);
+        Page::FromAddress(new_buffer->address())
+            ->local_tracker<Page::kCreateIfNotPresent>()
+            ->Add(new_buffer, it->second);
+        array_buffers_.erase(it++);
+        break;
+      case kRemoveEntry:
+        heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
+                                                         it->second.second);
+        freed_memory += it->second.second;
+        array_buffers_.erase(it++);
+        break;
+      default:
+        UNREACHABLE();
+    }
+  }
+  if (freed_memory > 0) {
+    heap_->update_amount_of_external_allocated_freed_memory(
+        static_cast<intptr_t>(freed_memory));
+  }
+}
+
+void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
+  void* data = buffer->backing_store();
+  if (!data) return;
+
+  size_t length = NumberToSize(heap->isolate(), buffer->byte_length());
+  Page* page = Page::FromAddress(buffer->address());
+  LocalArrayBufferTracker* tracker =
+      page->local_tracker<Page::kCreateIfNotPresent>();
+  DCHECK_NOT_NULL(tracker);
+  {
+    base::LockGuard<base::Mutex> guard(page->mutex());
+    tracker->Add(buffer, std::make_pair(data, length));
+  }
+
+  // We may go over the limit of externally allocated memory here. We call the
+  // api function to trigger a GC in this case.
+  reinterpret_cast<v8::Isolate*>(heap->isolate())
+      ->AdjustAmountOfExternalAllocatedMemory(length);
+}
+
+void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
+  void* data = buffer->backing_store();
+  if (!data) return;
+
+  Page* page = Page::FromAddress(buffer->address());
+  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
+  DCHECK_NOT_NULL(tracker);
+  size_t length = 0;
+  {
+    base::LockGuard<base::Mutex> guard(page->mutex());
+    length = tracker->Remove(buffer).second;
+  }
+  heap->update_amount_of_external_allocated_memory(
+      -static_cast<intptr_t>(length));
+}
+
+void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
+  NewSpacePageIterator from_it(heap->new_space()->FromSpaceStart(),
+                               heap->new_space()->FromSpaceEnd());
+  while (from_it.has_next()) {
+    ProcessBuffers(from_it.next(), kUpdateForwardedRemoveOthers);
+  }
+  heap->account_amount_of_external_allocated_freed_memory();
+}
+
+void ArrayBufferTracker::FreeDead(Page* page) {
+  // Only called from the sweeper, which already holds the page lock.
+  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
+  if (tracker == nullptr) return;
+  DCHECK(!page->SweepingDone());
+  tracker->FreeDead();
+  if (tracker->IsEmpty()) {
+    page->ReleaseLocalTracker();
+  }
+}
+
+void ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
+  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
+  if (tracker == nullptr) return;
+  {
+    base::LockGuard<base::Mutex> guard(page->mutex());
+    tracker->Process(
+        [mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
+          MapWord map_word = old_buffer->map_word();
+          if (map_word.IsForwardingAddress()) {
+            *new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
+            return LocalArrayBufferTracker::kUpdateEntry;
+          }
+          return mode == kUpdateForwardedKeepOthers
+                     ? LocalArrayBufferTracker::kKeepEntry
+                     : LocalArrayBufferTracker::kRemoveEntry;
+        });
+  }
+}
+
+bool ArrayBufferTracker::IsTracked(JSArrayBuffer* buffer) {
+  Page* page = Page::FromAddress(buffer->address());
+  LocalArrayBufferTracker* tracker =
+      page->local_tracker<Page::kCreateIfNotPresent>();
+  {
+    base::LockGuard<base::Mutex> guard(page->mutex());
+    return tracker->IsTracked(buffer);
+  }
+}

 }  // namespace internal
...
@@ -7,69 +7,88 @@
 #include <map>

+#include "src/allocation.h"
 #include "src/base/platform/mutex.h"
 #include "src/globals.h"

 namespace v8 {
 namespace internal {

+// Forward declarations.
 class Heap;
 class JSArrayBuffer;
+class Page;

-class ArrayBufferTracker {
- public:
-  explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
-  ~ArrayBufferTracker();
-
-  inline Heap* heap() { return heap_; }
-
-  // The following methods are used to track raw C++ pointers to externally
-  // allocated memory used as backing store in live array buffers.
-
-  // A new ArrayBuffer was created with |data| as backing store.
-  void RegisterNew(JSArrayBuffer* buffer);
-
-  // The backing store |data| is no longer owned by V8.
-  void Unregister(JSArrayBuffer* buffer);
-
-  // A live ArrayBuffer was discovered during marking/scavenge.
-  void MarkLive(JSArrayBuffer* buffer);
-
-  // Frees all backing store pointers that weren't discovered in the previous
-  // marking or scavenge phase.
-  void FreeDead(bool from_scavenge);
-
-  // Prepare for a new scavenge phase. A new marking phase is implicitly
-  // prepared by finishing the previous one.
-  void PrepareDiscoveryInNewSpace();
-
-  // An ArrayBuffer moved from new space to old space.
-  void Promote(JSArrayBuffer* buffer);
-
- private:
-  base::Mutex mutex_;
-  Heap* heap_;
-
-  // |live_array_buffers_| maps externally allocated memory used as backing
-  // store for ArrayBuffers to the length of the respective memory blocks.
-  //
-  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
-  // a copy of |live_array_buffers_| and we remove pointers as we discover live
-  // ArrayBuffer objects during marking. At the end of mark/compact, the
-  // remaining memory blocks can be freed.
-  std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_;
-
-  // To be able to free memory held by ArrayBuffers during scavenge as well, we
-  // have a separate list of allocated memory held by ArrayBuffers in new space.
-  //
-  // Since mark/compact also evacuates the new space, all pointers in the
-  // |live_array_buffers_for_scavenge_| list are also in the
-  // |live_array_buffers_| list.
-  std::map<void*, size_t> live_array_buffers_for_scavenge_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
-};
+class ArrayBufferTracker : public AllStatic {
+ public:
+  enum ProcessingMode {
+    kUpdateForwardedRemoveOthers,
+    kUpdateForwardedKeepOthers,
+  };
+
+  // The following methods are used to track raw C++ pointers to externally
+  // allocated memory used as backing store in live array buffers.
+
+  // Register/unregister a new JSArrayBuffer |buffer| for tracking.
+  static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
+  static void Unregister(Heap* heap, JSArrayBuffer* buffer);
+
+  // Frees all backing store pointers for dead JSArrayBuffers in new space.
+  static void FreeDeadInNewSpace(Heap* heap);
+
+  // Frees all backing store pointers for dead JSArrayBuffer on a given page.
+  // Requires marking information to be present.
+  static void FreeDead(Page* page);
+
+  // Processes all array buffers on a given page. |mode| specifies the action
+  // to perform on the buffers.
+  static void ProcessBuffers(Page* page, ProcessingMode mode);
+
+  // Returns whether a buffer is currently tracked.
+  static bool IsTracked(JSArrayBuffer* buffer);
+};
+
+// LocalArrayBufferTracker tracks internalized array buffers.
+//
+// Never use directly but instead always call through |ArrayBufferTracker|.
+class LocalArrayBufferTracker {
+ public:
+  typedef std::pair<void*, size_t> Value;
+  typedef JSArrayBuffer* Key;
+
+  enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
+
+  explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
+  ~LocalArrayBufferTracker();
+
+  void Add(Key key, const Value& value);
+  Value Remove(Key key);
+
+  // Frees up any buffers that are currently not marked.
+  void FreeDead();
+
+  // Processes buffers one by one. The CallbackResult of the callback decides
+  // what action to take on the buffer.
+  //
+  // Callback should be of type:
+  //   CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
+  template <typename Callback>
+  inline void Process(Callback callback);
+
+  bool IsEmpty() { return array_buffers_.empty(); }
+
+  bool IsTracked(Key key) {
+    return array_buffers_.find(key) != array_buffers_.end();
+  }
+
+ private:
+  // TODO(mlippautz): Switch to unordered_map once it is supported on all
+  // platforms.
+  typedef std::map<Key, Value> TrackingMap;
+
+  Heap* heap_;
+  TrackingMap array_buffers_;
+};

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
@@ -160,7 +160,6 @@ Heap::Heap()
       gc_callbacks_depth_(0),
       deserialization_complete_(false),
       strong_roots_list_(NULL),
-      array_buffer_tracker_(NULL),
       heap_iterator_depth_(0),
       force_oom_(false) {
   // Allow build-time customization of the max semispace size. Building
@@ -1626,8 +1625,6 @@ void Heap::Scavenge() {
   scavenge_collector_->SelectScavengingVisitorsTable();

-  array_buffer_tracker()->PrepareDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1752,7 +1749,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

-  array_buffer_tracker()->FreeDead(true);
+  ArrayBufferTracker::FreeDeadInNewSpace(this);

   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
@@ -2038,12 +2035,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
-  return array_buffer_tracker()->RegisterNew(buffer);
+  ArrayBufferTracker::RegisterNew(this, buffer);
 }

 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
-  return array_buffer_tracker()->Unregister(buffer);
+  ArrayBufferTracker::Unregister(this, buffer);
 }
@@ -5337,8 +5334,6 @@ bool Heap::SetUp() {
   scavenge_job_ = new ScavengeJob();

-  array_buffer_tracker_ = new ArrayBufferTracker(this);
-
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
@@ -5498,9 +5493,6 @@ void Heap::TearDown() {
   delete scavenge_job_;
   scavenge_job_ = nullptr;

-  delete array_buffer_tracker_;
-  array_buffer_tracker_ = nullptr;
-
   isolate_->global_handles()->TearDown();
   external_string_table_.TearDown();
...
@@ -822,6 +822,16 @@ class Heap {
     amount_of_external_allocated_memory_ += delta;
   }

+  void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
+    amount_of_external_allocated_memory_freed_.Increment(freed);
+  }
+
+  void account_amount_of_external_allocated_freed_memory() {
+    amount_of_external_allocated_memory_ -=
+        amount_of_external_allocated_memory_freed_.Value();
+    amount_of_external_allocated_memory_freed_.SetValue(0);
+  }
+
   void DeoptMarkedAllocationSites();

   bool DeoptMaybeTenuredAllocationSites() {
@@ -1356,10 +1366,6 @@ class Heap {
   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   void UnregisterArrayBuffer(JSArrayBuffer* buffer);

-  inline ArrayBufferTracker* array_buffer_tracker() {
-    return array_buffer_tracker_;
-  }
-
   // ===========================================================================
   // Allocation site tracking. =================================================
   // ===========================================================================
@@ -1997,6 +2003,8 @@ class Heap {
   // Caches the amount of external memory registered at the last global gc.
   int64_t amount_of_external_allocated_memory_at_last_global_gc_;

+  base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
+
   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;
@@ -2235,8 +2243,6 @@ class Heap {
   StrongRootsList* strong_roots_list_;

-  ArrayBufferTracker* array_buffer_tracker_;
-
   // The depth of HeapIterator nestings.
   int heap_iterator_depth_;
...
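
Aside on the accessors and atomic counter added to Heap above: concurrent sweeper threads cannot update amount_of_external_allocated_memory_ directly, so they only accumulate the bytes they freed into an atomic cell, and the main thread folds that value in at a safepoint (in this CL, at the end of a scavenge and in MarkCompactCollector::Prepare). A minimal, self-contained model of that pattern, with illustrative names rather than V8's:

// Illustrative model only -- not V8 code.
#include <atomic>
#include <cstdint>
#include <cstdio>

class ExternalMemoryAccounting {
 public:
  // Main thread only: direct adjustment when buffers are (un)registered.
  void Adjust(int64_t delta) { amount_ += delta; }

  // Safe to call from sweeper threads: just accumulate the freed bytes.
  void AccountFreedConcurrently(intptr_t freed) {
    freed_.fetch_add(freed, std::memory_order_relaxed);
  }

  // Main thread, at a safepoint: apply everything the sweepers freed.
  void FoldFreed() {
    amount_ -= freed_.exchange(0, std::memory_order_relaxed);
  }

  int64_t amount() const { return amount_; }

 private:
  int64_t amount_ = 0;              // main-thread-only value
  std::atomic<intptr_t> freed_{0};  // written by sweeper threads
};

int main() {
  ExternalMemoryAccounting accounting;
  accounting.Adjust(4096);                    // register a 4K backing store
  accounting.AccountFreedConcurrently(4096);  // a sweeper frees it later
  accounting.FoldFreed();                     // folded in on the main thread
  std::printf("external memory: %lld\n",
              static_cast<long long>(accounting.amount()));
  return 0;
}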
@@ -872,6 +872,7 @@ void MarkCompactCollector::Prepare() {
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
+  heap()->account_amount_of_external_allocated_freed_memory();

 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1750,20 +1751,12 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     if (heap_->ShouldBePromoted<DEFAULT_PROMOTION>(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
-      // If we end up needing more special cases, we should factor this out.
-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
-      }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
-    }
     semispace_copied_size_ += size;
     return true;
   }
@@ -1888,10 +1881,6 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
   }

   inline bool Visit(HeapObject* object) {
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
-    }
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBodyFast(&visitor);
     promoted_size_ += object->Size();
@@ -3150,11 +3139,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
   switch (ComputeEvacuationMode(page)) {
     case kObjectsNewToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
+      ArrayBufferTracker::ProcessBuffers(
+          page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       DCHECK(result);
       USE(result);
       break;
     case kPageNewToOld:
       result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
+      // ArrayBufferTracker will be updated during sweeping.
       DCHECK(result);
       USE(result);
       break;
@@ -3168,12 +3160,17 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
         page->SetFlag(Page::COMPACTION_WAS_ABORTED);
         EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
         result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
+        ArrayBufferTracker::ProcessBuffers(
+            page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
         DCHECK(result);
         USE(result);
         // We need to return failure here to indicate that we want this page
         // added to the sweeper.
         return false;
       }
+      ArrayBufferTracker::ProcessBuffers(
+          page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       break;
     default:
       UNREACHABLE();
@@ -3353,6 +3350,10 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
   DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
   DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY);

+  // Before we sweep objects on the page, we free dead array buffers which
+  // requires valid mark bits.
+  ArrayBufferTracker::FreeDead(p);
+
   Address free_start = p->area_start();
   DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
@@ -3552,11 +3553,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     }
   }

-  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-  // ArrayBuffers either re-registers them as live or promotes them. This is
-  // needed to properly free them.
-  heap()->array_buffer_tracker()->FreeDead(false);
-
   // Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
 }
...
@@ -77,7 +77,10 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
       &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                            int>::Visit);

-  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
+  table_.Register(
+      kVisitJSArrayBuffer,
+      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
+                           int>::Visit);

   table_.Register(kVisitFreeSpace, &VisitFreeSpace);
@@ -99,21 +102,6 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
                           kVisitStructGeneric>();
 }

-template <typename StaticVisitor>
-int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
-    Map* map, HeapObject* object) {
-  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
-      JSArrayBufferBodyVisitor;
-
-  if (!JSArrayBuffer::cast(object)->is_external()) {
-    Heap* heap = map->GetHeap();
-    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
-  }
-  return JSArrayBufferBodyVisitor::Visit(map, object);
-}
-
 template <typename StaticVisitor>
 int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
     Map* map, HeapObject* object) {
@@ -185,7 +173,10 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
   table_.Register(kVisitJSFunction, &VisitJSFunction);

-  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
+  table_.Register(
+      kVisitJSArrayBuffer,
+      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
+                           void>::Visit);

   // Registration for kVisitJSRegExp is done by StaticVisitor.
@@ -520,24 +511,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
   JSObjectVisitor::Visit(map, object);
 }

-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
-    Map* map, HeapObject* object) {
-  Heap* heap = map->GetHeap();
-
-  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
-                              void> JSArrayBufferBodyVisitor;
-
-  JSArrayBufferBodyVisitor::Visit(map, object);
-
-  if (!JSArrayBuffer::cast(object)->is_external() &&
-      !heap->InNewSpace(object)) {
-    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
-  }
-}
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
     Map* map, HeapObject* object) {
...
@@ -300,7 +300,6 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
     return FreeSpace::cast(object)->size();
   }

-  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
   INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));

   class DataObjectVisitor {
@@ -379,7 +378,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
   INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
   INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
-  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
   INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
   INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
...
@@ -36,7 +36,8 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
+    table_.Register(kVisitJSArrayBuffer,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

     table_.Register(
         kVisitNativeContext,
@@ -281,19 +282,6 @@ class ScavengingVisitor : public StaticVisitorBase {
                                          object_size);
   }

-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) {
-      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
-    }
-  }
-
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
...
@@ -270,7 +270,6 @@ template <Page::InitializationMode mode>
 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
                        PagedSpace* owner) {
   Page* page = reinterpret_cast<Page*>(chunk);
-  page->mutex_ = new base::Mutex();
   DCHECK(page->area_size() <= kAllocatableMemory);
   DCHECK(chunk->owner() == owner);
...
@@ -511,13 +511,14 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
   chunk->progress_bar_ = 0;
   chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
   chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
-  chunk->mutex_ = nullptr;
+  chunk->mutex_ = new base::Mutex();
   chunk->available_in_free_list_ = 0;
   chunk->wasted_memory_ = 0;
   chunk->ResetLiveBytes();
   Bitmap::Clear(chunk);
   chunk->set_next_chunk(nullptr);
   chunk->set_prev_chunk(nullptr);
+  chunk->local_tracker_.SetValue(nullptr);

   DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
   DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -984,6 +985,8 @@ void MemoryChunk::ReleaseAllocatedMemory() {
   if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
   if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
   if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
+  if (local_tracker_.Value() != nullptr) ReleaseLocalTracker();
 }

 static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
@@ -1035,6 +1038,12 @@ void MemoryChunk::ReleaseTypedOldToOldSlots() {
   delete typed_old_to_old_slots_;
   typed_old_to_old_slots_ = nullptr;
 }
+
+void MemoryChunk::ReleaseLocalTracker() {
+  delete local_tracker_.Value();
+  local_tracker_.SetValue(nullptr);
+}

 // -----------------------------------------------------------------------------
 // PagedSpace implementation
...
@@ -14,6 +14,7 @@
 #include "src/base/platform/mutex.h"
 #include "src/flags.h"
 #include "src/hashmap.h"
+#include "src/heap/array-buffer-tracker.h"
 #include "src/list.h"
 #include "src/objects.h"
 #include "src/utils.h"
@@ -473,6 +474,8 @@ class MemoryChunk {
     kSweepingInProgress,
   };

+  enum ArrayBufferTrackerAccessMode { kDontCreate, kCreateIfNotPresent };
+
   // Every n write barrier invocations we go to runtime even though
   // we could have handled it in generated code. This lets us check
   // whether we have hit the limit and should do some more marking.
@@ -528,7 +531,8 @@ class MemoryChunk {
       + kPointerSize  // AtomicValue next_chunk_
       + kPointerSize  // AtomicValue prev_chunk_
       // FreeListCategory categories_[kNumberOfCategories]
-      + FreeListCategory::kSize * kNumberOfCategories;
+      + FreeListCategory::kSize * kNumberOfCategories +
+      kPointerSize;  // LocalArrayBufferTracker tracker_

   // We add some more space to the computed header size to amount for missing
   // alignment requirements in our computation.
@@ -647,6 +651,21 @@ class MemoryChunk {
   void AllocateTypedOldToOldSlots();
   void ReleaseTypedOldToOldSlots();

+  template <ArrayBufferTrackerAccessMode tracker_access>
+  inline LocalArrayBufferTracker* local_tracker() {
+    LocalArrayBufferTracker* tracker = local_tracker_.Value();
+    if (tracker == nullptr && tracker_access == kCreateIfNotPresent) {
+      tracker = new LocalArrayBufferTracker(heap_);
+      if (!local_tracker_.TrySetValue(nullptr, tracker)) {
+        tracker = local_tracker_.Value();
+      }
+      DCHECK_NOT_NULL(tracker);
+    }
+    return tracker;
+  }
+
+  void ReleaseLocalTracker();
+
   Address area_start() { return area_start_; }
   Address area_end() { return area_end_; }
   int area_size() { return static_cast<int>(area_end() - area_start()); }
@@ -832,6 +851,8 @@ class MemoryChunk {
   FreeListCategory categories_[kNumberOfCategories];

+  base::AtomicValue<LocalArrayBufferTracker*> local_tracker_;
+
  private:
   void InitializeReservedMemory() { reservation_.Reset(); }
...
@@ -104,6 +104,7 @@
         'heap/heap-utils.cc',
         'heap/heap-utils.h',
         'heap/test-alloc.cc',
+        'heap/test-array-buffer-tracker.cc',
         'heap/test-compaction.cc',
         'heap/test-heap.cc',
         'heap/test-incremental-marking.cc',
...
@@ -141,6 +141,21 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
   space->ClearStats();
 }

+void AbandonCurrentlyFreeMemory(PagedSpace* space) {
+  space->EmptyAllocationInfo();
+  PageIterator pit(space);
+  while (pit.has_next()) {
+    pit.next()->MarkNeverAllocateForTesting();
+  }
+}
+
+void GcAndSweep(Heap* heap, AllocationSpace space) {
+  heap->CollectGarbage(space);
+  if (heap->mark_compact_collector()->sweeping_in_progress()) {
+    heap->mark_compact_collector()->EnsureSweepingCompleted();
+  }
+}
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
@@ -40,6 +40,10 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
 // Helper function that simulates a full old-space in the heap.
 void SimulateFullSpace(v8::internal::PagedSpace* space);

+void AbandonCurrentlyFreeMemory(PagedSpace* space);
+
+void GcAndSweep(Heap* heap, AllocationSpace space);
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
...
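
The diff of the new heap/test-array-buffer-tracker.cc itself is not shown in this view. As a rough illustration of how such a cctest exercises the tracker through the public IsTracked() API and the helpers added above, a sketch along these lines (the test name and exact structure here are assumed, not taken from the CL):

// Illustrative sketch only; the real tests live in
// test/cctest/heap/test-array-buffer-tracker.cc.
TEST(ArrayBufferTrackedWhileReachable) {
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = CcTest::heap();
  v8::HandleScope handle_scope(isolate);

  v8::Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
  Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);

  // While the JSArrayBuffer is reachable it stays registered with the tracker
  // of whatever page it currently lives on, across scavenges as well as full
  // GCs followed by sweeping.
  CHECK(ArrayBufferTracker::IsTracked(*buf));
  heap::GcAndSweep(heap, NEW_SPACE);
  CHECK(ArrayBufferTracker::IsTracked(*buf));
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(ArrayBufferTracker::IsTracked(*buf));
}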