Commit fbf1bc66 authored by mlippautz, committed by Commit bot

Revert of [heap] Improve size profiling for ArrayBuffer tracking (patchset #6 id:140001 of https://codereview.chromium.org/2210263002/ )

Reason for revert:
Tanks Octane.

Original issue's description:
> [heap] Improve size profiling for ArrayBuffer tracking
>
> Eagerly account for retained sizes during ArrayBuffer tracking. Following up on this,
> we can now do Scavenges if the amount of memory retained from new space is too large.
>
> BUG=chromium:621829
> R=jochen@chromium.org,hpayer@chromium.org
>
> Committed: https://crrev.com/28e13bd6a75c9467dae43043e7b741a1387d5252
> Cr-Commit-Position: refs/heads/master@{#38731}

TBR=jochen@chromium.org,hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:621829

Review-Url: https://codereview.chromium.org/2261513003
Cr-Commit-Position: refs/heads/master@{#38739}
parent ed08838e
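
For context, the tracker touched below feeds V8's external-memory accounting through the same public API that embedders use, v8::Isolate::AdjustAmountOfExternalAllocatedMemory (visible in the RegisterNew hunk further down). A minimal embedder-side sketch of that contract; the helper names are invented for illustration and isolate setup is elided:

#include <cstddef>
#include <v8.h>

// Hypothetical helpers mirroring what ArrayBufferTracker::RegisterNew and
// Unregister do internally for ArrayBuffer backing stores.
void TrackExternalBuffer(v8::Isolate* isolate, size_t byte_length) {
  // A positive delta tells the GC that more off-heap memory is retained by
  // on-heap objects, so collections may be scheduled sooner.
  isolate->AdjustAmountOfExternalAllocatedMemory(
      static_cast<int64_t>(byte_length));
}

void UntrackExternalBuffer(v8::Isolate* isolate, size_t byte_length) {
  // Mirror the registration with a negative delta once the backing store
  // is freed.
  isolate->AdjustAmountOfExternalAllocatedMemory(
      -static_cast<int64_t>(byte_length));
}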
--- a/include/v8.h
+++ b/include/v8.h
@@ -7605,7 +7605,7 @@ class Internals {
       kExternalMemoryOffset + kApiInt64Size;
   static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
                                          kApiInt64Size + kApiInt64Size +
-                                         kApiPointerSize;
+                                         kApiPointerSize + kApiPointerSize;
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
   static const int kNullValueRootIndex = 6;
--- a/src/heap/array-buffer-tracker-inl.h
+++ b/src/heap/array-buffer-tracker-inl.h
@@ -10,7 +10,7 @@
 namespace v8 {
 namespace internal {
 
-void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
+void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;
 
@@ -26,18 +26,13 @@ void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     tracker->Add(buffer, length);
   }
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Increment(length);
-  } else {
-    retained_from_old_space_.Increment(length);
-  }
   // We may go over the limit of externally allocated memory here. We call the
   // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(heap_->isolate())
+  reinterpret_cast<v8::Isolate*>(heap->isolate())
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
 
-void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
+void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;
 
@@ -49,12 +44,7 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     length = tracker->Remove(buffer);
   }
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(length);
-  } else {
-    retained_from_old_space_.Decrement(length);
-  }
-  heap_->update_external_memory(-static_cast<int64_t>(length));
+  heap->update_external_memory(-static_cast<intptr_t>(length));
 }
 
 void LocalArrayBufferTracker::Add(Key key, const Value& value) {
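
The RegisterNew/Unregister paths above mutate a per-page tracker; as the header below documents, all access is guarded by taking the lock of the corresponding page. A minimal sketch of that locking discipline, with stand-in types (in the real code the mutex lives on the Page and is taken by the ArrayBufferTracker caller, not by the local tracker itself):

#include <cstddef>
#include <mutex>
#include <unordered_map>

struct Buffer;  // stand-in for JSArrayBuffer

// Page plus its LocalArrayBufferTracker, collapsed into one class for brevity.
class PageLocalTracker {
 public:
  void Add(Buffer* buffer, size_t length) {
    std::lock_guard<std::mutex> guard(mutex_);  // the "page lock"
    buffers_[buffer] = length;
  }

  size_t Remove(Buffer* buffer) {
    std::lock_guard<std::mutex> guard(mutex_);
    auto it = buffers_.find(buffer);
    if (it == buffers_.end()) return 0;
    size_t length = it->second;
    buffers_.erase(it);
    return length;
  }

 private:
  std::mutex mutex_;
  std::unordered_map<Buffer*, size_t> buffers_;
};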
--- a/src/heap/array-buffer-tracker.cc
+++ b/src/heap/array-buffer-tracker.cc
@@ -14,7 +14,7 @@ LocalArrayBufferTracker::~LocalArrayBufferTracker() {
 }
 
 template <LocalArrayBufferTracker::FreeMode free_mode>
-LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
+void LocalArrayBufferTracker::Free() {
   size_t freed_memory = 0;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
@@ -30,71 +30,60 @@ LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
       ++it;
     }
   }
-  return ProcessResult(freed_memory, 0);
+  if (freed_memory > 0) {
+    heap_->update_external_memory_concurrently_freed(
+        static_cast<intptr_t>(freed_memory));
+  }
 }
 
 template <typename Callback>
-LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Process(
-    Callback callback) {
+void LocalArrayBufferTracker::Process(Callback callback) {
   JSArrayBuffer* new_buffer = nullptr;
   size_t freed_memory = 0;
-  size_t promoted_memory = 0;
-  size_t len = 0;
-  Page* target_page = nullptr;
-  LocalArrayBufferTracker* tracker = nullptr;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
-    switch (callback(it->first, &new_buffer)) {
-      case kKeepEntry:
-        ++it;
-        break;
-      case kUpdateEntry:
-        DCHECK_NOT_NULL(new_buffer);
-        target_page = Page::FromAddress(new_buffer->address());
-        // We need to lock the target page because we cannot guarantee
-        // exclusive access to new space pages.
-        if (target_page->InNewSpace()) target_page->mutex()->Lock();
-        tracker = target_page->local_tracker();
-        if (tracker == nullptr) {
-          target_page->AllocateLocalTracker();
-          tracker = target_page->local_tracker();
-        }
-        DCHECK_NOT_NULL(tracker);
-        len = it->second;
-        tracker->Add(new_buffer, len);
-        if (target_page->InNewSpace()) {
-          target_page->mutex()->Unlock();
-        } else {
-          promoted_memory += len;
-        }
-        it = array_buffers_.erase(it);
-        break;
-      case kRemoveEntry:
-        len = it->second;
-        heap_->isolate()->array_buffer_allocator()->Free(
-            it->first->backing_store(), len);
-        freed_memory += len;
-        it = array_buffers_.erase(it);
-        break;
+    const CallbackResult result = callback(it->first, &new_buffer);
+    if (result == kKeepEntry) {
+      ++it;
+    } else if (result == kUpdateEntry) {
+      DCHECK_NOT_NULL(new_buffer);
+      Page* target_page = Page::FromAddress(new_buffer->address());
+      // We need to lock the target page because we cannot guarantee
+      // exclusive access to new space pages.
+      if (target_page->InNewSpace()) target_page->mutex()->Lock();
+      LocalArrayBufferTracker* tracker = target_page->local_tracker();
+      if (tracker == nullptr) {
+        target_page->AllocateLocalTracker();
+        tracker = target_page->local_tracker();
+      }
+      DCHECK_NOT_NULL(tracker);
+      tracker->Add(new_buffer, it->second);
+      if (target_page->InNewSpace()) target_page->mutex()->Unlock();
+      it = array_buffers_.erase(it);
+    } else if (result == kRemoveEntry) {
+      const size_t len = it->second;
+      heap_->isolate()->array_buffer_allocator()->Free(
+          it->first->backing_store(), len);
+      freed_memory += len;
+      it = array_buffers_.erase(it);
+    } else {
+      UNREACHABLE();
     }
   }
-  return ProcessResult(freed_memory, promoted_memory);
-}
-
-void ArrayBufferTracker::AccountForConcurrentlyFreedMemory() {
-  heap_->update_external_memory(
-      static_cast<int64_t>(concurrently_freed_.Value()));
-  concurrently_freed_.SetValue(0);
+  if (freed_memory > 0) {
+    heap_->update_external_memory_concurrently_freed(
+        static_cast<intptr_t>(freed_memory));
+  }
 }
 
-void ArrayBufferTracker::FreeDeadInNewSpace() {
-  DCHECK_EQ(heap_->gc_state(), Heap::HeapState::SCAVENGE);
-  for (Page* page : NewSpacePageRange(heap_->new_space()->FromSpaceStart(),
-                                      heap_->new_space()->FromSpaceEnd())) {
+void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
+  DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
+  for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),
+                                      heap->new_space()->FromSpaceEnd())) {
     bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
     CHECK(empty);
   }
-  AccountForConcurrentlyFreedMemory();
+  heap->account_external_memory_concurrently_freed();
 }
 
 void ArrayBufferTracker::FreeDead(Page* page) {
@@ -102,13 +91,7 @@ void ArrayBufferTracker::FreeDead(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
   DCHECK(!page->SweepingDone());
-  LocalArrayBufferTracker::ProcessResult result =
-      tracker->Free<LocalArrayBufferTracker::kFreeDead>();
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
+  tracker->Free<LocalArrayBufferTracker::kFreeDead>();
   if (tracker->IsEmpty()) {
    page->ReleaseLocalTracker();
   }
@@ -117,14 +100,7 @@ void ArrayBufferTracker::FreeDead(Page* page) {
 void ArrayBufferTracker::FreeAll(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
-  LocalArrayBufferTracker::ProcessResult result =
-      tracker->Free<LocalArrayBufferTracker::kFreeAll>();
-  concurrently_freed_.Increment(result.freed);
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
+  tracker->Free<LocalArrayBufferTracker::kFreeAll>();
   if (tracker->IsEmpty()) {
     page->ReleaseLocalTracker();
   }
@@ -135,7 +111,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
   if (tracker == nullptr) return true;
 
   DCHECK(page->SweepingDone());
-  LocalArrayBufferTracker::ProcessResult result = tracker->Process(
+  tracker->Process(
       [mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
         MapWord map_word = old_buffer->map_word();
         if (map_word.IsForwardingAddress()) {
@@ -146,13 +122,6 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
             ? LocalArrayBufferTracker::kKeepEntry
             : LocalArrayBufferTracker::kRemoveEntry;
       });
-  concurrently_freed_.Increment(result.freed);
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed + result.promoted);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
-  retained_from_old_space_.Increment(result.promoted);
   return tracker->IsEmpty();
 }
--- a/src/heap/array-buffer-tracker.h
+++ b/src/heap/array-buffer-tracker.h
@@ -8,7 +8,6 @@
 #include <unordered_map>
 
 #include "src/allocation.h"
-#include "src/base/atomic-utils.h"
 #include "src/base/platform/mutex.h"
 #include "src/globals.h"
 
@@ -19,61 +18,40 @@ class Heap;
 class JSArrayBuffer;
 class Page;
 
-class ArrayBufferTracker {
+class ArrayBufferTracker : public AllStatic {
  public:
   enum ProcessingMode {
     kUpdateForwardedRemoveOthers,
     kUpdateForwardedKeepOthers,
   };
 
-  // Returns whether a buffer is currently tracked.
-  static bool IsTracked(JSArrayBuffer* buffer);
-
-  explicit ArrayBufferTracker(Heap* heap)
-      : heap_(heap),
-        concurrently_freed_(0),
-        retained_from_new_space_(0),
-        retained_from_old_space_(0) {}
-
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
 
   // Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
   // access to the tracker by taking the page lock for the corresponding page.
-  inline void RegisterNew(JSArrayBuffer* buffer);
-  inline void Unregister(JSArrayBuffer* buffer);
+  inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
+  inline static void Unregister(Heap* heap, JSArrayBuffer* buffer);
 
   // Frees all backing store pointers for dead JSArrayBuffers in new space.
   // Does not take any locks and can only be called during Scavenge.
-  void FreeDeadInNewSpace();
+  static void FreeDeadInNewSpace(Heap* heap);
 
   // Frees all backing store pointers for dead JSArrayBuffer on a given page.
   // Requires marking information to be present. Requires the page lock to be
   // taken by the caller.
-  void FreeDead(Page* page);
+  static void FreeDead(Page* page);
 
   // Frees all remaining, live or dead, array buffers on a page. Only useful
   // during tear down.
-  void FreeAll(Page* page);
+  static void FreeAll(Page* page);
 
   // Processes all array buffers on a given page. |mode| specifies the action
   // to perform on the buffers. Returns whether the tracker is empty or not.
-  bool ProcessBuffers(Page* page, ProcessingMode mode);
+  static bool ProcessBuffers(Page* page, ProcessingMode mode);
 
-  void AccountForConcurrentlyFreedMemory();
-
-  size_t retained_from_new_space() { return retained_from_new_space_.Value(); }
-  size_t retained_from_old_space() { return retained_from_old_space_.Value(); }
-
- private:
-  Heap* heap_;
-  base::AtomicNumber<size_t> concurrently_freed_;
-  // Number of bytes retained from new space.
-  base::AtomicNumber<size_t> retained_from_new_space_;
-  // Number of bytes retained from old space.
-  base::AtomicNumber<size_t> retained_from_old_space_;
+  // Returns whether a buffer is currently tracked.
+  static bool IsTracked(JSArrayBuffer* buffer);
 };
 
 // LocalArrayBufferTracker tracks internalized array buffers.
@@ -87,32 +65,23 @@ class LocalArrayBufferTracker {
   enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
   enum FreeMode { kFreeDead, kFreeAll };
 
-  struct ProcessResult {
-    ProcessResult(size_t freed, size_t promoted)
-        : freed(freed), promoted(promoted) {}
-    size_t freed;
-    size_t promoted;
-  };
-
   explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~LocalArrayBufferTracker();
 
   inline void Add(Key key, const Value& value);
   inline Value Remove(Key key);
 
-  // Frees up array buffers determined by |free_mode|. Returns statistics in
-  // ProcessResult.
+  // Frees up array buffers determined by |free_mode|.
   template <FreeMode free_mode>
-  ProcessResult Free();
+  void Free();
 
   // Processes buffers one by one. The CallbackResult of the callback decides
-  // what action to take on the buffer. Returns statistics in ProcessResult.
+  // what action to take on the buffer.
   //
   // Callback should be of type:
   //   CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
   template <typename Callback>
-  ProcessResult Process(Callback callback);
+  void Process(Callback callback);
 
   bool IsEmpty() { return array_buffers_.empty(); }
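
Both processing modes are built on the Process() callback contract documented above: the callback inspects one tracked buffer and decides whether its entry is kept, retargeted to the buffer's new location, or removed (freeing the backing store). A sketch of a callback with the documented shape; the Buffer type and its forwarding field are stand-ins for the real JSArrayBuffer/MapWord machinery:

enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };

struct Buffer {
  Buffer* forwarding_address;  // non-null if the GC moved the buffer
};

// Mirrors the kUpdateForwardedRemoveOthers mode used during evacuation:
// retarget entries of surviving (moved) buffers, drop entries of dead ones
// so Process() frees their backing stores.
CallbackResult UpdateForwardedRemoveOthers(Buffer* old_buffer,
                                           Buffer** new_buffer) {
  if (old_buffer->forwarding_address != nullptr) {
    *new_buffer = old_buffer->forwarding_address;
    return kUpdateEntry;
  }
  return kRemoveEntry;
}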
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -162,8 +162,7 @@ Heap::Heap()
       deserialization_complete_(false),
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
-      force_oom_(false),
-      array_buffer_tracker_(nullptr) {
+      force_oom_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
@@ -313,21 +312,6 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   return SCAVENGER;
 }
 
-size_t Heap::external_memory_retained_from_new_space() {
-  // This is just an approximation.
-  return array_buffer_tracker()->retained_from_new_space();
-}
-
-bool Heap::ShouldDoScavengeForReducingExternalMemory() {
-  size_t retained_new_space = external_memory_retained_from_new_space();
-  size_t retained_old_space = external_memory() - retained_new_space;
-  float new_space_ratio =
-      static_cast<float>(new_space_.SizeOfObjects()) / retained_new_space;
-  float old_space_ratio =
-      static_cast<float>(old_space_->SizeOfObjects()) / retained_old_space;
-  // TODO(mlippautz): Add some lower bound.
-  return new_space_ratio > old_space_ratio;
-}
-
 // TODO(1238405): Combine the infrastructure for --heap-stats and
 // --log-gc to avoid the complicated preprocessor and flag testing.
@@ -1754,7 +1738,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
-  array_buffer_tracker()->FreeDeadInNewSpace();
+  ArrayBufferTracker::FreeDeadInNewSpace(this);
 
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
@@ -2040,12 +2024,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
 
 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
-  array_buffer_tracker()->RegisterNew(buffer);
+  ArrayBufferTracker::RegisterNew(this, buffer);
 }
 
 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
-  array_buffer_tracker()->Unregister(buffer);
+  ArrayBufferTracker::Unregister(this, buffer);
 }
@@ -5400,8 +5384,6 @@ bool Heap::SetUp() {
       *this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
   new_space()->AddAllocationObserver(idle_scavenge_observer_);
 
-  array_buffer_tracker_ = new ArrayBufferTracker(this);
-
   return true;
 }
@@ -5610,9 +5592,6 @@ void Heap::TearDown() {
   delete store_buffer_;
   store_buffer_ = nullptr;
 
-  delete array_buffer_tracker_;
-  array_buffer_tracker_ = nullptr;
-
   delete memory_allocator_;
   memory_allocator_ = nullptr;
 }
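
The removed ShouldDoScavengeForReducingExternalMemory compares, per space, live on-heap bytes against the external bytes they retain, and forces a scavenge (by failing AddFreshPage, see the spaces.cc hunk below) when the new-space ratio is larger. A self-contained restatement with invented numbers; the real code reads the inputs from the heap's space statistics:

#include <cstdio>

bool ShouldScavenge(float new_space_objects, float retained_new,
                    float old_space_objects, float retained_old) {
  // On-heap bytes per retained external byte, for each space.
  float new_space_ratio = new_space_objects / retained_new;
  float old_space_ratio = old_space_objects / retained_old;
  return new_space_ratio > old_space_ratio;
}

int main() {
  // 1 MB of new-space objects retaining 4 MB externally (ratio 0.25) vs.
  // 50 MB of old-space objects retaining 10 MB (ratio 5.0): 0.25 > 5.0 is
  // false, so no extra scavenge would have been forced here.
  std::printf("%d\n", ShouldScavenge(1e6f, 4e6f, 50e6f, 10e6f));
  return 0;
}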
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -850,9 +850,14 @@ class Heap {
   int64_t external_memory() { return external_memory_; }
   void update_external_memory(int64_t delta) { external_memory_ += delta; }
 
-  size_t external_memory_retained_from_new_space();
-
-  bool ShouldDoScavengeForReducingExternalMemory();
+  void update_external_memory_concurrently_freed(intptr_t freed) {
+    external_memory_concurrently_freed_.Increment(freed);
+  }
+
+  void account_external_memory_concurrently_freed() {
+    external_memory_ -= external_memory_concurrently_freed_.Value();
+    external_memory_concurrently_freed_.SetValue(0);
+  }
 
   void DeoptMarkedAllocationSites();
@@ -1407,8 +1412,6 @@ class Heap {
   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   void UnregisterArrayBuffer(JSArrayBuffer* buffer);
 
-  ArrayBufferTracker* array_buffer_tracker() { return array_buffer_tracker_; }
-
   // ===========================================================================
   // Allocation site tracking. =================================================
   // ===========================================================================
@@ -2049,6 +2052,9 @@ class Heap {
   // Caches the amount of external memory registered at the last MC.
   int64_t external_memory_at_last_mark_compact_;
 
+  // The amount of memory that has been freed concurrently.
+  base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
+
   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;
@@ -2290,9 +2296,6 @@ class Heap {
   // Used for testing purposes.
   bool force_oom_;
 
-  // Tracker for ArrayBuffers pointing to external memory.
-  ArrayBufferTracker* array_buffer_tracker_;
-
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class GCCallbacksScope;
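
The two Heap methods restored above form a drain-an-atomic-counter pattern: concurrent sweeper threads only ever increment the atomic, and the main thread folds the accumulated total into the plain external_memory_ counter at a safepoint. A sketch of the same pattern with std::atomic standing in for base::AtomicNumber:

#include <atomic>
#include <cstdint>

class ExternalMemoryAccounting {
 public:
  // Called from concurrent sweeper threads; only the atomic is touched.
  void NoteConcurrentlyFreed(intptr_t freed) {
    concurrently_freed_.fetch_add(freed, std::memory_order_relaxed);
  }

  // Called on the main thread, e.g. when a mark-compact starts: drain the
  // concurrently accumulated total and reset it.
  void AccountConcurrentlyFreed() {
    external_memory_ -= concurrently_freed_.exchange(0);
  }

  int64_t external_memory() const { return external_memory_; }

 private:
  int64_t external_memory_ = 0;
  std::atomic<intptr_t> concurrently_freed_{0};
};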
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -833,7 +833,7 @@ void MarkCompactCollector::Prepare() {
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
-  heap()->array_buffer_tracker()->AccountForConcurrentlyFreedMemory();
+  heap()->account_external_memory_concurrently_freed();
 
 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -3129,8 +3129,7 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
     case kObjectsNewToOld:
       success = collector_->VisitLiveObjects(page, &new_space_visitor_,
                                              kClearMarkbits);
-      heap->array_buffer_tracker()->ProcessBuffers(
+      ArrayBufferTracker::ProcessBuffers(
           page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       DCHECK(success);
       break;
@@ -3157,14 +3156,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
       EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
       success =
           collector_->VisitLiveObjects(page, &record_visitor, kKeepMarking);
-      heap->array_buffer_tracker()->ProcessBuffers(
+      ArrayBufferTracker::ProcessBuffers(
           page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
       DCHECK(success);
       // We need to return failure here to indicate that we want this page
       // added to the sweeper.
       success = false;
     } else {
-      heap->array_buffer_tracker()->ProcessBuffers(
+      ArrayBufferTracker::ProcessBuffers(
           page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
     }
     break;
@@ -3368,7 +3367,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
   // Before we sweep objects on the page, we free dead array buffers which
   // requires valid mark bits.
-  p->heap()->array_buffer_tracker()->FreeDead(p);
+  ArrayBufferTracker::FreeDead(p);
 
   // We also release the black area markers here.
   p->ReleaseBlackAreaEndMarkerMap();
@@ -3939,7 +3938,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
         PrintIsolate(isolate(), "sweeping: released page: %p",
                      static_cast<void*>(p));
       }
-      heap()->array_buffer_tracker()->FreeAll(p);
+      ArrayBufferTracker::FreeAll(p);
       space->ReleasePage(p);
       continue;
     }
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1127,7 +1127,7 @@ bool PagedSpace::HasBeenSetUp() { return true; }
 void PagedSpace::TearDown() {
   for (auto it = begin(); it != end();) {
     Page* page = *(it++);  // Will be erased.
-    heap()->array_buffer_tracker()->FreeAll(page);
+    ArrayBufferTracker::FreeAll(page);
     heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
   }
   anchor_.set_next_page(&anchor_);
@@ -1627,8 +1627,6 @@ void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {
 bool NewSpace::AddFreshPage() {
-  if (heap()->ShouldDoScavengeForReducingExternalMemory()) return false;
-
   Address top = allocation_info_.top();
   DCHECK(!Page::IsAtObjectStart(top));
   if (!to_space_.AdvancePage()) {
@@ -1821,7 +1819,7 @@ void SemiSpace::TearDown() {
   // Properly uncommit memory to keep the allocator counters in sync.
   if (is_committed()) {
     for (Page* p : *this) {
-      heap()->array_buffer_tracker()->FreeAll(p);
+      ArrayBufferTracker::FreeAll(p);
     }
     Uncommit();
   }
--- a/test/cctest/heap/test-array-buffer-tracker.cc
+++ b/test/cctest/heap/test-array-buffer-tracker.cc
@@ -314,65 +314,5 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
   }
 }
 
-TEST(ArrayBuffer_RetainedCounterPromotion) {
-  // The test checks that retained counters on ArrayBufferTracker are consistent
-  // with where the buffers are on the heap.
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-    CHECK(heap->InNewSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 100u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    CHECK(heap->InNewSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 100u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    CHECK(heap->InOldSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 0u);
-    CHECK_EQ(tracker->retained_from_old_space(), 100u);
-  }
-}
-
-TEST(ArrayBuffer_RetainedCounterNewSpace) {
-  // The test checks that retained counters on ArrayBufferTracker are consistent
-  // with where the buffers are on the heap.
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
-  {
-    v8::HandleScope handle_scope(isolate);
-    {
-      v8::HandleScope tmp_scope(isolate);
-      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-      CHECK(heap->InNewSpace(*buf));
-    }
-    {
-      v8::HandleScope tmp_scope(isolate);
-      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 50);
-      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-      CHECK(heap->InNewSpace(*buf));
-      CHECK_EQ(tracker->retained_from_new_space(), 150u);
-      CHECK_EQ(tracker->retained_from_old_space(), 0u);
-      heap::GcAndSweep(heap, NEW_SPACE);
-      CHECK(heap->InNewSpace(*buf));
-      CHECK_EQ(tracker->retained_from_new_space(), 50u);
-      CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    }
-    heap::GcAndSweep(heap, NEW_SPACE);
-    CHECK_EQ(tracker->retained_from_new_space(), 0u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-  }
-}
-
 }  // namespace internal
 }  // namespace v8