Commit 93fe04af authored by mlippautz's avatar mlippautz Committed by Commit bot

Revert of "[heap] Fine-grained JSArrayBuffer tracking" (patchset #6 id:220001...

Revert of "[heap] Fine-grained JSArrayBuffer tracking" (patchset #6 id:220001 of https://codereview.chromium.org/2005723005/ )

Reason for revert:
Breaking TSAN
  https://uberchromegw.corp.google.com/i/client.v8/builders/V8%20Linux64%20TSAN/builds/9741/steps/Ignition%20-%20turbofan%20%28flakes%29/logs/FixedUint32Array

Original issue's description:
> Reland of "[heap] Fine-grained JSArrayBuffer tracking"
>
> Track based on JSArrayBuffer addresses on pages instead of the attached
> backing store.
>
> Details of tracking:
> - Scavenge: New space pages are processed in bulk on the main thread
> - MC: Unswept pages are processed in bulk in parallel. All other pages
>   are processed by the sweeper concurrently.
>
> This reverts commit d2dff032.
>
> R=hpayer@chromium.org
> BUG=chromium:611688
> LOG=N
> CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel
>
> Committed: https://crrev.com/b5704bc1e6ba74c9ca975f5d58f5b2dc60261457
> Cr-Commit-Position: refs/heads/master@{#36495}

TBR=hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:611688

Review-Url: https://codereview.chromium.org/2011863002
Cr-Commit-Position: refs/heads/master@{#36504}
parent f768e53c
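
For orientation, here is a minimal sketch contrasting the reverted per-page ("fine-grained") tracking with the global-map tracking that this revert restores. All types below are simplified, hypothetical stand-ins, not the actual V8 classes touched by this CL:

```cpp
#include <cstddef>
#include <map>

struct BackingStore {
  void* data;
  size_t length;
};

// Reverted design: each page owns a tracker keyed by the JSArrayBuffer's
// address on that page, so a page can be processed (e.g. swept) on its own.
struct PageLocalTracker {
  std::map<void* /* JSArrayBuffer address */, BackingStore> live;
};

// Restored design: one heap-global pair of maps keyed by the backing store
// pointer, processed in bulk during GC pauses.
struct GlobalTracker {
  std::map<void* /* backing store */, size_t /* length */> live_array_buffers;
  std::map<void*, size_t> not_yet_discovered_array_buffers;
};
```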
@@ -1159,7 +1159,6 @@ v8_source_set("v8_base") {
"src/handles.h",
"src/hashmap.h",
"src/heap-symbols.h",
"src/heap/array-buffer-tracker-inl.h",
"src/heap/array-buffer-tracker.cc",
"src/heap/array-buffer-tracker.h",
"src/heap/gc-idle-time-handler.cc",
......
@@ -7392,7 +7392,7 @@ class Internals {
kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size;
static const int kIsolateRootsOffset =
kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size +
kApiPointerSize + kApiPointerSize;
kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
size_t freed_memory = 0;
for (TrackingMap::iterator it = live_.begin(); it != live_.end();) {
switch (callback(it->first, &new_buffer)) {
case kKeepEntry:
it++;
break;
case kKeepAndUpdateEntry:
DCHECK_NOT_NULL(new_buffer);
Page::FromAddress(new_buffer->address())
->local_tracker<Page::kCreateIfNotPresent>()
->AddLive(new_buffer, it->second);
live_.erase(it++);
break;
case kRemoveEntry:
heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
it->second.second);
freed_memory += it->second.second;
live_.erase(it++);
break;
default:
UNREACHABLE();
}
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
not_yet_discovered_.clear();
started_ = false;
}
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void LocalArrayBufferTracker::ScanAndFreeDead() {
switch (liveness_indicator) {
case kForwardingPointer:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
MapWord map_word = old_buffer->map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kKeepAndUpdateEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
case kMarkBit:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer**) {
if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(old_buffer))) {
return LocalArrayBufferTracker::kKeepEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
default:
UNREACHABLE();
}
}
} // namespace internal
} // namespace v8
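
The Process() template above drives everything through a three-way callback result. Below is a standalone sketch of that protocol with simplified stand-in types: the key models JSArrayBuffer*, and where the real code re-inserts a moved buffer into the tracker of its new page, this sketch reuses one map.

```cpp
#include <cstddef>
#include <map>
#include <utility>

enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };

using Key = int;                         // stands in for JSArrayBuffer*
using Value = std::pair<void*, size_t>;  // backing store pointer and length

template <typename Callback>
void Process(std::map<Key, Value>& live, Callback callback) {
  for (auto it = live.begin(); it != live.end();) {
    Key new_key = 0;
    switch (callback(it->first, &new_key)) {
      case kKeepEntry:  // buffer is live and did not move
        ++it;
        break;
      case kKeepAndUpdateEntry:  // buffer moved: re-track under the new key
        live[new_key] = it->second;
        it = live.erase(it);
        break;
      case kRemoveEntry:  // buffer is dead: the real code frees the store here
        it = live.erase(it);
        break;
    }
  }
}
```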
@@ -3,104 +3,53 @@
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
#include "src/objects.h"
#include "src/objects-inl.h"
#include "src/v8.h"
namespace v8 {
namespace internal {
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
ArrayBufferTracker::~ArrayBufferTracker() {
Isolate* isolate = heap()->isolate();
size_t freed_memory = 0;
for (auto& buffer : live_) {
heap_->isolate()->array_buffer_allocator()->Free(buffer.second.first,
buffer.second.second);
freed_memory += buffer.second.second;
for (auto& buffer : live_array_buffers_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
for (auto& buffer : live_array_buffers_for_scavenge_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
live_.clear();
not_yet_discovered_.clear();
}
void LocalArrayBufferTracker::Add(Key key, const Value& value) {
live_[key] = value;
not_yet_discovered_[key] = value;
}
void LocalArrayBufferTracker::AddLive(Key key, const Value& value) {
DCHECK_EQ(not_yet_discovered_.count(key), 0);
live_[key] = value;
}
void LocalArrayBufferTracker::MarkLive(Key key) {
DCHECK_EQ(live_.count(key), 1);
not_yet_discovered_.erase(key);
}
LocalArrayBufferTracker::Value LocalArrayBufferTracker::Remove(Key key) {
DCHECK_EQ(live_.count(key), 1);
Value value = live_[key];
live_.erase(key);
not_yet_discovered_.erase(key);
return value;
}
live_array_buffers_.clear();
live_array_buffers_for_scavenge_.clear();
not_yet_discovered_array_buffers_.clear();
not_yet_discovered_array_buffers_for_scavenge_.clear();
void LocalArrayBufferTracker::FreeDead() {
size_t freed_memory = 0;
for (TrackingMap::iterator it = not_yet_discovered_.begin();
it != not_yet_discovered_.end();) {
heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
it->second.second);
freed_memory += it->second.second;
live_.erase(it->first);
not_yet_discovered_.erase(it++);
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
started_ = false;
}
void LocalArrayBufferTracker::Reset() {
if (!started_) {
not_yet_discovered_ = live_;
started_ = true;
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(freed_memory));
}
}
bool LocalArrayBufferTracker::IsEmpty() {
return live_.empty() && not_yet_discovered_.empty();
}
ArrayBufferTracker::~ArrayBufferTracker() {}
void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;
size_t length = NumberToSize(heap_->isolate(), buffer->byte_length());
Page* page = Page::FromAddress(buffer->address());
LocalArrayBufferTracker* tracker =
page->local_tracker<Page::kCreateIfNotPresent>();
DCHECK_NOT_NULL(tracker);
{
base::LockGuard<base::Mutex> guard(tracker->mutex());
if (Marking::IsBlack(Marking::MarkBitFrom(buffer))) {
tracker->AddLive(buffer, std::make_pair(data, length));
bool in_new_space = heap()->InNewSpace(buffer);
size_t length = NumberToSize(heap()->isolate(), buffer->byte_length());
if (in_new_space) {
live_array_buffers_for_scavenge_[data] = length;
} else {
tracker->Add(buffer, std::make_pair(data, length));
}
live_array_buffers_[data] = length;
}
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap_->isolate())
reinterpret_cast<v8::Isolate*>(heap()->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
@@ -109,79 +58,84 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;
LocalArrayBufferTracker* tracker =
Page::FromAddress(buffer->address())->local_tracker<Page::kDontCreate>();
DCHECK_NOT_NULL(tracker);
size_t length = 0;
{
base::LockGuard<base::Mutex> guard(tracker->mutex());
length = tracker->Remove(buffer).second;
}
heap_->update_amount_of_external_allocated_memory(
-static_cast<intptr_t>(length));
}
bool in_new_space = heap()->InNewSpace(buffer);
std::map<void*, size_t>* live_buffers =
in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
std::map<void*, size_t>* not_yet_discovered_buffers =
in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
: &not_yet_discovered_array_buffers_;
void ArrayBufferTracker::FreeDeadInNewSpace() {
NewSpacePageIterator from_it(heap_->new_space()->FromSpaceStart(),
heap_->new_space()->FromSpaceEnd());
while (from_it.has_next()) {
ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kForwardingPointer>(
from_it.next());
}
heap_->account_amount_of_external_allocated_freed_memory();
}
DCHECK(live_buffers->count(data) > 0);
void ArrayBufferTracker::ResetTrackersInOldSpace() {
heap_->old_space()->ForAllPages([](Page* p) {
LocalArrayBufferTracker* tracker = p->local_tracker<Page::kDontCreate>();
if (tracker != nullptr) {
tracker->Reset();
if (tracker->IsEmpty()) {
p->ReleaseLocalTracker();
}
}
});
size_t length = (*live_buffers)[data];
live_buffers->erase(data);
not_yet_discovered_buffers->erase(data);
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(length));
}
void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
if (buffer->is_external()) return;
base::LockGuard<base::Mutex> guard(&mutex_);
void* data = buffer->backing_store();
if (data == nullptr) return;
if (data == heap_->undefined_value()) return;
Page* page = Page::FromAddress(buffer->address());
LocalArrayBufferTracker* tracker =
page->local_tracker<Page::kCreateIfNotPresent>();
DCHECK_NOT_NULL(tracker);
if (tracker->IsTracked(buffer)) {
base::LockGuard<base::Mutex> guard(page->mutex());
tracker->MarkLive(buffer);
// ArrayBuffer might be in the middle of being constructed.
if (data == heap()->undefined_value()) return;
if (heap()->InNewSpace(buffer)) {
not_yet_discovered_array_buffers_for_scavenge_.erase(data);
} else {
RegisterNew(buffer);
not_yet_discovered_array_buffers_.erase(data);
}
}
void ArrayBufferTracker::FreeDead(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
if (tracker != nullptr) {
base::LockGuard<base::Mutex> guard(tracker->mutex());
tracker->FreeDead();
void ArrayBufferTracker::FreeDead(bool from_scavenge) {
size_t freed_memory = 0;
Isolate* isolate = heap()->isolate();
for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
live_array_buffers_for_scavenge_.erase(buffer.first);
}
}
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
if (tracker != nullptr) {
base::LockGuard<base::Mutex> guard(tracker->mutex());
tracker->ScanAndFreeDead<liveness_indicator>();
if (!from_scavenge) {
for (auto& buffer : not_yet_discovered_array_buffers_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
live_array_buffers_.erase(buffer.first);
}
}
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
// Do not call through the api as this code is triggered while doing a GC.
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(freed_memory));
}
void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
}
template void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::LivenessIndicator::kForwardingPointer>(Page* page);
template void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::LivenessIndicator::kMarkBit>(Page* page);
void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
base::LockGuard<base::Mutex> guard(&mutex_);
if (buffer->is_external()) return;
void* data = buffer->backing_store();
if (!data) return;
// ArrayBuffer might be in the middle of being constructed.
if (data == heap()->undefined_value()) return;
DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
live_array_buffers_for_scavenge_.erase(data);
not_yet_discovered_array_buffers_for_scavenge_.erase(data);
}
} // namespace internal
} // namespace v8
@@ -15,115 +15,61 @@ namespace internal {
// Forward declarations.
class Heap;
class Page;
class JSArrayBuffer;
// LocalArrayBufferTracker is a tracker for live and dead JSArrayBuffer
// objects.
//
// It consists of two sets: a live set and a not-yet-discovered set of buffers.
// Upon registration (in the ArrayBufferTracker) a buffer is added to both
// sets. When a buffer is encountered as live (or added as live) it is removed
// from the not-yet-discovered set. Finally, after each round (sometime during
// GC) the leftover not-yet-discovered buffers are cleaned up. Upon starting
// a new round, the not-yet-discovered set is initialized from the live set.
//
// Caveats:
// - Between two cleanups using |FreeDead| we always need a |Reset| and thus
//   another marking phase.
// - LocalArrayBufferTracker is completely unlocked. Callers need to ensure
//   exclusive access.
class LocalArrayBufferTracker {
public:
typedef std::pair<void*, size_t> Value;
typedef JSArrayBuffer* Key;
enum LivenessIndicator { kForwardingPointer, kMarkBit };
enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };
explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap), started_(false) {}
~LocalArrayBufferTracker();
void Add(Key key, const Value& value);
void AddLive(Key key, const Value& value);
Value Remove(Key key);
void MarkLive(Key key);
bool IsEmpty();
// Resets the tracking set, i.e., not yet discovered buffers are initialized
// from the remaining live set of buffers.
void Reset();
// Frees up any dead backing stores of not yet discovered array buffers.
// Requires that the buffers have been properly marked using MarkLive.
void FreeDead();
// Scans the whole tracker and decides based on liveness_indicator whether
// a JSArrayBuffer is still considered live.
template <LivenessIndicator liveness_indicator>
inline void ScanAndFreeDead();
bool IsTracked(Key key) { return live_.find(key) != live_.end(); }
base::Mutex* mutex() { return &mutex_; }
private:
// TODO(mlippautz): Switch to unordered_map once it is supported on all
// platforms.
typedef std::map<Key, Value> TrackingMap;
// Processes buffers one by one. The CallbackResult decides whether the buffer
// will be dropped or not.
//
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer*, JSArrayBuffer**);
template <typename Callback>
inline void Process(Callback callback);
Heap* heap_;
base::Mutex mutex_;
// |live_| maps tracked JSArrayBuffers to the internally allocated backing
// store and length. For each GC round |not_yet_discovered_| is initialized
// as a copy of |live_|. Upon finding a JSArrayBuffer during GC, the buffer
// is removed from |not_yet_discovered_|. At the end of a GC, we free up the
// remaining JSArrayBuffers in |not_yet_discovered_|.
TrackingMap live_;
TrackingMap not_yet_discovered_;
bool started_;
};
class ArrayBufferTracker {
public:
explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
~ArrayBufferTracker();
inline Heap* heap() { return heap_; }
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.
// Register/unregister a new JSArrayBuffer |buffer| for tracking.
// A new ArrayBuffer was created with |data| as backing store.
void RegisterNew(JSArrayBuffer* buffer);
void Unregister(JSArrayBuffer* buffer);
// Frees all backing store pointers for dead JSArrayBuffers in new space.
void FreeDeadInNewSpace();
// The backing store |data| is no longer owned by V8.
void Unregister(JSArrayBuffer* buffer);
void FreeDead(Page* page);
// A live ArrayBuffer was discovered during marking/scavenge.
void MarkLive(JSArrayBuffer* buffer);
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void ScanAndFreeDeadArrayBuffers(Page* page);
// Frees all backing store pointers that weren't discovered in the previous
// marking or scavenge phase.
void FreeDead(bool from_scavenge);
// A live JSArrayBuffer was discovered during marking.
void MarkLive(JSArrayBuffer* buffer);
// Prepare for a new scavenge phase. A new marking phase is implicitly
// prepared by finishing the previous one.
void PrepareDiscoveryInNewSpace();
// Resets all trackers in old space. Must be called from the main thread.
void ResetTrackersInOldSpace();
// An ArrayBuffer moved from new space to old space.
void Promote(JSArrayBuffer* buffer);
private:
base::Mutex mutex_;
Heap* heap_;
};
// |live_array_buffers_| maps externally allocated memory used as backing
// store for ArrayBuffers to the length of the respective memory blocks.
//
// At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
// a copy of |live_array_buffers_| and we remove pointers as we discover live
// ArrayBuffer objects during marking. At the end of mark/compact, the
// remaining memory blocks can be freed.
std::map<void*, size_t> live_array_buffers_;
std::map<void*, size_t> not_yet_discovered_array_buffers_;
// To be able to free memory held by ArrayBuffers during scavenge as well, we
// have a separate list of allocated memory held by ArrayBuffers in new space.
//
// Since mark/compact also evacuates the new space, all pointers in the
// |live_array_buffers_for_scavenge_| list are also in the
// |live_array_buffers_| list.
std::map<void*, size_t> live_array_buffers_for_scavenge_;
std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
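
The header comments above describe a copy-and-strike-off discovery protocol. Here is a compilable sketch of just that protocol; the isolate, allocator, and external-memory accounting are replaced by stand-ins (free() instead of the array_buffer_allocator):

```cpp
#include <cstdlib>
#include <map>

struct DiscoverySets {
  std::map<void*, size_t> live;     // backing store -> length
  std::map<void*, size_t> pending;  // "not yet discovered"

  // At the start of a round, every live buffer is a removal candidate.
  void StartRound() { pending = live; }

  // Discovering a buffer during marking/scavenge strikes it off.
  void MarkLive(void* data) { pending.erase(data); }

  // Whatever was never discovered is dead; free it and report the total.
  size_t FreeDead() {
    size_t freed = 0;
    for (auto& buffer : pending) {
      free(buffer.first);  // stand-in for the allocator's Free()
      freed += buffer.second;
      live.erase(buffer.first);
    }
    pending.clear();
    return freed;  // caller adjusts the external memory counter
  }
};
```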
@@ -1626,6 +1626,8 @@ void Heap::Scavenge() {
scavenge_collector_->SelectScavengingVisitorsTable();
array_buffer_tracker()->PrepareDiscoveryInNewSpace();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_.Flip();
@@ -1745,7 +1747,7 @@ void Heap::Scavenge() {
// Set age mark.
new_space_.set_age_mark(new_space_.top());
array_buffer_tracker()->FreeDeadInNewSpace();
array_buffer_tracker()->FreeDead(true);
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
......
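
For readability, a pseudo-flow of where the two tracker hooks from the heap.cc hunks above sit in the scavenge sequence. Everything here is an illustrative stand-in, not the real V8 API:

```cpp
struct TrackerSketch {
  void PrepareDiscoveryInNewSpace() { /* pending = live (new space) */ }
  void FreeDead(bool from_scavenge) { /* free undiscovered buffers */ }
};

void ScavengeSketch(TrackerSketch& tracker) {
  tracker.PrepareDiscoveryInNewSpace();  // before flipping the semispaces
  // ... flip semispaces and evacuate live objects; the scavenging visitors
  // strike off each discovered buffer via MarkLive()/Promote() ...
  tracker.FreeDead(/*from_scavenge=*/true);  // after setting the age mark
}
```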
@@ -818,16 +818,6 @@ class Heap {
amount_of_external_allocated_memory_ += delta;
}
void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
amount_of_external_allocated_memory_freed_.Increment(freed);
}
void account_amount_of_external_allocated_freed_memory() {
amount_of_external_allocated_memory_ -=
amount_of_external_allocated_memory_freed_.Value();
amount_of_external_allocated_memory_freed_.SetValue(0);
}
void DeoptMarkedAllocationSites();
bool DeoptMaybeTenuredAllocationSites() {
@@ -1995,8 +1985,6 @@ class Heap {
// Caches the amount of external memory registered at the last global gc.
int64_t amount_of_external_allocated_memory_at_last_global_gc_;
base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
// This can be calculated directly from a pointer to the heap; however, it is
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_;
......
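
The two removed helpers above implement deferred external-memory accounting: concurrent sweeper tasks bump an atomic side counter, and the main thread later folds it into the non-atomic total. A minimal sketch of the pattern, with std::atomic standing in for base::AtomicNumber; member names mirror the removed fields but the class itself is hypothetical:

```cpp
#include <atomic>
#include <cstdint>

class ExternalMemoryAccounting {
 public:
  // Called from concurrent sweeper tasks; touches only the atomic counter.
  void AccountFreed(intptr_t freed) {
    freed_.fetch_add(freed, std::memory_order_relaxed);
  }

  // Called on the main thread (e.g. when preparing mark-compact) to fold
  // the side counter into the non-atomic total.
  void FoldFreedIntoTotal() {
    total_ -= freed_.exchange(0, std::memory_order_relaxed);
  }

 private:
  int64_t total_ = 0;               // amount_of_external_allocated_memory_
  std::atomic<intptr_t> freed_{0};  // ..._freed_ in the removed code
};
```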
@@ -547,7 +547,6 @@ void IncrementalMarking::StartMarking() {
MarkCompactCollector::kMaxMarkingDequeSize);
ActivateIncrementalWriteBarrier();
heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
// Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP
......
@@ -14,7 +14,7 @@
#include "src/frames-inl.h"
#include "src/gdb-jit.h"
#include "src/global-handles.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
@@ -872,10 +872,6 @@ void MarkCompactCollector::Prepare() {
space = spaces.next()) {
space->PrepareForMarkCompact();
}
if (!was_marked_incrementally_) {
heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
}
heap()->account_amount_of_external_allocated_freed_memory();
#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1731,12 +1727,20 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
if (heap_->ShouldBePromoted(object->address(), size) &&
TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
&target_object)) {
// If we end up needing more special cases, we should factor this out.
if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(target_object));
}
promoted_size_ += size;
return true;
}
HeapObject* target = nullptr;
AllocationSpace space = AllocateTargetObject(object, &target);
MigrateObject(HeapObject::cast(target), object, size, space);
if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
}
semispace_copied_size_ += size;
return true;
}
@@ -1861,6 +1865,10 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
}
inline bool Visit(HeapObject* object) {
if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
object->GetHeap()->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(object));
}
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBodyFast(&visitor);
promoted_size_ += object->Size();
@@ -1901,9 +1909,6 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
inline bool Visit(HeapObject* object) {
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBody(&visitor);
if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
return true;
}
@@ -3119,35 +3124,24 @@ bool MarkCompactCollector::Evacuator::EvacuateSinglePage(Page* p,
bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
bool result = false;
DCHECK(page->SweepingDone());
Heap* heap = page->heap();
switch (ComputeEvacuationMode(page)) {
case kObjectsNewToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
heap->array_buffer_tracker()
->ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::kForwardingPointer>(page);
DCHECK(result);
USE(result);
break;
case kPageNewToOld:
result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
// ArrayBufferTracker will be updated during sweeping.
DCHECK(result);
USE(result);
break;
case kObjectsOldToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
heap->array_buffer_tracker()
->ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::kForwardingPointer>(page);
if (!result) {
// Aborted compaction page. We can record slots here to have them
// processed in parallel later on.
EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
heap->array_buffer_tracker()
->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>(
page);
DCHECK(result);
USE(result);
// We need to return failure here to indicate that we want this page
@@ -3390,7 +3384,6 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
freed_bytes = space->UnaccountedFree(free_start, size);
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
}
p->heap()->array_buffer_tracker()->FreeDead(p);
p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
}
@@ -3534,6 +3527,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
// EvacuateNewSpaceAndCandidates iterates over new space objects and for
// ArrayBuffers either re-registers them as live or promotes them. This is
// needed to properly free them.
heap()->array_buffer_tracker()->FreeDead(false);
// Deallocate evacuated candidate pages.
ReleaseEvacuationCandidates();
}
......
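
The EvacuatePage() switch above picks a different liveness indicator per evacuation mode. A small sketch of that decision rule (illustrative only, not V8 API):

```cpp
enum LivenessIndicator { kForwardingPointer, kMarkBit };

LivenessIndicator IndicatorFor(bool evacuation_succeeded) {
  // After a successful evacuation, live objects were moved away, so their
  // old copies carry forwarding pointers and everything else on the page
  // is dead.
  if (evacuation_succeeded) return kForwardingPointer;
  // On an aborted compaction page, objects stayed in place, so only the
  // mark bits from the marking phase distinguish live buffers from dead ones.
  return kMarkBit;
}
```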
@@ -105,6 +105,11 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
JSArrayBufferBodyVisitor;
if (!JSArrayBuffer::cast(object)->is_external()) {
Heap* heap = map->GetHeap();
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
return JSArrayBufferBodyVisitor::Visit(map, object);
}
......
@@ -295,6 +295,14 @@ class ScavengingVisitor : public StaticVisitorBase {
PromotionMode promotion_mode) {
ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object,
promotion_mode);
Heap* heap = map->GetHeap();
MapWord map_word = object->map_word();
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (!heap->InNewSpace(target)) {
heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
}
}
static inline void EvacuateByteArray(Map* map, HeapObject** slot,
......
@@ -270,6 +270,7 @@ template <Page::InitializationMode mode>
Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
PagedSpace* owner) {
Page* page = reinterpret_cast<Page*>(chunk);
page->mutex_ = new base::Mutex();
DCHECK(page->area_size() <= kAllocatableMemory);
DCHECK(chunk->owner() == owner);
......
@@ -511,14 +511,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->progress_bar_ = 0;
chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
chunk->mutex_ = new base::Mutex();
chunk->mutex_ = nullptr;
chunk->available_in_free_list_ = 0;
chunk->wasted_memory_ = 0;
chunk->ResetLiveBytes();
Bitmap::Clear(chunk);
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -1040,8 +1039,6 @@ void MemoryChunk::ReleaseAllocatedMemory() {
if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
}
static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
@@ -1093,12 +1090,6 @@ void MemoryChunk::ReleaseTypedOldToOldSlots() {
delete typed_old_to_old_slots_;
typed_old_to_old_slots_ = nullptr;
}
void MemoryChunk::ReleaseLocalTracker() {
delete local_tracker_;
local_tracker_ = nullptr;
}
// -----------------------------------------------------------------------------
// PagedSpace implementation
......
@@ -14,7 +14,6 @@
#include "src/base/platform/mutex.h"
#include "src/flags.h"
#include "src/hashmap.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/list.h"
#include "src/objects.h"
#include "src/utils.h"
@@ -469,8 +468,6 @@ class MemoryChunk {
kSweepingInProgress,
};
enum ArrayBufferTrackerAccessMode { kDontCreate, kCreateIfNotPresent };
// Every n write barrier invocations we go to runtime even though
// we could have handled it in generated code. This lets us check
// whether we have hit the limit and should do some more marking.
@@ -526,8 +523,7 @@ class MemoryChunk {
+ kPointerSize // AtomicValue next_chunk_
+ kPointerSize // AtomicValue prev_chunk_
// FreeListCategory categories_[kNumberOfCategories]
+ FreeListCategory::kSize * kNumberOfCategories +
kPointerSize; // LocalArrayBufferTracker tracker_
+ FreeListCategory::kSize * kNumberOfCategories;
// We add some more space to the computed header size to account for missing
// alignment requirements in our computation.
@@ -646,16 +642,6 @@ class MemoryChunk {
void AllocateTypedOldToOldSlots();
void ReleaseTypedOldToOldSlots();
template <ArrayBufferTrackerAccessMode tracker_access>
inline LocalArrayBufferTracker* local_tracker() {
if (local_tracker_ == nullptr && tracker_access == kCreateIfNotPresent) {
local_tracker_ = new LocalArrayBufferTracker(heap_);
}
return local_tracker_;
}
void ReleaseLocalTracker();
Address area_start() { return area_start_; }
Address area_end() { return area_end_; }
int area_size() { return static_cast<int>(area_end() - area_start()); }
@@ -838,8 +824,6 @@ class MemoryChunk {
FreeListCategory categories_[kNumberOfCategories];
LocalArrayBufferTracker* local_tracker_;
private:
void InitializeReservedMemory() { reservation_.Reset(); }
@@ -2311,16 +2295,6 @@ class PagedSpace : public Space {
inline void UnlinkFreeListCategories(Page* page);
inline intptr_t RelinkFreeListCategories(Page* page);
// Callback signature:
// void Callback(Page*);
template <typename Callback>
void ForAllPages(Callback callback) {
PageIterator it(this);
while (it.has_next()) {
callback(it.next());
}
}
protected:
// PagedSpaces that should be included in snapshots have different, i.e.,
// smaller, initial pages.
......
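
The removed local_tracker() accessor above is an instance of a lazily allocated per-chunk member gated by a compile-time access mode. A self-contained sketch of the pattern (names are illustrative, not the real MemoryChunk API):

```cpp
#include <cstddef>

enum AccessMode { kDontCreate, kCreateIfNotPresent };

struct Tracker { /* per-page tracking state */ };

class Chunk {
 public:
  // Readers pass kDontCreate and must handle nullptr; writers pass
  // kCreateIfNotPresent to allocate the tracker on first use.
  template <AccessMode mode>
  Tracker* tracker() {
    if (tracker_ == nullptr && mode == kCreateIfNotPresent) {
      tracker_ = new Tracker();
    }
    return tracker_;
  }

  // Mirrors ReleaseLocalTracker(): frees the member and resets the pointer.
  void ReleaseTracker() {
    delete tracker_;
    tracker_ = nullptr;
  }

 private:
  Tracker* tracker_ = nullptr;
};
```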
@@ -830,7 +830,6 @@
'handles.h',
'hashmap.h',
'heap-symbols.h',
'heap/array-buffer-tracker-inl.h',
'heap/array-buffer-tracker.cc',
'heap/array-buffer-tracker.h',
'heap/memory-reducer.cc',
......
@@ -102,7 +102,6 @@
'heap/heap-utils.cc',
'heap/heap-utils.h',
'heap/test-alloc.cc',
'heap/test-array-buffer-tracker.cc',
'heap/test-compaction.cc',
'heap/test-heap.cc',
'heap/test-incremental-marking.cc',
......
@@ -141,21 +141,6 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
space->ClearStats();
}
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
space->EmptyAllocationInfo();
PageIterator pit(space);
while (pit.has_next()) {
pit.next()->MarkNeverAllocateForTesting();
}
}
void GcAndSweep(Heap* heap, AllocationSpace space) {
heap->CollectGarbage(space);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
heap->mark_compact_collector()->EnsureSweepingCompleted();
}
}
} // namespace heap
} // namespace internal
} // namespace v8
@@ -40,10 +40,6 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
// Helper function that simulates a full old-space in the heap.
void SimulateFullSpace(v8::internal::PagedSpace* space);
void AbandonCurrentlyFreeMemory(PagedSpace* space);
void GcAndSweep(Heap* heap, AllocationSpace space);
} // namespace heap
} // namespace internal
} // namespace v8
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/array-buffer-tracker.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
namespace {
typedef i::LocalArrayBufferTracker LocalTracker;
void VerifyTrackedInNewSpace(i::JSArrayBuffer* buf) {
CHECK(i::Page::FromAddress(buf->address())->InNewSpace());
CHECK(i::Page::FromAddress(buf->address())
->local_tracker<i::Page::kCreateIfNotPresent>()
->IsTracked(buf));
}
void VerifyTrackedInOldSpace(i::JSArrayBuffer* buf) {
CHECK(!i::Page::FromAddress(buf->address())->InNewSpace());
CHECK(i::Page::FromAddress(buf->address())
->local_tracker<i::Page::kCreateIfNotPresent>()
->IsTracked(buf));
}
void VerifyUntracked(i::JSArrayBuffer* buf) {
CHECK(!i::Page::FromAddress(buf->address())
->local_tracker<i::Page::kCreateIfNotPresent>()
->IsTracked(buf));
}
} // namespace
namespace v8 {
namespace internal {
// The following tests make sure that JSArrayBuffer tracking works as expected
// when moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because the buffer is promoted to old space as live,
// meaning that it will survive one more GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_OnlyScavenge) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because the buffer is promoted to old space as live,
// meaning that it will survive one more GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_ScavengeAndMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because the buffer is promoted to old space as live,
// meaning that it will survive one more GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_Compaction) {
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
heap::AbandonCurrentlyFreeMemory(heap->old_space());
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
VerifyTrackedInNewSpace(*buf1);
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
Page* page_before_gc = Page::FromAddress(buf1->address());
page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
VerifyTrackedInOldSpace(*buf1);
heap->CollectAllGarbage();
Page* page_after_gc = Page::FromAddress(buf1->address());
VerifyTrackedInOldSpace(*buf1);
CHECK_NE(page_before_gc, page_after_gc);
}
TEST(ArrayBuffer_UnregisterDuringSweep) {
// Regular pages in old space (without compaction) are processed concurrently
// in the sweeper. If we happen to unregister a buffer (either explicitly, or
// implicitly through e.g. |Externalize|) we need to sync with the sweeper
// task.
//
// Note: This test will only fail on TSAN configurations.
// Disable verify-heap since it forces sweeping to be completed in the
// epilogue of the GC.
#ifdef VERIFY_HEAP
i::FLAG_verify_heap = false;
#endif // VERIFY_HEAP
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
{
v8::HandleScope handle_scope(isolate);
// Allocate another buffer on the same page to force processing a
// non-empty set of buffers in the last GC.
Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
VerifyTrackedInNewSpace(*buf);
VerifyTrackedInNewSpace(*buf2);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInNewSpace(*buf);
VerifyTrackedInNewSpace(*buf2);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
VerifyTrackedInOldSpace(*buf2);
}
heap->CollectGarbage(OLD_SPACE);
// |Externalize| will cause the buffer to be |Unregister|ed. Without
// barriers and proper synchronization this will trigger a data race on
// TSAN.
v8::ArrayBuffer::Contents contents = ab->Externalize();
heap->isolate()->array_buffer_allocator()->Free(contents.Data(),
contents.ByteLength());
}
}
} // namespace internal
} // namespace v8
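
ArrayBuffer_UnregisterDuringSweep above targets a classic race: a concurrent sweeper task walking the tracked buffers while the mutator unregisters one (e.g. via Externalize()). A sketch of the locking discipline the test exercises, with stand-in types only:

```cpp
#include <cstddef>
#include <map>
#include <mutex>

struct LockedTracker {
  std::mutex mutex;
  std::map<void*, size_t> live;  // backing store -> length

  // Mutator side, e.g. triggered by Externalize() during sweeping.
  void Unregister(void* data) {
    std::lock_guard<std::mutex> guard(mutex);
    live.erase(data);
  }

  // Concurrent sweeper side, freeing dead buffers of a page.
  void FreeDeadDuringSweep() {
    std::lock_guard<std::mutex> guard(mutex);
    // ... free backing stores of entries not marked live ...
  }
};
```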