Commit 4cdf71e2 authored by mlippautz, committed by Commit bot

[heap] Fine-grained JSArrayBuffer tracking

Track based on JSArrayBuffer addresses instead of the attached backing store.
This way we can later on iterate buffers on a single page.

BUG=chromium:581412
LOG=N
R=jochen@chromium.org, hpayer@chromium.org

Review-Url: https://codereview.chromium.org/1936233002
Cr-Commit-Position: refs/heads/master@{#36140}
parent 7d38a161
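
The gist of the change, as a minimal standalone sketch (plain C++; the map
shape, kPageSize, and the toy addresses are simplified stand-ins, not V8's
types): once the tracking map is keyed by the JSArrayBuffer's own address
rather than its backing store, an ordered map can enumerate exactly the
buffers on one page with a single range query.

  #include <cassert>
  #include <cstddef>
  #include <cstdint>
  #include <map>
  #include <utility>

  // Assumed page size for the demo only; real V8 pages differ.
  constexpr uintptr_t kPageSize = 4096;

  int main() {
    // Key: address of the buffer object; value: (backing store, length).
    std::map<uintptr_t, std::pair<void*, std::size_t>> tracked;
    tracked[0x1000] = {nullptr, 16};  // on the page starting at 0x1000
    tracked[0x1100] = {nullptr, 32};  // same page
    tracked[0x2000] = {nullptr, 64};  // next page

    // Enumerate only the buffers on [0x1000, 0x1000 + kPageSize). With
    // backing-store keys this query is impossible: backing stores live
    // outside the V8 heap, so their addresses say nothing about pages.
    uintptr_t page_start = 0x1000;
    std::size_t on_page = 0;
    for (auto it = tracked.lower_bound(page_start),
              end = tracked.lower_bound(page_start + kPageSize);
         it != end; ++it) {
      ++on_page;
    }
    assert(on_page == 2);
    return 0;
  }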
@@ -15,18 +15,20 @@ namespace internal {
 ArrayBufferTracker::~ArrayBufferTracker() {
   Isolate* isolate = heap()->isolate();
   size_t freed_memory = 0;
-  for (auto& buffer : live_array_buffers_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
+  for (auto& buffer : live_old_gen_) {
+    isolate->array_buffer_allocator()->Free(buffer.second.first,
+                                            buffer.second.second);
+    freed_memory += buffer.second.second;
   }
-  for (auto& buffer : live_array_buffers_for_scavenge_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
+  for (auto& buffer : live_young_gen_) {
+    isolate->array_buffer_allocator()->Free(buffer.second.first,
+                                            buffer.second.second);
+    freed_memory += buffer.second.second;
   }
-  live_array_buffers_.clear();
-  live_array_buffers_for_scavenge_.clear();
-  not_yet_discovered_array_buffers_.clear();
-  not_yet_discovered_array_buffers_for_scavenge_.clear();
+  live_old_gen_.clear();
+  live_young_gen_.clear();
+  not_yet_discovered_old_gen_.clear();
+  not_yet_discovered_young_gen_.clear();
   if (freed_memory > 0) {
     heap()->update_amount_of_external_allocated_memory(
@@ -42,9 +44,13 @@ void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
   bool in_new_space = heap()->InNewSpace(buffer);
   size_t length = NumberToSize(heap()->isolate(), buffer->byte_length());
   if (in_new_space) {
-    live_array_buffers_for_scavenge_[data] = length;
+    live_young_gen_[buffer->address()] = std::make_pair(data, length);
+    not_yet_discovered_young_gen_[buffer->address()] =
+        std::make_pair(data, length);
   } else {
-    live_array_buffers_[data] = length;
+    live_old_gen_[buffer->address()] = std::make_pair(data, length);
+    not_yet_discovered_old_gen_[buffer->address()] =
+        std::make_pair(data, length);
   }

   // We may go over the limit of externally allocated memory here. We call the
@@ -59,82 +65,115 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
   if (!data) return;
   bool in_new_space = heap()->InNewSpace(buffer);
-  std::map<void*, size_t>* live_buffers =
-      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
-  std::map<void*, size_t>* not_yet_discovered_buffers =
-      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
-                   : &not_yet_discovered_array_buffers_;
+  Key key = buffer->address();
+  TrackingMap* live_buffers = in_new_space ? &live_young_gen_ : &live_old_gen_;
+  TrackingMap* not_yet_discovered_buffers = in_new_space
+                                                ? &not_yet_discovered_young_gen_
+                                                : &not_yet_discovered_old_gen_;

-  DCHECK(live_buffers->count(data) > 0);
-  size_t length = (*live_buffers)[data];
-  live_buffers->erase(data);
-  not_yet_discovered_buffers->erase(data);
+  DCHECK(live_buffers->count(key) > 0);
+  size_t length = (*live_buffers)[key].second;
+  live_buffers->erase(key);
+  not_yet_discovered_buffers->erase(key);

   heap()->update_amount_of_external_allocated_memory(
       -static_cast<int64_t>(length));
 }

-void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
-  base::LockGuard<base::Mutex> guard(&mutex_);
-  void* data = buffer->backing_store();
-
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == heap()->undefined_value()) return;
-  if (heap()->InNewSpace(buffer)) {
-    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
-  } else {
-    not_yet_discovered_array_buffers_.erase(data);
-  }
-}
-
 void ArrayBufferTracker::FreeDead(bool from_scavenge) {
   size_t freed_memory = 0;
   Isolate* isolate = heap()->isolate();
-  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
-    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-    freed_memory += buffer.second;
-    live_array_buffers_for_scavenge_.erase(buffer.first);
+  for (auto& buffer : not_yet_discovered_young_gen_) {
+    isolate->array_buffer_allocator()->Free(buffer.second.first,
+                                            buffer.second.second);
+    freed_memory += buffer.second.second;
+    live_young_gen_.erase(buffer.first);
   }

   if (!from_scavenge) {
-    for (auto& buffer : not_yet_discovered_array_buffers_) {
-      isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
-      freed_memory += buffer.second;
-      live_array_buffers_.erase(buffer.first);
+    for (auto& buffer : not_yet_discovered_old_gen_) {
+      isolate->array_buffer_allocator()->Free(buffer.second.first,
+                                              buffer.second.second);
+      freed_memory += buffer.second.second;
+      live_old_gen_.erase(buffer.first);
     }
   }

-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
+  not_yet_discovered_young_gen_ = live_young_gen_;
+  if (!from_scavenge) not_yet_discovered_old_gen_ = live_old_gen_;

   // Do not call through the api as this code is triggered while doing a GC.
   heap()->update_amount_of_external_allocated_memory(
       -static_cast<int64_t>(freed_memory));
 }

-void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
-  not_yet_discovered_array_buffers_for_scavenge_ =
-      live_array_buffers_for_scavenge_;
-}
-
-void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
-  base::LockGuard<base::Mutex> guard(&mutex_);
-
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == heap()->undefined_value()) return;
-  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
-  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
-  live_array_buffers_for_scavenge_.erase(data);
-  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+#define UPDATE_GUARD(buffer, data)               \
+  if (buffer->is_external()) return;             \
+  data = buffer->backing_store();                \
+  if (data == nullptr) return;                   \
+  if (data == heap()->undefined_value()) return; \
+  base::LockGuard<base::Mutex> guard(&mutex_);
+
+void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
+  void* data = nullptr;
+  UPDATE_GUARD(buffer, data);
+
+  if (heap()->InNewSpace(buffer)) {
+    not_yet_discovered_young_gen_.erase(buffer->address());
+  } else {
+    not_yet_discovered_old_gen_.erase(buffer->address());
+  }
+}
+
+void ArrayBufferTracker::Promote(JSArrayBuffer* new_buffer,
+                                 JSArrayBuffer* old_buffer) {
+  void* data = nullptr;
+  UPDATE_GUARD(new_buffer, data);
+
+  Key new_key = new_buffer->address();
+  Key old_key = old_buffer->address();
+  DCHECK(live_young_gen_.count(old_key) > 0);
+  live_old_gen_[new_key] = live_young_gen_[old_key];
+  live_young_gen_.erase(old_key);
+  not_yet_discovered_young_gen_.erase(old_key);
+}
+
+void ArrayBufferTracker::Compact(JSArrayBuffer* new_buffer,
+                                 JSArrayBuffer* old_buffer) {
+  void* data = nullptr;
+  UPDATE_GUARD(new_buffer, data);
+
+  Key new_key = new_buffer->address();
+  Key old_key = old_buffer->address();
+  DCHECK_NE(new_key, old_key);
+  DCHECK(live_old_gen_.count(old_key) > 0);
+  live_old_gen_[new_key] = live_old_gen_[old_key];
+  live_old_gen_.erase(old_key);
+  not_yet_discovered_old_gen_.erase(old_key);
+}
+
+void ArrayBufferTracker::SemiSpaceCopy(JSArrayBuffer* new_buffer,
+                                       JSArrayBuffer* old_buffer) {
+  void* data = nullptr;
+  UPDATE_GUARD(new_buffer, data);
+
+  Key new_key = new_buffer->address();
+  Key old_key = old_buffer->address();
+  DCHECK(live_young_gen_.count(old_key) > 0);
+  live_young_gen_[new_key] = live_young_gen_[old_key];
+  live_young_gen_.erase(old_key);
+  not_yet_discovered_young_gen_.erase(old_key);
+}
+
+#undef UPDATE_GUARD
+
+bool ArrayBufferTracker::IsTrackedInOldGenForTesting(JSArrayBuffer* buffer) {
+  return live_old_gen_.find(buffer->address()) != live_old_gen_.end();
+}
+
+bool ArrayBufferTracker::IsTrackedInYoungGenForTesting(JSArrayBuffer* buffer) {
+  return live_young_gen_.find(buffer->address()) != live_young_gen_.end();
+}

 }  // namespace internal
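
The MarkLive/FreeDead pair above implements mark-by-erasure over the tracking
maps. A toy standalone version of one GC round (assumed integer keys, no V8
types):

  #include <cassert>
  #include <cstddef>
  #include <map>
  #include <set>

  int main() {
    // All tracked buffers: key -> byte length (simplified).
    std::map<int, std::size_t> live = {{1, 10}, {2, 20}, {3, 30}};
    // Start of a GC round: assume everything is dead until discovered.
    std::map<int, std::size_t> not_yet_discovered = live;

    // MarkLive analog: each buffer the collector reaches is erased from
    // the candidate set.
    std::set<int> reached = {1, 3};
    for (int key : reached) not_yet_discovered.erase(key);

    // FreeDead analog: whatever was never discovered is unreachable, so
    // free its memory and drop it from |live|.
    std::size_t freed = 0;
    for (auto& entry : not_yet_discovered) {
      freed += entry.second;
      live.erase(entry.first);
    }
    not_yet_discovered = live;  // seed the next round

    assert(freed == 20);
    assert(live.size() == 2);
    return 0;
  }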
@@ -19,11 +19,14 @@ class JSArrayBuffer;
 class ArrayBufferTracker {
  public:
+  typedef void* Key;
+
+  enum CallbackResult { kKeepEntry, kRemoveEntry };
+  enum ListType { kNewSpace, kOldSpace };
+
   explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~ArrayBufferTracker();

-  inline Heap* heap() { return heap_; }
-
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
@@ -40,36 +43,58 @@ class ArrayBufferTracker {
   // marking or scavenge phase.
   void FreeDead(bool from_scavenge);

-  // Prepare for a new scavenge phase. A new marking phase is implicitly
-  // prepared by finishing the previous one.
-  void PrepareDiscoveryInNewSpace();
-
-  // An ArrayBuffer moved from new space to old space.
-  void Promote(JSArrayBuffer* buffer);
+  // Update methods used to update the tracking state of given ArrayBuffers.
+  void Promote(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
+  void SemiSpaceCopy(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
+  void Compact(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
+
+  // Callback should be of type:
+  //   CallbackResult fn(Key);
+  template <typename Callback>
+  void IterateNotYetDiscoveredEntries(ListType list, Key from, Key to,
+                                      Callback callback) {
+    TrackingMap* map = list == kNewSpace ? &not_yet_discovered_young_gen_
+                                         : &not_yet_discovered_old_gen_;
+    base::LockGuard<base::Mutex> guard(&mutex_);
+    TrackingMap::iterator it = map->lower_bound(from);
+    const TrackingMap::iterator end = map->upper_bound(to);
+    while (it != end) {
+      if (callback(it->first) == kKeepEntry) {
+        ++it;
+      } else {
+        map->erase(it++);
+      }
+    }
+  }
+
+  bool IsTrackedInOldGenForTesting(JSArrayBuffer* buffer);
+  bool IsTrackedInYoungGenForTesting(JSArrayBuffer* buffer);

  private:
+  typedef std::map<Key, std::pair<void*, size_t>> TrackingMap;
+
+  inline Heap* heap() { return heap_; }
+
   base::Mutex mutex_;
   Heap* heap_;

-  // |live_array_buffers_| maps externally allocated memory used as backing
-  // store for ArrayBuffers to the length of the respective memory blocks.
-  //
-  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
-  // a copy of |live_array_buffers_| and we remove pointers as we discover live
-  // ArrayBuffer objects during marking. At the end of mark/compact, the
-  // remaining memory blocks can be freed.
-  std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_;
-
-  // To be able to free memory held by ArrayBuffers during scavenge as well, we
-  // have a separate list of allocated memory held by ArrayBuffers in new
-  // space.
-  //
-  // Since mark/compact also evacuates the new space, all pointers in the
-  // |live_array_buffers_for_scavenge_| list are also in the
-  // |live_array_buffers_| list.
-  std::map<void*, size_t> live_array_buffers_for_scavenge_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
+  // |live_*| maps tracked JSArrayBuffers to the internally allocated backing
+  // store and length.
+  // For each GC round (Scavenger, or incremental/full MC),
+  // |not_yet_discovered_*| is initialized as a copy of |live_*|. Upon finding
+  // a JSArrayBuffer during GC, the buffer is removed from
+  // |not_yet_discovered_*|. At the end of a GC, we free up the remaining
+  // JSArrayBuffers in |not_yet_discovered_*|.
+  TrackingMap live_old_gen_;
+  TrackingMap not_yet_discovered_old_gen_;
+  TrackingMap live_young_gen_;
+  TrackingMap not_yet_discovered_young_gen_;
 };

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
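
IterateNotYetDiscoveredEntries is what makes the per-page iteration from the
commit message possible. A hypothetical caller (SweepPage and its page bounds
are illustrative only, not part of this change) could look like:

  #include "src/heap/array-buffer-tracker.h"

  namespace v8 {
  namespace internal {

  // Walk the not-yet-discovered young-gen entries whose keys fall inside
  // one page's address range and drop every entry reported as dead.
  void SweepPage(ArrayBufferTracker* tracker, void* page_start,
                 void* page_end) {
    tracker->IterateNotYetDiscoveredEntries(
        ArrayBufferTracker::kNewSpace, page_start, page_end,
        [](ArrayBufferTracker::Key key) {
          // A real caller would check liveness of the buffer at |key|;
          // this sketch reports every entry dead.
          return ArrayBufferTracker::kRemoveEntry;
        });
  }

  }  // namespace internal
  }  // namespace v8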
@@ -1626,8 +1626,6 @@ void Heap::Scavenge() {
   scavenge_collector_->SelectScavengingVisitorsTable();

-  array_buffer_tracker()->PrepareDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1675,7 +1675,8 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     // If we end up needing more special cases, we should factor this out.
     if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
       heap_->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(target_object));
+          JSArrayBuffer::cast(target_object),
+          reinterpret_cast<JSArrayBuffer*>(object));
     }
     promoted_size_ += size;
     return true;
@@ -1684,7 +1685,9 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
+      heap_->array_buffer_tracker()->SemiSpaceCopy(
+          JSArrayBuffer::cast(target),
+          reinterpret_cast<JSArrayBuffer*>(object));
     }
     semispace_copied_size_ += size;
     return true;
@@ -1811,7 +1814,7 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
   inline bool Visit(HeapObject* object) {
     if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
       object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
+          JSArrayBuffer::cast(object), JSArrayBuffer::cast(object));
     }
     RecordMigratedSlotVisitor visitor;
     object->IterateBodyFast(&visitor);
@@ -1838,8 +1841,16 @@ class MarkCompactCollector::EvacuateOldSpaceVisitor final
       HeapObject* target_object = nullptr;
       if (TryEvacuateObject(target_space, object, &target_object)) {
         DCHECK(object->map_word().IsForwardingAddress());
+        if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
+          heap_->array_buffer_tracker()->Compact(
+              JSArrayBuffer::cast(target_object),
+              reinterpret_cast<JSArrayBuffer*>(object));
+        }
         return true;
       }
+      if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+      }
       return false;
     }
   };
@@ -295,7 +295,13 @@ class ScavengingVisitor : public StaticVisitorBase {
       DCHECK(map_word.IsForwardingAddress());
       HeapObject* target = map_word.ToForwardingAddress();
       if (!heap->InNewSpace(target)) {
-        heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+        heap->array_buffer_tracker()->Promote(
+            JSArrayBuffer::cast(target),
+            reinterpret_cast<JSArrayBuffer*>(object));
+      } else {
+        heap->array_buffer_tracker()->SemiSpaceCopy(
+            JSArrayBuffer::cast(target),
+            reinterpret_cast<JSArrayBuffer*>(object));
       }
     }
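
Both branches pass the pre-move object along with its moved copy because the
tracker is keyed by object address: every move must re-key the corresponding
map entry. The same operation in isolation (toy integer keys standing in for
addresses):

  #include <cassert>
  #include <cstddef>
  #include <map>
  #include <utility>

  int main() {
    std::map<int, std::pair<void*, std::size_t>> young_gen, old_gen;
    young_gen[100] = {nullptr, 42};  // buffer object at "address" 100

    // Promote analog: the scavenger moved the object to old space at
    // "address" 200, so the entry changes both key and generation.
    old_gen[200] = young_gen[100];
    young_gen.erase(100);

    assert(old_gen.count(200) == 1);
    assert(young_gen.empty());
    return 0;
  }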
@@ -100,6 +100,7 @@
         'gay-shortest.cc',
         'heap/heap-tester.h',
         'heap/test-alloc.cc',
+        'heap/test-array-buffer-tracker.cc',
         'heap/test-compaction.cc',
         'heap/test-heap.cc',
         'heap/test-incremental-marking.cc',
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/utils-inl.h"
namespace {
typedef i::ArrayBufferTracker Tracker;
void VerifyTrackedInNewSpace(Tracker* tracker, i::JSArrayBuffer* buf) {
CHECK(tracker->IsTrackedInYoungGenForTesting(buf));
CHECK(!tracker->IsTrackedInOldGenForTesting(buf));
}
void VerifyTrackedInOldSpace(Tracker* tracker, i::JSArrayBuffer* buf) {
CHECK(!tracker->IsTrackedInYoungGenForTesting(buf));
CHECK(tracker->IsTrackedInOldGenForTesting(buf));
}
void VerifyUntracked(Tracker* tracker, i::JSArrayBuffer* buf) {
CHECK(!tracker->IsTrackedInYoungGenForTesting(buf));
CHECK(!tracker->IsTrackedInOldGenForTesting(buf));
}
} // namespace
namespace v8 {
namespace internal {
// The following tests make sure that JSArrayBuffer tracking works as expected
// when moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
Tracker* tracker = heap->array_buffer_tracker();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(OLD_SPACE);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(OLD_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
raw_ab = *buf;
}
heap->CollectGarbage(OLD_SPACE);
VerifyUntracked(tracker, raw_ab);
}
TEST(ArrayBuffer_OnlyScavenge) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
Tracker* tracker = heap->array_buffer_tracker();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
raw_ab = *buf;
}
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, raw_ab);
}
TEST(ArrayBuffer_ScavengeAndMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
Tracker* tracker = heap->array_buffer_tracker();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInNewSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
heap->CollectGarbage(OLD_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, *buf);
raw_ab = *buf;
}
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, raw_ab);
heap->CollectGarbage(OLD_SPACE);
VerifyUntracked(tracker, raw_ab);
}
TEST(ArrayBuffer_IterateNotYetDiscoveredEntries) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
Tracker* tracker = heap->array_buffer_tracker();
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
VerifyTrackedInNewSpace(tracker, *buf1);
heap->CollectGarbage(NEW_SPACE);
heap->CollectGarbage(NEW_SPACE);
VerifyTrackedInOldSpace(tracker, *buf1);
Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
Page* interesting_page = Page::FromAddress(buf2->address());
bool found_ab1 = false;
bool found_ab2 = false;
tracker->IterateNotYetDiscoveredEntries(
    Tracker::kNewSpace, reinterpret_cast<Tracker::Key>(interesting_page),
    reinterpret_cast<Tracker::Key>(
        reinterpret_cast<Address>(interesting_page) + Page::kPageSize),
[buf1, buf2, &found_ab1, &found_ab2](Tracker::Key key) {
if (key == buf1->address()) {
found_ab1 = true;
}
if (key == buf2->address()) {
found_ab2 = true;
}
return Tracker::kKeepEntry;
});
CHECK(!found_ab1);
CHECK(found_ab2);
}
TEST(ArrayBuffer_Compaction) {
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
AbandonCurrentlyFreeMemory(heap->old_space());
Tracker* tracker = heap->array_buffer_tracker();
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
VerifyTrackedInNewSpace(tracker, *buf1);
heap->CollectGarbage(NEW_SPACE);
heap->CollectGarbage(NEW_SPACE);
Page* page_before_gc = Page::FromAddress(buf1->address());
page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
VerifyTrackedInOldSpace(tracker, *buf1);
heap->CollectAllGarbage();
Page* page_after_gc = Page::FromAddress(buf1->address());
VerifyTrackedInOldSpace(tracker, *buf1);
CHECK_NE(page_before_gc, page_after_gc);
}
} // namespace internal
} // namespace v8
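
Assuming the usual cctest convention (the suite name is the test file's name),
a single test from this file would be run as, e.g.:

  out/Release/cctest test-array-buffer-tracker/ArrayBuffer_OnlyMC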
@@ -142,6 +142,14 @@ static inline void SimulateIncrementalMarking(i::Heap* heap,
   CHECK(marking->IsComplete());
 }

+static inline void AbandonCurrentlyFreeMemory(PagedSpace* space) {
+  space->EmptyAllocationInfo();
+  PageIterator pit(space);
+  while (pit.has_next()) {
+    pit.next()->MarkNeverAllocateForTesting();
+  }
+}
+
 }  // namespace internal
 }  // namespace v8