Commit a43b732e authored by mlippautz, committed by Commit bot

Revert of Reland "[heap] Fine-grained JSArrayBuffer tracking" (patchset #2 id:20001 of https://codereview.chromium.org/2024063002/ )

Reason for revert:
Breaking
  http://build.chromium.org/p/client.v8/builders/V8%20Linux64%20-%20avx2/builds/7972

Original issue's description:
> Track based on JSArrayBuffer addresses on pages instead of the attached
> backing store.
>
> Details of tracking:
> - Scavenge: New space pages are processed in bulk on the main thread
> - MC: Unswept pages are processed in bulk in parallel. All other pages
>   are processed by the sweeper concurrently.
>
> BUG=chromium:611688
> LOG=N
> TEST=cctest/test-array-buffer-tracker/*
> CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel
>
> Committed: https://crrev.com/089da007bb990b5b29aab257aa836fdd3f3b8ce0
> Cr-Commit-Position: refs/heads/master@{#36608}

TBR=hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:611688

Review-Url: https://codereview.chromium.org/2028503003
Cr-Commit-Position: refs/heads/master@{#36609}
parent 089da007
@@ -1166,7 +1166,6 @@ v8_source_set("v8_base") {
     "src/handles.h",
     "src/hashmap.h",
     "src/heap-symbols.h",
-    "src/heap/array-buffer-tracker-inl.h",
    "src/heap/array-buffer-tracker.cc",
    "src/heap/array-buffer-tracker.h",
    "src/heap/gc-idle-time-handler.cc",

@@ -7417,7 +7417,7 @@ class Internals {
      kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size;
  static const int kIsolateRootsOffset =
      kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size +
-      kApiPointerSize + kApiPointerSize;
+      kApiPointerSize;
  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;

// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
size_t freed_memory = 0;
for (TrackingMap::iterator it = live_.begin(); it != live_.end();) {
switch (callback(it->first, &new_buffer)) {
case kKeepEntry:
it++;
break;
case kKeepAndUpdateEntry:
DCHECK_NOT_NULL(new_buffer);
if (Marking::IsBlack(Marking::MarkBitFrom(new_buffer))) {
Page::FromAddress(new_buffer->address())
->local_tracker<Page::kCreateIfNotPresent>()
->AddLive(new_buffer, it->second);
} else {
Page::FromAddress(new_buffer->address())
->local_tracker<Page::kCreateIfNotPresent>()
->Add(new_buffer, it->second);
}
live_.erase(it++);
break;
case kRemoveEntry:
heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
it->second.second);
freed_memory += it->second.second;
live_.erase(it++);
break;
default:
UNREACHABLE();
}
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
not_yet_discovered_.clear();
started_ = false;
}
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void LocalArrayBufferTracker::ScanAndFreeDead() {
switch (liveness_indicator) {
case kForwardingPointer:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
MapWord map_word = old_buffer->map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kKeepAndUpdateEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
case kMarkBit:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer**) {
if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(old_buffer))) {
return LocalArrayBufferTracker::kKeepEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
case kForwardingPointerOrMarkBit:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(old_buffer))) {
return LocalArrayBufferTracker::kKeepEntry;
}
MapWord map_word = old_buffer->map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kKeepAndUpdateEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
default:
UNREACHABLE();
}
}
} // namespace internal
} // namespace v8
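
For context, the removed fast path above is driven per page: callers fetch the page's LocalArrayBufferTracker and let Process() walk its entries with a liveness callback. Below is a minimal sketch of such a caller, assuming the Page accessors removed elsewhere in this CL (kDontCreate avoids allocating a tracker for pages that never registered a buffer); it is an illustration, not the literal body of ArrayBufferTracker::ScanAndFreeDeadArrayBuffers.

// Sketch only: mirrors how the evacuation path would drive the per-page
// tracker after new-space evacuation; not the verbatim removed implementation.
void FreeDeadBuffersAfterEvacuation(Page* page) {
  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
  if (tracker == nullptr) return;  // No JSArrayBuffer was ever registered here.
  // Keep entries whose buffer has a forwarding pointer (it survived and moved);
  // free the backing stores of everything else.
  tracker->ScanAndFreeDead<LocalArrayBufferTracker::kForwardingPointer>();
}
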
@@ -7,7 +7,6 @@
 #include <map>
-#include "src/allocation.h"
 #include "src/base/platform/mutex.h"
 #include "src/globals.h"
@@ -16,112 +15,61 @@ namespace internal {
 // Forward declarations.
 class Heap;
-class Page;
 class JSArrayBuffer;
-// LocalArrayBufferTracker is tracker for live and dead JSArrayBuffer objects.
-//
-// It consists of two sets, a live, and a not yet discovered set of buffers.
-// Upon registration (in the ArrayBufferTracker) the buffers are added to both
-// sets. When a buffer is encountered as live (or added is live) it is removed
-// from the not yet discovered set. Finally, after each round (sometime during
-// GC) the left over not yet discovered buffers are cleaned up. Upon starting
-// a new round the not yet discovered buffers are initialized from the live set.
-//
-// Caveats:
-// - Between cleaning up the buffers using |Free| we always need a |Reset| and
-//   thus another marking phase.
-// - LocalArrayBufferTracker is completely unlocked. Calls need to ensure
-//   exclusive access.
-class LocalArrayBufferTracker {
- public:
-  typedef std::pair<void*, size_t> Value;
-  typedef JSArrayBuffer* Key;
-  enum LivenessIndicator {
-    kForwardingPointer,
-    kMarkBit,
-    kForwardingPointerOrMarkBit
-  };
-  enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };
-  explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap), started_(false) {}
-  ~LocalArrayBufferTracker();
-  void Add(Key key, const Value& value);
-  void AddLive(Key key, const Value& value);
-  Value Remove(Key key);
-  void MarkLive(Key key);
-  bool IsEmpty();
-  // Resets the tracking set, i.e., not yet discovered buffers are initialized
-  // from the remaining live set of buffers.
-  void Reset();
-  // Frees up any dead backing stores of not yet discovered array buffers.
-  // Requires that the buffers have been properly marked using MarkLive.
-  void FreeDead();
-  // Scans the whole tracker and decides based on liveness_indicator whether
-  // a JSArrayBuffer is still considered live.
-  template <LivenessIndicator liveness_indicator>
-  inline void ScanAndFreeDead();
-  bool IsTracked(Key key) { return live_.find(key) != live_.end(); }
- private:
-  // TODO(mlippautz): Switch to unordered_map once it is supported on all
-  // platforms.
-  typedef std::map<Key, Value> TrackingMap;
-  // Processes buffers one by one. The CallbackResult decides whether the buffer
-  // will be dropped or not.
-  //
-  // Callback should be of type:
-  //   CallbackResult fn(JSArrayBuffer*, JSArrayBuffer**);
-  template <typename Callback>
-  inline void Process(Callback callback);
-  Heap* heap_;
-  // |live_| maps tracked JSArrayBuffers to the internally allocated backing
-  // store and length. For each GC round |not_yet_discovered_| is initialized
-  // as a copy of |live_|. Upon finding a JSArrayBuffer during GC, the buffer
-  // is removed from |not_yet_discovered_|. At the end of a GC, we free up the
-  // remaining JSArrayBuffers in |not_yet_discovered_|.
-  TrackingMap live_;
-  TrackingMap not_yet_discovered_;
-  bool started_;
-};
-class ArrayBufferTracker : public AllStatic {
+class ArrayBufferTracker {
 public:
+  explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
+  ~ArrayBufferTracker();
+  inline Heap* heap() { return heap_; }
  // The following methods are used to track raw C++ pointers to externally
  // allocated memory used as backing store in live array buffers.
-  // Register/unregister a new JSArrayBuffer |buffer| for tracking.
-  static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
-  static void Unregister(Heap* heap, JSArrayBuffer* buffer);
-  // Frees all backing store pointers for dead JSArrayBuffers in new space.
-  static void FreeDeadInNewSpace(Heap* heap);
-  static void FreeDead(Page* page);
-  template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
-  static void ScanAndFreeDeadArrayBuffers(Page* page);
-  // A live JSArrayBuffer was discovered during marking.
-  static void MarkLive(Heap* heap, JSArrayBuffer* buffer);
-  // Resets all trackers in old space. Is required to be called from the main
-  // thread.
-  static void ResetTrackersInOldSpace(Heap* heap);
-  static bool IsTracked(JSArrayBuffer* buffer);
-};
+  // A new ArrayBuffer was created with |data| as backing store.
+  void RegisterNew(JSArrayBuffer* buffer);
+  // The backing store |data| is no longer owned by V8.
+  void Unregister(JSArrayBuffer* buffer);
+  // A live ArrayBuffer was discovered during marking/scavenge.
+  void MarkLive(JSArrayBuffer* buffer);
+  // Frees all backing store pointers that weren't discovered in the previous
+  // marking or scavenge phase.
+  void FreeDead(bool from_scavenge);
+  // Prepare for a new scavenge phase. A new marking phase is implicitly
+  // prepared by finishing the previous one.
+  void PrepareDiscoveryInNewSpace();
+  // An ArrayBuffer moved from new space to old space.
+  void Promote(JSArrayBuffer* buffer);
+ private:
+  base::Mutex mutex_;
+  Heap* heap_;
+  // |live_array_buffers_| maps externally allocated memory used as backing
+  // store for ArrayBuffers to the length of the respective memory blocks.
+  //
+  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
+  // a copy of |live_array_buffers_| and we remove pointers as we discover live
+  // ArrayBuffer objects during marking. At the end of mark/compact, the
+  // remaining memory blocks can be freed.
+  std::map<void*, size_t> live_array_buffers_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_;
+  // To be able to free memory held by ArrayBuffers during scavenge as well, we
+  // have a separate list of allocated memory held by ArrayBuffers in new space.
+  //
+  // Since mark/compact also evacuates the new space, all pointers in the
+  // |live_array_buffers_for_scavenge_| list are also in the
+  // |live_array_buffers_| list.
+  std::map<void*, size_t> live_array_buffers_for_scavenge_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
+};
 } // namespace internal
 } // namespace v8
 #endif  // V8_HEAP_ARRAY_BUFFER_TRACKER_H_

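
To make the restored (pre-CL) design easier to compare with the per-page scheme above: there is again a single heap-global tracker keyed by backing-store pointer, and the GC drives it in three phases. The following is a rough sketch of the scavenge-side calls, assuming only the interface in the header above; the driver function itself is illustrative, and the real call sites are in the Heap::Scavenge hunk below.

// Illustrative driver only; the actual calls are spread across Heap::Scavenge
// and the scavenging visitors.
void ScavengeSketch(Heap* heap) {
  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
  // 1. Snapshot: copy the new-space buffers into the not-yet-discovered set.
  tracker->PrepareDiscoveryInNewSpace();
  // 2. Evacuation: visitors call MarkLive() for buffers that stay in new
  //    space and Promote() for buffers that move to old space.
  // 3. Cleanup: whatever was not discovered is dead; free its backing store.
  tracker->FreeDead(true /* from_scavenge */);
}
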
@@ -160,6 +160,7 @@ Heap::Heap()
      gc_callbacks_depth_(0),
      deserialization_complete_(false),
      strong_roots_list_(NULL),
+      array_buffer_tracker_(NULL),
      heap_iterator_depth_(0),
      force_oom_(false) {
  // Allow build-time customization of the max semispace size. Building
@@ -1625,6 +1626,8 @@ void Heap::Scavenge() {
  scavenge_collector_->SelectScavengingVisitorsTable();
+  array_buffer_tracker()->PrepareDiscoveryInNewSpace();
  // Flip the semispaces. After flipping, to space is empty, from space has
  // live objects.
  new_space_.Flip();
@@ -1744,7 +1747,7 @@ void Heap::Scavenge() {
  // Set age mark.
  new_space_.set_age_mark(new_space_.top());
-  ArrayBufferTracker::FreeDeadInNewSpace(this);
+  array_buffer_tracker()->FreeDead(true);
  // Update how much has survived scavenge.
  IncrementYoungSurvivorsCounter(static_cast<int>(
@@ -2024,12 +2027,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
-  ArrayBufferTracker::RegisterNew(this, buffer);
+  return array_buffer_tracker()->RegisterNew(buffer);
 }
 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
-  ArrayBufferTracker::Unregister(this, buffer);
+  return array_buffer_tracker()->Unregister(buffer);
 }
@@ -5322,6 +5325,8 @@ bool Heap::SetUp() {
  scavenge_job_ = new ScavengeJob();
+  array_buffer_tracker_ = new ArrayBufferTracker(this);
  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
@@ -5481,6 +5486,9 @@ void Heap::TearDown() {
  delete scavenge_job_;
  scavenge_job_ = nullptr;
+  delete array_buffer_tracker_;
+  array_buffer_tracker_ = nullptr;
  isolate_->global_handles()->TearDown();
  external_string_table_.TearDown();

@@ -820,16 +820,6 @@ class Heap {
    amount_of_external_allocated_memory_ += delta;
  }
-  void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
-    amount_of_external_allocated_memory_freed_.Increment(freed);
-  }
-  void account_amount_of_external_allocated_freed_memory() {
-    amount_of_external_allocated_memory_ -=
-        amount_of_external_allocated_memory_freed_.Value();
-    amount_of_external_allocated_memory_freed_.SetValue(0);
-  }
  void DeoptMarkedAllocationSites();
  bool DeoptMaybeTenuredAllocationSites() {
@@ -1364,6 +1354,10 @@ class Heap {
  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);
+  inline ArrayBufferTracker* array_buffer_tracker() {
+    return array_buffer_tracker_;
+  }
  // ===========================================================================
  // Allocation site tracking. =================================================
  // ===========================================================================
@@ -2000,8 +1994,6 @@ class Heap {
  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;
-  base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;
@@ -2240,6 +2232,8 @@ class Heap {
  StrongRootsList* strong_roots_list_;
+  ArrayBufferTracker* array_buffer_tracker_;
  // The depth of HeapIterator nestings.
  int heap_iterator_depth_;

@@ -547,7 +547,6 @@ void IncrementalMarking::StartMarking() {
      MarkCompactCollector::kMaxMarkingDequeSize);
  ActivateIncrementalWriteBarrier();
-  ArrayBufferTracker::ResetTrackersInOldSpace(heap_);
  // Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP

@@ -14,7 +14,7 @@
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
-#include "src/heap/array-buffer-tracker-inl.h"
+#include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
@@ -872,10 +872,6 @@ void MarkCompactCollector::Prepare() {
       space = spaces.next()) {
    space->PrepareForMarkCompact();
  }
-  if (!was_marked_incrementally_) {
-    ArrayBufferTracker::ResetTrackersInOldSpace(heap_);
-  }
-  heap()->account_amount_of_external_allocated_freed_memory();
#ifdef VERIFY_HEAP
  if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1731,12 +1727,20 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
    if (heap_->ShouldBePromoted(object->address(), size) &&
        TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                          &target_object)) {
+      // If we end up needing more special cases, we should factor this out.
+      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->Promote(
+            JSArrayBuffer::cast(target_object));
+      }
      promoted_size_ += size;
      return true;
    }
    HeapObject* target = nullptr;
    AllocationSpace space = AllocateTargetObject(object, &target);
    MigrateObject(HeapObject::cast(target), object, size, space);
+    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
+    }
    semispace_copied_size_ += size;
    return true;
  }
@@ -1861,6 +1865,10 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
  }
  inline bool Visit(HeapObject* object) {
+    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+      object->GetHeap()->array_buffer_tracker()->Promote(
+          JSArrayBuffer::cast(object));
+    }
    RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
    object->IterateBodyFast(&visitor);
    promoted_size_ += object->Size();
@@ -1901,9 +1909,6 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
  inline bool Visit(HeapObject* object) {
    RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
    object->IterateBody(&visitor);
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      ArrayBufferTracker::MarkLive(heap_, JSArrayBuffer::cast(object));
-    }
    return true;
  }
@@ -3122,14 +3127,11 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
  switch (ComputeEvacuationMode(page)) {
    case kObjectsNewToOld:
      result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
-      ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
-          LocalArrayBufferTracker::kForwardingPointer>(page);
      DCHECK(result);
      USE(result);
      break;
    case kPageNewToOld:
      result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
-      // ArrayBufferTracker will be updated during sweeping.
      DCHECK(result);
      USE(result);
      break;
@@ -3143,16 +3145,12 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
        page->SetFlag(Page::COMPACTION_WAS_ABORTED);
        EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
        result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
-        ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
-            LocalArrayBufferTracker::kForwardingPointerOrMarkBit>(page);
        DCHECK(result);
        USE(result);
        // We need to return failure here to indicate that we want this page
        // added to the sweeper.
        return false;
      }
-      ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
-          LocalArrayBufferTracker::kForwardingPointer>(page);
      break;
    default:
      UNREACHABLE();
@@ -3388,7 +3386,6 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
    freed_bytes = space->UnaccountedFree(free_start, size);
    max_freed_bytes = Max(freed_bytes, max_freed_bytes);
  }
-  ArrayBufferTracker::FreeDead(p);
  p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
  return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }
@@ -3532,6 +3529,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
    }
  }
+  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
+  // ArrayBuffers either re-registers them as live or promotes them. This is
+  // needed to properly free them.
+  heap()->array_buffer_tracker()->FreeDead(false);
  // Deallocate evacuated candidate pages.
  ReleaseEvacuationCandidates();
 }

@@ -105,6 +105,11 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
      JSArrayBufferBodyVisitor;
+  if (!JSArrayBuffer::cast(object)->is_external()) {
+    Heap* heap = map->GetHeap();
+    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+  }
  return JSArrayBufferBodyVisitor::Visit(map, object);
 }
@@ -528,7 +533,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
  if (!JSArrayBuffer::cast(object)->is_external() &&
      !heap->InNewSpace(object)) {
-    ArrayBufferTracker::MarkLive(heap, JSArrayBuffer::cast(object));
+    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
 }

@@ -295,6 +295,14 @@ class ScavengingVisitor : public StaticVisitorBase {
                                       PromotionMode promotion_mode) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object,
                                                    promotion_mode);
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) {
+      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+    }
  }
  static inline void EvacuateByteArray(Map* map, HeapObject** slot,

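The hook above only promotes the buffer when its forwarding address ended up outside new space; otherwise the regular MarkLive path keeps it in the scavenge set. As a hedged sketch of what Promote() likely does with the two map pairs declared in the restored header (array-buffer-tracker.cc is not part of this diff, so the body below is an assumption, not the committed code):

// Assumed shape of the restored Promote(): a promoted buffer stays in the
// global live map and only leaves the scavenge-side maps.
void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
  base::LockGuard<base::Mutex> guard(&mutex_);
  if (buffer->is_external()) return;
  void* data = buffer->backing_store();
  if (data == nullptr) return;
  live_array_buffers_for_scavenge_.erase(data);
  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
}
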
@@ -270,6 +270,7 @@ template <Page::InitializationMode mode>
 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
                       PagedSpace* owner) {
  Page* page = reinterpret_cast<Page*>(chunk);
+  page->mutex_ = new base::Mutex();
  DCHECK(page->area_size() <= kAllocatableMemory);
  DCHECK(chunk->owner() == owner);

@@ -511,14 +511,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
  chunk->progress_bar_ = 0;
  chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
  chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
-  chunk->mutex_ = new base::Mutex();
+  chunk->mutex_ = nullptr;
  chunk->available_in_free_list_ = 0;
  chunk->wasted_memory_ = 0;
  chunk->ResetLiveBytes();
  Bitmap::Clear(chunk);
  chunk->set_next_chunk(nullptr);
  chunk->set_prev_chunk(nullptr);
-  chunk->local_tracker_.SetValue(nullptr);
  DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
  DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -985,8 +984,6 @@ void MemoryChunk::ReleaseAllocatedMemory() {
  if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
  if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
  if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
-  if (local_tracker_.Value() != nullptr) ReleaseLocalTracker();
 }
 static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
@@ -1038,12 +1035,6 @@ void MemoryChunk::ReleaseTypedOldToOldSlots() {
  delete typed_old_to_old_slots_;
  typed_old_to_old_slots_ = nullptr;
 }
-void MemoryChunk::ReleaseLocalTracker() {
-  delete local_tracker_.Value();
-  local_tracker_.SetValue(nullptr);
-}
 // -----------------------------------------------------------------------------
 // PagedSpace implementation

@@ -14,7 +14,6 @@
 #include "src/base/platform/mutex.h"
 #include "src/flags.h"
 #include "src/hashmap.h"
-#include "src/heap/array-buffer-tracker.h"
 #include "src/list.h"
 #include "src/objects.h"
 #include "src/utils.h"
@@ -469,8 +468,6 @@ class MemoryChunk {
    kSweepingInProgress,
  };
-  enum ArrayBufferTrackerAccessMode { kDontCreate, kCreateIfNotPresent };
  // Every n write barrier invocations we go to runtime even though
  // we could have handled it in generated code. This lets us check
  // whether we have hit the limit and should do some more marking.
@@ -526,8 +523,7 @@ class MemoryChunk {
      + kPointerSize  // AtomicValue next_chunk_
      + kPointerSize  // AtomicValue prev_chunk_
      // FreeListCategory categories_[kNumberOfCategories]
-      + FreeListCategory::kSize * kNumberOfCategories +
-        kPointerSize;  // LocalArrayBufferTracker tracker_
+      + FreeListCategory::kSize * kNumberOfCategories;
  // We add some more space to the computed header size to amount for missing
  // alignment requirements in our computation.
@@ -646,21 +642,6 @@ class MemoryChunk {
  void AllocateTypedOldToOldSlots();
  void ReleaseTypedOldToOldSlots();
-  template <ArrayBufferTrackerAccessMode tracker_access>
-  inline LocalArrayBufferTracker* local_tracker() {
-    LocalArrayBufferTracker* tracker = local_tracker_.Value();
-    if (tracker == nullptr && tracker_access == kCreateIfNotPresent) {
-      tracker = new LocalArrayBufferTracker(heap_);
-      if (!local_tracker_.TrySetValue(nullptr, tracker)) {
-        tracker = local_tracker_.Value();
-      }
-      DCHECK_NOT_NULL(tracker);
-    }
-    return tracker;
-  }
-  void ReleaseLocalTracker();
  Address area_start() { return area_start_; }
  Address area_end() { return area_end_; }
  int area_size() { return static_cast<int>(area_end() - area_start()); }
@@ -846,8 +827,6 @@ class MemoryChunk {
  FreeListCategory categories_[kNumberOfCategories];
-  base::AtomicValue<LocalArrayBufferTracker*> local_tracker_;
 private:
  void InitializeReservedMemory() { reservation_.Reset(); }
@@ -2301,16 +2280,6 @@ class PagedSpace : public Space {
  inline void UnlinkFreeListCategories(Page* page);
  inline intptr_t RelinkFreeListCategories(Page* page);
-  // Callback signature:
-  //   void Callback(Page*);
-  template <typename Callback>
-  void ForAllPages(Callback callback) {
-    PageIterator it(this);
-    while (it.has_next()) {
-      callback(it.next());
-    }
-  }
 protected:
  // PagedSpaces that should be included in snapshots have different, i.e.,
  // smaller, initial pages.

@@ -831,7 +831,6 @@
        'handles.h',
        'hashmap.h',
        'heap-symbols.h',
-        'heap/array-buffer-tracker-inl.h',
        'heap/array-buffer-tracker.cc',
        'heap/array-buffer-tracker.h',
        'heap/memory-reducer.cc',

@@ -102,7 +102,6 @@
        'heap/heap-utils.cc',
        'heap/heap-utils.h',
        'heap/test-alloc.cc',
-        'heap/test-array-buffer-tracker.cc',
        'heap/test-compaction.cc',
        'heap/test-heap.cc',
        'heap/test-incremental-marking.cc',

@@ -141,21 +141,6 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
  space->ClearStats();
 }
-void AbandonCurrentlyFreeMemory(PagedSpace* space) {
-  space->EmptyAllocationInfo();
-  PageIterator pit(space);
-  while (pit.has_next()) {
-    pit.next()->MarkNeverAllocateForTesting();
-  }
-}
-void GcAndSweep(Heap* heap, AllocationSpace space) {
-  heap->CollectGarbage(space);
-  if (heap->mark_compact_collector()->sweeping_in_progress()) {
-    heap->mark_compact_collector()->EnsureSweepingCompleted();
-  }
-}
 } // namespace heap
 } // namespace internal
 } // namespace v8

@@ -40,10 +40,6 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
 // Helper function that simulates a full old-space in the heap.
 void SimulateFullSpace(v8::internal::PagedSpace* space);
-void AbandonCurrentlyFreeMemory(PagedSpace* space);
-void GcAndSweep(Heap* heap, AllocationSpace space);
 } // namespace heap
 } // namespace internal
 } // namespace v8

// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/array-buffer-tracker.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
namespace {
typedef i::LocalArrayBufferTracker LocalTracker;
bool IsTracked(i::JSArrayBuffer* buf) {
return i::ArrayBufferTracker::IsTracked(buf);
}
bool IsTrackedInOldSpace(i::JSArrayBuffer* buf) {
return !i::Page::FromAddress(buf->address())->InNewSpace() && IsTracked(buf);
}
bool IsTrackedInNewSpace(i::JSArrayBuffer* buf) {
return i::Page::FromAddress(buf->address())->InNewSpace() && IsTracked(buf);
}
} // namespace
namespace v8 {
namespace internal {
// The following tests make sure that JSArrayBuffer tracking works as expected when
// moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_OnlyScavenge) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_ScavengeAndMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInNewSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromAddress(buf->address())->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_Compaction) {
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
heap::AbandonCurrentlyFreeMemory(heap->old_space());
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
CHECK(IsTrackedInNewSpace(*buf1));
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
Page* page_before_gc = Page::FromAddress(buf1->address());
page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
CHECK(IsTrackedInOldSpace(*buf1));
heap->CollectAllGarbage();
Page* page_after_gc = Page::FromAddress(buf1->address());
CHECK(IsTrackedInOldSpace(*buf1));
CHECK_NE(page_before_gc, page_after_gc);
}
TEST(ArrayBuffer_UnregisterDuringSweep) {
// Regular pages in old space (without compaction) are processed concurrently
// in the sweeper. If we happen to unregister a buffer (either explicitly, or
// implicitly through e.g. |Externalize|) we need to sync with the sweeper
// task.
//
// Note: This test will only fail on TSAN configurations.
// Disable verify-heap since it forces sweeping to be completed in the
// epilogue of the GC.
#ifdef VERIFY_HEAP
i::FLAG_verify_heap = false;
#endif // VERIFY_HEAP
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
{
v8::HandleScope handle_scope(isolate);
// Allocate another buffer on the same page to force processing a
// non-empty set of buffers in the last GC.
Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
      CHECK(IsTrackedInNewSpace(*buf));
      CHECK(IsTrackedInNewSpace(*buf2));
      heap::GcAndSweep(heap, NEW_SPACE);
      CHECK(IsTrackedInNewSpace(*buf));
      CHECK(IsTrackedInNewSpace(*buf2));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(*buf));
CHECK(IsTrackedInOldSpace(*buf2));
}
heap->CollectGarbage(OLD_SPACE);
// |Externalize| will cause the buffer to be |Unregister|ed. Without
// barriers and proper synchronization this will trigger a data race on
// TSAN.
v8::ArrayBuffer::Contents contents = ab->Externalize();
heap->isolate()->array_buffer_allocator()->Free(contents.Data(),
contents.ByteLength());
}
}
TEST(ArrayBuffer_NonLivePromotion) {
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Handle<FixedArray> root =
heap->isolate()->factory()->NewFixedArray(1, TENURED);
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
root->set(0, *buf); // Buffer that should not be promoted as live.
}
heap::SimulateIncrementalMarking(heap, false);
CHECK(IsTrackedInNewSpace(JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInNewSpace(JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(JSArrayBuffer::cast(root->get(0))));
raw_ab = JSArrayBuffer::cast(root->get(0));
root->set(0, heap->undefined_value());
heap::SimulateIncrementalMarking(heap, true);
// Prohibit page from being released.
Page::FromAddress(raw_ab->address())->MarkNeverEvacuate();
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
}
TEST(ArrayBuffer_LivePromotion) {
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Handle<FixedArray> root =
heap->isolate()->factory()->NewFixedArray(1, TENURED);
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
root->set(0, *buf); // Buffer that should be promoted as live.
}
heap::SimulateIncrementalMarking(heap, true);
CHECK(IsTrackedInNewSpace(JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInNewSpace(JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTrackedInOldSpace(JSArrayBuffer::cast(root->get(0))));
raw_ab = JSArrayBuffer::cast(root->get(0));
root->set(0, heap->undefined_value());
// Prohibit page from being released.
Page::FromAddress(raw_ab->address())->MarkNeverEvacuate();
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(raw_ab));
}
}
} // namespace internal
} // namespace v8