Commit 839f3fd4 authored by mlippautz's avatar mlippautz Committed by Commit bot

Track based on JSArrayBuffer addresses on pages instead of the attached

backing store.

Details of tracking:
- Scavenge: New space pages are processed in bulk on the main thread
- MC: Unswept pages are processed in bulk in parallel. All other pages
  are processed by the sweeper concurrently.

BUG=chromium:611688
LOG=N
TEST=cctest/test-array-buffer-tracker/*
CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel

Review-Url: https://codereview.chromium.org/2036643002
Cr-Commit-Position: refs/heads/master@{#36798}
parent ba3703db
......@@ -7443,7 +7443,7 @@ class Internals {
kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size;
static const int kIsolateRootsOffset =
kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size +
kApiPointerSize;
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;
......
This diff is collapsed.
......@@ -7,69 +7,97 @@
#include <map>
#include "src/allocation.h"
#include "src/base/platform/mutex.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
// Forward declarations.
class Heap;
class JSArrayBuffer;
class Page;
class ArrayBufferTracker {
class ArrayBufferTracker : public AllStatic {
public:
explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
~ArrayBufferTracker();
inline Heap* heap() { return heap_; }
enum ProcessingMode {
kUpdateForwardedRemoveOthers,
kUpdateForwardedKeepOthers,
};
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.
// A new ArrayBuffer was created with |data| as backing store.
void RegisterNew(JSArrayBuffer* buffer);
// Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
// access to the tracker by taking the page lock for the corresponding page.
static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
static void Unregister(Heap* heap, JSArrayBuffer* buffer);
// The backing store |data| is no longer owned by V8.
void Unregister(JSArrayBuffer* buffer);
// Frees all backing store pointers for dead JSArrayBuffers in new space.
// Does not take any locks and can only be called during Scavenge.
static void FreeDeadInNewSpace(Heap* heap);
// A live ArrayBuffer was discovered during marking/scavenge.
void MarkLive(JSArrayBuffer* buffer);
// Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be
// taken by the caller.
static void FreeDead(Page* page);
// Frees all backing store pointers that weren't discovered in the previous
// marking or scavenge phase.
void FreeDead(bool from_scavenge);
// Frees all remaining, live or dead, array buffers on a page. Only useful
// during tear down.
static void FreeAll(Page* page);
// Prepare for a new scavenge phase. A new marking phase is implicitly
// prepared by finishing the previous one.
void PrepareDiscoveryInNewSpace();
// Processes all array buffers on a given page. |mode| specifies the action
// to perform on the buffers. Returns whether the tracker is empty or not.
static bool ProcessBuffers(Page* page, ProcessingMode mode);
// An ArrayBuffer moved from new space to old space.
void Promote(JSArrayBuffer* buffer);
// Returns whether a buffer is currently tracked.
static bool IsTracked(JSArrayBuffer* buffer);
};
private:
base::Mutex mutex_;
Heap* heap_;
// LocalArrayBufferTracker tracks internalized array buffers.
//
// Never use directly but instead always call through |ArrayBufferTracker|.
class LocalArrayBufferTracker {
public:
typedef std::pair<void*, size_t> Value;
typedef JSArrayBuffer* Key;
// |live_array_buffers_| maps externally allocated memory used as backing
// store for ArrayBuffers to the length of the respective memory blocks.
//
// At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
// a copy of |live_array_buffers_| and we remove pointers as we discover live
// ArrayBuffer objects during marking. At the end of mark/compact, the
// remaining memory blocks can be freed.
std::map<void*, size_t> live_array_buffers_;
std::map<void*, size_t> not_yet_discovered_array_buffers_;
// To be able to free memory held by ArrayBuffers during scavenge as well, we
// have a separate list of allocated memory held by ArrayBuffers in new space.
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
~LocalArrayBufferTracker();
void Add(Key key, const Value& value);
Value Remove(Key key);
// Frees up array buffers determined by |free_mode|.
template <FreeMode free_mode>
void Free();
// Processes buffers one by one. The CallbackResult of the callback decides
// what action to take on the buffer.
//
// Since mark/compact also evacuates the new space, all pointers in the
// |live_array_buffers_for_scavenge_| list are also in the
// |live_array_buffers_| list.
std::map<void*, size_t> live_array_buffers_for_scavenge_;
std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
template <typename Callback>
inline void Process(Callback callback);
bool IsEmpty() { return array_buffers_.empty(); }
bool IsTracked(Key key) {
return array_buffers_.find(key) != array_buffers_.end();
}
private:
// TODO(mlippautz): Switch to unordered_map once it is supported on all
// platforms.
typedef std::map<Key, Value> TrackingMap;
Heap* heap_;
TrackingMap array_buffers_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
......@@ -160,7 +160,6 @@ Heap::Heap()
gc_callbacks_depth_(0),
deserialization_complete_(false),
strong_roots_list_(NULL),
array_buffer_tracker_(NULL),
heap_iterator_depth_(0),
force_oom_(false) {
// Allow build-time customization of the max semispace size. Building
......@@ -1635,8 +1634,6 @@ void Heap::Scavenge() {
mark_compact_collector()->RegisterWrappersWithEmbedderHeapTracer();
}
array_buffer_tracker()->PrepareDiscoveryInNewSpace();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_.Flip();
......@@ -1761,7 +1758,7 @@ void Heap::Scavenge() {
// Set age mark.
new_space_.set_age_mark(new_space_.top());
array_buffer_tracker()->FreeDead(true);
ArrayBufferTracker::FreeDeadInNewSpace(this);
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
......@@ -2047,12 +2044,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
return array_buffer_tracker()->RegisterNew(buffer);
ArrayBufferTracker::RegisterNew(this, buffer);
}
void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
return array_buffer_tracker()->Unregister(buffer);
ArrayBufferTracker::Unregister(this, buffer);
}
......@@ -5348,8 +5345,6 @@ bool Heap::SetUp() {
scavenge_job_ = new ScavengeJob();
array_buffer_tracker_ = new ArrayBufferTracker(this);
LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
LOG(isolate_, IntPtrTEvent("heap-available", Available()));
......@@ -5509,9 +5504,6 @@ void Heap::TearDown() {
delete scavenge_job_;
scavenge_job_ = nullptr;
delete array_buffer_tracker_;
array_buffer_tracker_ = nullptr;
isolate_->global_handles()->TearDown();
external_string_table_.TearDown();
......
......@@ -822,6 +822,16 @@ class Heap {
amount_of_external_allocated_memory_ += delta;
}
void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
amount_of_external_allocated_memory_freed_.Increment(freed);
}
void account_amount_of_external_allocated_freed_memory() {
amount_of_external_allocated_memory_ -=
amount_of_external_allocated_memory_freed_.Value();
amount_of_external_allocated_memory_freed_.SetValue(0);
}
void DeoptMarkedAllocationSites();
bool DeoptMaybeTenuredAllocationSites() {
......@@ -1356,10 +1366,6 @@ class Heap {
void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
void UnregisterArrayBuffer(JSArrayBuffer* buffer);
inline ArrayBufferTracker* array_buffer_tracker() {
return array_buffer_tracker_;
}
// ===========================================================================
// Allocation site tracking. =================================================
// ===========================================================================
......@@ -1997,6 +2003,8 @@ class Heap {
// Caches the amount of external memory registered at the last global gc.
int64_t amount_of_external_allocated_memory_at_last_global_gc_;
base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
// This can be calculated directly from a pointer to the heap; however, it is
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_;
......@@ -2235,8 +2243,6 @@ class Heap {
StrongRootsList* strong_roots_list_;
ArrayBufferTracker* array_buffer_tracker_;
// The depth of HeapIterator nestings.
int heap_iterator_depth_;
......
......@@ -872,6 +872,7 @@ void MarkCompactCollector::Prepare() {
space = spaces.next()) {
space->PrepareForMarkCompact();
}
heap()->account_amount_of_external_allocated_freed_memory();
#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
......@@ -1751,20 +1752,12 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
if (heap_->ShouldBePromoted<DEFAULT_PROMOTION>(object->address(), size) &&
TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
&target_object)) {
// If we end up needing more special cases, we should factor this out.
if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(target_object));
}
promoted_size_ += size;
return true;
}
HeapObject* target = nullptr;
AllocationSpace space = AllocateTargetObject(object, &target);
MigrateObject(HeapObject::cast(target), object, size, space);
if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
}
semispace_copied_size_ += size;
return true;
}
......@@ -1889,10 +1882,6 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
}
inline bool Visit(HeapObject* object) {
if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
object->GetHeap()->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(object));
}
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBodyFast(&visitor);
promoted_size_ += object->Size();
......@@ -3167,11 +3156,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
switch (ComputeEvacuationMode(page)) {
case kObjectsNewToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
ArrayBufferTracker::ProcessBuffers(
page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
DCHECK(result);
USE(result);
break;
case kPageNewToOld:
result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
// ArrayBufferTracker will be updated during sweeping.
DCHECK(result);
USE(result);
break;
......@@ -3185,12 +3177,17 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
page->SetFlag(Page::COMPACTION_WAS_ABORTED);
EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
ArrayBufferTracker::ProcessBuffers(
page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
DCHECK(result);
USE(result);
// We need to return failure here to indicate that we want this page
// added to the sweeper.
return false;
}
ArrayBufferTracker::ProcessBuffers(
page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
break;
default:
UNREACHABLE();
......@@ -3370,6 +3367,10 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
DCHECK((p->skip_list() == NULL) || (skip_list_mode == REBUILD_SKIP_LIST));
DCHECK(parallelism == SWEEP_ON_MAIN_THREAD || sweeping_mode == SWEEP_ONLY);
// Before we sweep objects on the page, we free dead array buffers which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p);
Address free_start = p->area_start();
DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
......@@ -3569,11 +3570,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
// EvacuateNewSpaceAndCandidates iterates over new space objects and for
// ArrayBuffers either re-registers them as live or promotes them. This is
// needed to properly free them.
heap()->array_buffer_tracker()->FreeDead(false);
// Deallocate evacuated candidate pages.
ReleaseEvacuationCandidates();
}
......@@ -3909,6 +3905,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
PrintIsolate(isolate(), "sweeping: released page: %p",
static_cast<void*>(p));
}
ArrayBufferTracker::FreeAll(p);
space->ReleasePage(p);
continue;
}
......
......@@ -77,7 +77,10 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
&FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
int>::Visit);
table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
table_.Register(
kVisitJSArrayBuffer,
&FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
int>::Visit);
table_.Register(kVisitFreeSpace, &VisitFreeSpace);
......@@ -99,21 +102,6 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
kVisitStructGeneric>();
}
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
JSArrayBufferBodyVisitor;
if (!JSArrayBuffer::cast(object)->is_external()) {
Heap* heap = map->GetHeap();
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
return JSArrayBufferBodyVisitor::Visit(map, object);
}
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
Map* map, HeapObject* object) {
......@@ -185,7 +173,10 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
table_.Register(
kVisitJSArrayBuffer,
&FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
void>::Visit);
// Registration for kVisitJSRegExp is done by StaticVisitor.
......@@ -520,24 +511,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
JSObjectVisitor::Visit(map, object);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
void> JSArrayBufferBodyVisitor;
JSArrayBufferBodyVisitor::Visit(map, object);
if (!JSArrayBuffer::cast(object)->is_external() &&
!heap->InNewSpace(object)) {
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
Map* map, HeapObject* object) {
......
......@@ -300,7 +300,6 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return FreeSpace::cast(object)->size();
}
INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));
class DataObjectVisitor {
......@@ -379,7 +378,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
......
......@@ -36,7 +36,8 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
table_.Register(kVisitJSArrayBuffer,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
table_.Register(
kVisitNativeContext,
......@@ -281,19 +282,6 @@ class ScavengingVisitor : public StaticVisitorBase {
object_size);
}
static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
HeapObject* object) {
ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
Heap* heap = map->GetHeap();
MapWord map_word = object->map_word();
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (!heap->InNewSpace(target)) {
heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
}
}
static inline void EvacuateByteArray(Map* map, HeapObject** slot,
HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
......
......@@ -270,7 +270,6 @@ template <Page::InitializationMode mode>
Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
PagedSpace* owner) {
Page* page = reinterpret_cast<Page*>(chunk);
page->mutex_ = new base::Mutex();
DCHECK(page->area_size() <= kAllocatableMemory);
DCHECK(chunk->owner() == owner);
......
......@@ -8,6 +8,7 @@
#include "src/base/platform/platform.h"
#include "src/base/platform/semaphore.h"
#include "src/full-codegen/full-codegen.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/slot-set.h"
#include "src/macro-assembler.h"
#include "src/msan.h"
......@@ -511,13 +512,14 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->progress_bar_ = 0;
chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
chunk->mutex_ = nullptr;
chunk->mutex_ = new base::Mutex();
chunk->available_in_free_list_ = 0;
chunk->wasted_memory_ = 0;
chunk->ResetLiveBytes();
Bitmap::Clear(chunk);
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
......@@ -1010,6 +1012,7 @@ void MemoryChunk::ReleaseAllocatedMemory() {
if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
}
static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
......@@ -1061,6 +1064,18 @@ void MemoryChunk::ReleaseTypedOldToOldSlots() {
delete typed_old_to_old_slots_;
typed_old_to_old_slots_ = nullptr;
}
void MemoryChunk::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(heap());
}
void MemoryChunk::ReleaseLocalTracker() {
DCHECK_NOT_NULL(local_tracker_);
delete local_tracker_;
local_tracker_ = nullptr;
}
// -----------------------------------------------------------------------------
// PagedSpace implementation
......@@ -1101,7 +1116,9 @@ bool PagedSpace::HasBeenSetUp() { return true; }
void PagedSpace::TearDown() {
PageIterator iterator(this);
while (iterator.has_next()) {
heap()->memory_allocator()->Free<MemoryAllocator::kFull>(iterator.next());
Page* page = iterator.next();
ArrayBufferTracker::FreeAll(page);
heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
}
anchor_.set_next_page(&anchor_);
anchor_.set_prev_page(&anchor_);
......@@ -1719,7 +1736,14 @@ void SemiSpace::SetUp(int initial_capacity, int maximum_capacity) {
void SemiSpace::TearDown() {
// Properly uncommit memory to keep the allocator counters in sync.
if (is_committed()) Uncommit();
if (is_committed()) {
NewSpacePageIterator it(this);
while (it.has_next()) {
Page* page = it.next();
ArrayBufferTracker::FreeAll(page);
}
Uncommit();
}
current_capacity_ = maximum_capacity_ = 0;
}
......
......@@ -27,6 +27,7 @@ class CompactionSpace;
class CompactionSpaceCollection;
class FreeList;
class Isolate;
class LocalArrayBufferTracker;
class MemoryAllocator;
class MemoryChunk;
class Page;
......@@ -528,7 +529,8 @@ class MemoryChunk {
+ kPointerSize // AtomicValue next_chunk_
+ kPointerSize // AtomicValue prev_chunk_
// FreeListCategory categories_[kNumberOfCategories]
+ FreeListCategory::kSize * kNumberOfCategories;
+ FreeListCategory::kSize * kNumberOfCategories +
kPointerSize; // LocalArrayBufferTracker* local_tracker_;
// We add some more space to the computed header size to amount for missing
// alignment requirements in our computation.
......@@ -638,6 +640,7 @@ class MemoryChunk {
inline TypedSlotSet* typed_old_to_old_slots() {
return typed_old_to_old_slots_;
}
inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
void AllocateOldToNewSlots();
void ReleaseOldToNewSlots();
......@@ -647,6 +650,8 @@ class MemoryChunk {
void ReleaseTypedOldToNewSlots();
void AllocateTypedOldToOldSlots();
void ReleaseTypedOldToOldSlots();
void AllocateLocalTracker();
void ReleaseLocalTracker();
Address area_start() { return area_start_; }
Address area_end() { return area_end_; }
......@@ -833,6 +838,8 @@ class MemoryChunk {
FreeListCategory categories_[kNumberOfCategories];
LocalArrayBufferTracker* local_tracker_;
private:
void InitializeReservedMemory() { reservation_.Reset(); }
......
......@@ -62,6 +62,7 @@ executable("cctest") {
"heap/heap-utils.cc",
"heap/heap-utils.h",
"heap/test-alloc.cc",
"heap/test-array-buffer-tracker.cc",
"heap/test-compaction.cc",
"heap/test-heap.cc",
"heap/test-incremental-marking.cc",
......
......@@ -103,6 +103,7 @@
'heap/heap-utils.cc',
'heap/heap-utils.h',
'heap/test-alloc.cc',
'heap/test-array-buffer-tracker.cc',
'heap/test-compaction.cc',
'heap/test-heap.cc',
'heap/test-incremental-marking.cc',
......
......@@ -141,6 +141,21 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
space->ClearStats();
}
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
space->EmptyAllocationInfo();
PageIterator pit(space);
while (pit.has_next()) {
pit.next()->MarkNeverAllocateForTesting();
}
}
void GcAndSweep(Heap* heap, AllocationSpace space) {
heap->CollectGarbage(space);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
heap->mark_compact_collector()->EnsureSweepingCompleted();
}
}
} // namespace heap
} // namespace internal
} // namespace v8
......@@ -40,6 +40,10 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
// Helper function that simulates a full old-space in the heap.
void SimulateFullSpace(v8::internal::PagedSpace* space);
void AbandonCurrentlyFreeMemory(PagedSpace* space);
void GcAndSweep(Heap* heap, AllocationSpace space);
} // namespace heap
} // namespace internal
} // namespace v8
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment