Commit 11926e6e authored by Peter Marshall, committed by Commit Bot

[gc] Attach page to LocalArrayBufferTracker rather than space

Each local ABT is logically for one page. We passed the space in to the
constructor, but this is wrong because the space a page is in can
change, e.g. for compaction pages.

Instead, pass the page to the local ABT constructor and always get the
space from this. To do this we need to push the AllocateLocalTracker()
helper and friends down to Page, rather than its superclass MemoryChunk.

Unfortunately, we need to keep ReleaseLocalTracker() on MemoryChunk even
though only Pages can have local trackers, because we can't do virtual
dispatch on MemoryChunk::ReleaseAllocatedMemory() which would allow us
to clean up the tracker memory nicely for pages only.

We also have to make sure we update external bytes accounting properly
when swapping spaces, as in SemiSpace::Swap().

Change-Id: Iff02e41dd12a6b04a57fcc32f9e2b4f049fcbc24
Reviewed-on: https://chromium-review.googlesource.com/1107635
Commit-Queue: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53930}
parent e90af2c7
......@@ -53,10 +53,12 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
heap->update_external_memory(-static_cast<intptr_t>(length));
}
// Derives the owning Space from the page on every call rather than caching
// it: a page's space can change (e.g. for compaction pages), which is why
// the tracker stores the page and not the space.
Space* LocalArrayBufferTracker::space() {
  Space* owner = page_->owner();
  return owner;
}
template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) {
size_t freed_memory = 0;
Isolate* isolate = space_->heap()->isolate();
Isolate* isolate = page_->heap()->isolate();
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
......@@ -74,10 +76,10 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
}
if (freed_memory > 0) {
// Update the Space with any freed backing-store bytes.
space_->DecrementExternalBackingStoreBytes(freed_memory);
space()->DecrementExternalBackingStoreBytes(freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
page_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
}
......@@ -97,7 +99,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
// Track the backing-store usage against the owning Space.
space_->IncrementExternalBackingStoreBytes(length);
space()->IncrementExternalBackingStoreBytes(length);
auto ret = array_buffers_.insert({buffer, {buffer->backing_store(), length}});
USE(ret);
......@@ -108,7 +110,7 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
// Remove the backing-store accounting from the owning Space.
space_->DecrementExternalBackingStoreBytes(length);
space()->DecrementExternalBackingStoreBytes(length);
TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed find a key to remove.
......
......@@ -62,10 +62,10 @@ void LocalArrayBufferTracker::Process(Callback callback) {
}
if (moved_memory || freed_memory) {
// Update the Space with any moved or freed backing-store bytes.
space_->DecrementExternalBackingStoreBytes(freed_memory + moved_memory);
space()->DecrementExternalBackingStoreBytes(freed_memory + moved_memory);
// TODO(wez): Remove backing-store from external memory accounting.
space_->heap()->update_external_memory_concurrently_freed(
page_->heap()->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
......@@ -73,7 +73,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
// Pass the backing stores that need to be freed to the main thread for later
// distribution.
space_->heap()->array_buffer_collector()->AddGarbageAllocations(
page_->heap()->array_buffer_collector()->AddGarbageAllocations(
std::move(backing_stores_to_free));
}
......
......@@ -67,7 +67,7 @@ class LocalArrayBufferTracker {
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Space* space) : space_(space) {}
explicit LocalArrayBufferTracker(Page* page) : page_(page) {}
~LocalArrayBufferTracker();
inline void Add(JSArrayBuffer* buffer, size_t length);
......@@ -118,7 +118,9 @@ class LocalArrayBufferTracker {
typedef std::unordered_map<JSArrayBuffer*, BackingStoreAndLength, Hasher>
TrackingData;
Space* space_;
inline Space* space();
Page* page_;
// The set contains raw heap pointers which are removed by the GC upon
// processing the tracker through its owning page.
TrackingData array_buffers_;
......
......@@ -909,6 +909,15 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
// Accounting reset: records the page's entire usable area as allocated.
void Page::ResetAllocatedBytes() { allocated_bytes_ = area_size(); }
// Lazily creates this page's LocalArrayBufferTracker, passing the page
// itself so the tracker can look up the current owning space on demand.
// The raw `new` is paired with the explicit `delete` in
// MemoryChunk::ReleaseLocalTracker(); must not be called twice without an
// intervening release.
void Page::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(this);
}
// True when this page has a local ArrayBuffer tracker that currently tracks
// at least one buffer; pages without a tracker contain none by definition.
bool Page::contains_array_buffers() {
  if (local_tracker_ == nullptr) return false;
  return !local_tracker_->IsEmpty();
}
// Clears per-page free-list statistics; only wasted_memory_ is reset here.
void Page::ResetFreeListStatistics() {
wasted_memory_ = 0;
}
......@@ -1251,10 +1260,6 @@ bool MemoryAllocator::CommitExecutableMemory(VirtualMemory* vm, Address start,
// -----------------------------------------------------------------------------
// MemoryChunk implementation
// Deleted by this change: the query moved to Page::contains_array_buffers(),
// since only Pages can own local ArrayBuffer trackers.
bool MemoryChunk::contains_array_buffers() {
return local_tracker() != nullptr && !local_tracker()->IsEmpty();
}
void MemoryChunk::ReleaseAllocatedMemory() {
if (skip_list_ != nullptr) {
delete skip_list_;
......@@ -1372,11 +1377,6 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object,
}
}
// Deleted by this change: allocation moved to Page::AllocateLocalTracker(),
// which passes the page (not the space) to the tracker, because the space a
// page is in can change (e.g. compaction pages).
void MemoryChunk::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(owner());
}
void MemoryChunk::ReleaseLocalTracker() {
DCHECK_NOT_NULL(local_tracker_);
delete local_tracker_;
......@@ -1494,10 +1494,12 @@ void PagedSpace::RefillFreeList() {
void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
base::LockGuard<base::Mutex> guard(mutex());
IncrementExternalBackingStoreBytes(other->ExternalBackingStoreBytes());
other->DecrementExternalBackingStoreBytes(other->ExternalBackingStoreBytes());
DCHECK(identity() == other->identity());
// Unmerged fields:
// area_size_
other->FreeLinearAllocationArea();
// The linear allocation area of {other} should be destroyed now.
......@@ -2572,6 +2574,10 @@ void SemiSpace::Swap(SemiSpace* from, SemiSpace* to) {
std::swap(from->memory_chunk_list_, to->memory_chunk_list_);
std::swap(from->current_page_, to->current_page_);
size_t from_backing_bytes = from->ExternalBackingStoreBytes();
from->external_backing_store_bytes_ = to->ExternalBackingStoreBytes();
to->external_backing_store_bytes_ = from_backing_bytes;
to->FixPagesFlags(saved_to_space_flags, Page::kCopyOnFlipFlagsMask);
from->FixPagesFlags(0, 0);
}
......
......@@ -257,7 +257,7 @@ class MemoryChunk {
IS_EXECUTABLE = 1u << 0,
POINTERS_TO_HERE_ARE_INTERESTING = 1u << 1,
POINTERS_FROM_HERE_ARE_INTERESTING = 1u << 2,
// A page in new space has one of the next to flags set.
// A page in new space has one of the next two flags set.
IN_FROM_SPACE = 1u << 3,
IN_TO_SPACE = 1u << 4,
NEW_SPACE_BELOW_AGE_MARK = 1u << 5,
......@@ -499,10 +499,7 @@ class MemoryChunk {
void RegisterObjectWithInvalidatedSlots(HeapObject* object, int size);
InvalidatedSlots* invalidated_slots() { return invalidated_slots_; }
void AllocateLocalTracker();
void ReleaseLocalTracker();
inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
bool contains_array_buffers();
void AllocateYoungGenerationBitmap();
void ReleaseYoungGenerationBitmap();
......@@ -796,6 +793,10 @@ class Page : public MemoryChunk {
DCHECK(SweepingDone());
}
void AllocateLocalTracker();
inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
bool contains_array_buffers();
void ResetFreeListStatistics();
size_t AvailableInFreeList();
......@@ -883,12 +884,12 @@ class LargePage : public MemoryChunk {
class Space : public Malloced {
public:
Space(Heap* heap, AllocationSpace id)
: allocation_observers_paused_(false),
: external_backing_store_bytes_(0),
allocation_observers_paused_(false),
heap_(heap),
id_(id),
committed_(0),
max_committed_(0),
external_backing_store_bytes_(0) {}
max_committed_(0) {}
virtual ~Space() {}
......@@ -989,6 +990,9 @@ class Space : public Malloced {
// The List manages the pages that belong to the given space.
base::List<MemoryChunk> memory_chunk_list_;
// Tracks off-heap memory used by this space.
std::atomic<size_t> external_backing_store_bytes_;
private:
bool allocation_observers_paused_;
Heap* heap_;
......@@ -998,9 +1002,6 @@ class Space : public Malloced {
size_t committed_;
size_t max_committed_;
// Tracks off-heap memory used by this space.
std::atomic<size_t> external_backing_store_bytes_;
DISALLOW_COPY_AND_ASSIGN(Space);
};
......
......@@ -377,6 +377,46 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
CHECK_EQ(0, backing_store_after - backing_store_before);
}
// Verifies that external backing-store byte accounting for old space stays
// correct when the buffer's page is evacuated during Mark-Compact: because
// the tracker is attached to the page, the bytes must remain attributed to
// old space after evacuation, and return to the baseline once the buffer
// dies.
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
if (FLAG_never_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
heap::AbandonCurrentlyFreeMemory(heap->old_space());
// Baseline accounting before the test buffer exists.
const size_t backing_store_before =
heap->old_space()->ExternalBackingStoreBytes();
const size_t kArraybufferSize = 117;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 =
v8::ArrayBuffer::New(isolate, kArraybufferSize);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
CHECK(IsTracked(*buf1));
// Two new-space GCs — presumably enough to promote the buffer to old
// space; confirm against the promotion policy.
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
// Force the buffer's page to be an evacuation candidate, then run a full
// GC so the page is actually evacuated.
Page* page_before_gc = Page::FromAddress(buf1->address());
heap::ForceEvacuationCandidate(page_before_gc);
CHECK(IsTracked(*buf1));
CcTest::CollectAllGarbage();
// The live buffer's bytes must still be charged to old space.
const size_t backing_store_after =
heap->old_space()->ExternalBackingStoreBytes();
CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
}
// Handle scope is gone; an old-space GC frees the buffer and the accounting
// drops back to the baseline.
heap::GcAndSweep(heap, OLD_SPACE);
const size_t backing_store_after =
heap->old_space()->ExternalBackingStoreBytes();
CHECK_EQ(0, backing_store_after - backing_store_before);
}
} // namespace heap
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment