Commit 3c0fb324 authored by Dominik Inführ, committed by Commit Bot

[heap] Remove ArrayBufferTracker

ArrayBufferTracker was superseded by ArrayBufferList and
ArrayBufferSweeper. Now that ArrayBufferSweeper is used in production,
we can remove the unused ArrayBufferTracker mechanism.

Bug: v8:10064
Change-Id: I479169c76b6c5c634672024f77e689bb64a36504
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2339105
Reviewed-by: Simon Zünd <szuend@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69351}
parent 01a96a90
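The commit message names the replacement but does not show it, so here is a minimal, self-contained sketch (not code from this CL) of the list-plus-sweeper design it refers to: every JSArrayBuffer owns an extension holding the shared_ptr to its BackingStore, extensions are chained into a per-heap list, and after marking the sweeper frees every extension that was not marked. All struct, field, and function names below are simplified assumptions.

// Sketch of the ArrayBufferList/ArrayBufferSweeper idea (illustrative only).
#include <cstddef>
#include <memory>

struct BackingStore {  // stand-in for v8::internal::BackingStore
  size_t byte_length = 0;
};

struct ArrayBufferExtension {
  std::shared_ptr<BackingStore> backing_store;  // owns the external memory
  size_t accounting_length = 0;                 // bytes charged to the heap
  bool marked = false;                          // set by the marker for live buffers
  ArrayBufferExtension* next = nullptr;         // intrusive list link
};

struct ArrayBufferList {
  ArrayBufferExtension* head = nullptr;
  size_t bytes = 0;

  void Append(ArrayBufferExtension* extension) {
    extension->next = head;
    head = extension;
    bytes += extension->accounting_length;
  }
};

// Keep marked extensions (clearing the mark for the next GC cycle) and delete
// unmarked ones; deleting an extension drops its shared_ptr, which frees the
// backing store once the last reference is gone.
inline ArrayBufferList SweepList(ArrayBufferList list) {
  ArrayBufferList survivors;
  for (ArrayBufferExtension* current = list.head; current != nullptr;) {
    ArrayBufferExtension* next = current->next;
    if (current->marked) {
      current->marked = false;
      survivors.Append(current);
    } else {
      delete current;
    }
    current = next;
  }
  return survivors;
}

In the actual sources (src/heap/array-buffer-sweeper.cc, already in the tree before this CL) the sweep runs concurrently and keeps separate young and old lists, which is why the heap code below reports external array-buffer memory via array_buffer_sweeper()->YoungBytes() and OldBytes().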
......@@ -130,9 +130,6 @@ declare_args() {
# Sets -dV8_CONCURRENT_MARKING
v8_enable_concurrent_marking = true
# Sets -dV8_ARRAY_BUFFER_EXTENSION
v8_enable_array_buffer_extension = true
# Runs mksnapshot with --turbo-profiling. After building in this
# configuration, any subsequent run of d8 will output information about usage
# of basic blocks in builtins.
......@@ -593,9 +590,6 @@ config("features") {
if (v8_enable_concurrent_marking) {
defines += [ "V8_CONCURRENT_MARKING" ]
}
if (v8_enable_array_buffer_extension) {
defines += [ "V8_ARRAY_BUFFER_EXTENSION" ]
}
if (v8_enable_lazy_source_positions) {
defines += [ "V8_ENABLE_LAZY_SOURCE_POSITIONS" ]
}
......@@ -2510,13 +2504,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/allocation-observer.cc",
"src/heap/allocation-observer.h",
"src/heap/allocation-stats.h",
"src/heap/array-buffer-collector.cc",
"src/heap/array-buffer-collector.h",
"src/heap/array-buffer-sweeper.cc",
"src/heap/array-buffer-sweeper.h",
"src/heap/array-buffer-tracker-inl.h",
"src/heap/array-buffer-tracker.cc",
"src/heap/array-buffer-tracker.h",
"src/heap/barrier.h",
"src/heap/base-space.cc",
"src/heap/base-space.h",
......
......@@ -68,10 +68,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
StoreJSArrayBufferBackingStore(
buffer,
EncodeExternalPointer(ReinterpretCast<RawPtrT>(IntPtrConstant(0))));
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
IntPtrConstant(0));
}
for (int offset = JSArrayBuffer::kHeaderSize;
offset < JSArrayBuffer::kSizeWithEmbedderFields; offset += kTaggedSize) {
StoreObjectFieldNoWriteBarrier(buffer, offset, SmiConstant(0));
......
......@@ -387,8 +387,8 @@ DEFINE_BOOL(assert_types, false,
DEFINE_BOOL(allocation_site_pretenuring, true,
"pretenure with allocation sites")
DEFINE_BOOL(page_promotion, true, "promote pages based on utilization")
DEFINE_BOOL(always_promote_young_mc, true,
"always promote young objects during mark-compact")
DEFINE_BOOL_READONLY(always_promote_young_mc, true,
"always promote young objects during mark-compact")
DEFINE_INT(page_promotion_threshold, 70,
"min percentage of live bytes on a page to enable fast evacuation")
DEFINE_BOOL(trace_pretenuring, false,
......@@ -945,20 +945,11 @@ DEFINE_BOOL(write_protect_code_memory, true, "write protect code memory")
#endif
DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING_BOOL,
"use concurrent marking")
#ifdef V8_ARRAY_BUFFER_EXTENSION
#define V8_ARRAY_BUFFER_EXTENSION_BOOL true
#else
#define V8_ARRAY_BUFFER_EXTENSION_BOOL false
#endif
DEFINE_BOOL_READONLY(array_buffer_extension, V8_ARRAY_BUFFER_EXTENSION_BOOL,
"enable array buffer tracking using extension objects")
DEFINE_IMPLICATION(array_buffer_extension, always_promote_young_mc)
DEFINE_BOOL(concurrent_array_buffer_sweeping, true,
"concurrently sweep array buffers")
DEFINE_BOOL(concurrent_allocation, false, "concurrently allocate in old space")
DEFINE_BOOL(local_heaps, false, "allow heap access from background tasks")
DEFINE_IMPLICATION(concurrent_inlining, local_heaps)
DEFINE_NEG_NEG_IMPLICATION(array_buffer_extension, local_heaps)
DEFINE_BOOL(stress_concurrent_allocation, false,
"start background threads that allocate memory")
DEFINE_IMPLICATION(stress_concurrent_allocation, concurrent_allocation)
......@@ -1814,7 +1805,6 @@ DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_store_buffer)
#ifdef ENABLE_MINOR_MC
DEFINE_NEG_IMPLICATION(single_threaded_gc, minor_mc_parallel_marking)
#endif // ENABLE_MINOR_MC
DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_freeing)
DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_sweeping)
#undef FLAG
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-collector.h"
#include "src/base/template-utils.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/tasks/cancelable-task.h"
#include "src/tasks/task-utils.h"
namespace v8 {
namespace internal {
void ArrayBufferCollector::QueueOrFreeGarbageAllocations(
std::vector<std::shared_ptr<BackingStore>> backing_stores) {
if (heap_->ShouldReduceMemory()) {
// Destruct the vector, which destructs the std::shared_ptrs, freeing
// the backing stores.
backing_stores.clear();
} else {
base::MutexGuard guard(&allocations_mutex_);
allocations_.push_back(std::move(backing_stores));
}
}
void ArrayBufferCollector::PerformFreeAllocations() {
base::MutexGuard guard(&allocations_mutex_);
// Destruct the vector, which destructs the vectors of std::shared_ptrs,
// freeing the backing stores if their refcount drops to zero.
allocations_.clear();
}
void ArrayBufferCollector::FreeAllocations() {
// TODO(wez): Remove backing-store from external memory accounting.
heap_->account_external_memory_concurrently_freed();
if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() &&
FLAG_concurrent_array_buffer_freeing) {
V8::GetCurrentPlatform()->CallOnWorkerThread(
MakeCancelableTask(heap_->isolate(), [this] {
TRACE_BACKGROUND_GC(
heap_->tracer(),
GCTracer::BackgroundScope::BACKGROUND_ARRAY_BUFFER_FREE);
PerformFreeAllocations();
}));
} else {
// Fallback for when concurrency is disabled/restricted. This is e.g. the
// case when the GC should reduce memory. For such GCs the
// QueueOrFreeGarbageAllocations() call would immediately free the
// allocations and this call would free already queued ones.
PerformFreeAllocations();
}
}
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_
#define V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_
#include <vector>
#include "src/base/platform/mutex.h"
#include "src/objects/js-array-buffer.h"
namespace v8 {
namespace internal {
class Heap;
// To support background processing of array buffer backing stores, we process
// array buffers using the ArrayBufferTracker class. The ArrayBufferCollector
// keeps track of garbage backing stores so that they can be freed on a
// background thread.
class ArrayBufferCollector {
public:
explicit ArrayBufferCollector(Heap* heap) : heap_(heap) {}
~ArrayBufferCollector() { PerformFreeAllocations(); }
// These allocations will be either
// - freed immediately when under memory pressure, or
// - queued for freeing in FreeAllocations() or during tear down.
//
// FreeAllocations() potentially triggers a background task for processing.
void QueueOrFreeGarbageAllocations(
std::vector<std::shared_ptr<BackingStore>> allocations);
// Calls FreeAllocations() on a background thread.
void FreeAllocations();
private:
class FreeingTask;
// Begin freeing the allocations added through QueueOrFreeGarbageAllocations.
// Also called by TearDown.
void PerformFreeAllocations();
Heap* const heap_;
base::Mutex allocations_mutex_;
std::vector<std::vector<std::shared_ptr<BackingStore>>> allocations_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_
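For contrast with the sweeper sketch above, here is a hypothetical, stand-alone illustration of the concurrency pattern the removed ArrayBufferCollector implemented: garbage backing stores are queued under a mutex during GC and the queue is drained later, possibly on a worker thread. std::thread and every name below are stand-ins, not V8 APIs; the real collector posted a cancelable task on the platform's worker-thread pool.

#include <memory>
#include <mutex>
#include <thread>
#include <vector>

struct BackingStore {};  // stand-in for v8::internal::BackingStore

class CollectorSketch {
 public:
  // Under memory pressure, free immediately; otherwise queue for later.
  void QueueOrFreeGarbage(std::vector<std::shared_ptr<BackingStore>> stores,
                          bool should_reduce_memory) {
    if (should_reduce_memory) {
      stores.clear();
    } else {
      std::lock_guard<std::mutex> guard(mutex_);
      queued_.push_back(std::move(stores));
    }
  }

  // Drain the queue on a background thread; dropping the shared_ptrs frees
  // the backing stores. (The real collector tied the task's lifetime to the
  // isolate; a detached thread is only a simplification.)
  void FreeAllocationsOnWorker() {
    std::thread([this] {
      std::lock_guard<std::mutex> guard(mutex_);
      queued_.clear();
    }).detach();
  }

 private:
  std::mutex mutex_;
  std::vector<std::vector<std::shared_ptr<BackingStore>>> queued_;
};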
......@@ -67,7 +67,6 @@ size_t ArrayBufferList::BytesSlow() {
void ArrayBufferSweeper::EnsureFinished() {
if (!sweeping_in_progress_) return;
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
TryAbortResult abort_result =
heap_->isolate()->cancelable_task_manager()->TryAbort(job_.id);
......@@ -138,8 +137,6 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
if (young_.IsEmpty() && (old_.IsEmpty() || scope == SweepingScope::Young))
return;
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() &&
FLAG_concurrent_array_buffer_sweeping) {
Prepare(scope);
......@@ -208,7 +205,6 @@ void ArrayBufferSweeper::ReleaseAll(ArrayBufferList* list) {
void ArrayBufferSweeper::Append(JSArrayBuffer object,
ArrayBufferExtension* extension) {
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
size_t bytes = extension->accounting_length();
if (Heap::InYoungGeneration(object)) {
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
#define V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces-inl.h"
#include "src/numbers/conversions-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/objects.h"
#define TRACE_BS(...) \
do { \
if (FLAG_trace_backing_store) PrintF(__VA_ARGS__); \
} while (false)
namespace v8 {
namespace internal {
void ArrayBufferTracker::RegisterNew(
Heap* heap, JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
if (!backing_store) return;
// If {buffer_start} is {nullptr}, we don't have to track and free it.
if (!backing_store->buffer_start()) return;
// ArrayBuffer tracking works only for small objects.
DCHECK(!heap->IsLargeObject(buffer));
DCHECK_EQ(backing_store->buffer_start(), buffer.backing_store());
const size_t length = backing_store->PerIsolateAccountingLength();
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) {
page->AllocateLocalTracker();
tracker = page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
TRACE_BS("ABT:reg bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
backing_store->byte_length(), backing_store.use_count());
tracker->Add(buffer, std::move(backing_store));
}
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
Heap* heap, JSArrayBuffer buffer) {
std::shared_ptr<BackingStore> backing_store;
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
DCHECK_NOT_NULL(tracker);
backing_store = tracker->Remove(buffer);
}
// TODO(wez): Remove backing-store from external memory accounting.
const size_t length = backing_store->PerIsolateAccountingLength();
heap->update_external_memory(-static_cast<intptr_t>(length));
return backing_store;
}
std::shared_ptr<BackingStore> ArrayBufferTracker::Lookup(Heap* heap,
JSArrayBuffer buffer) {
if (buffer.backing_store() == nullptr) return {};
Page* page = Page::FromHeapObject(buffer);
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
DCHECK_NOT_NULL(tracker);
return tracker->Lookup(buffer);
}
template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) {
size_t freed_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
// Unchecked cast because the map might already be dead at this point.
JSArrayBuffer buffer = JSArrayBuffer::unchecked_cast(it->first);
const size_t length = it->second->PerIsolateAccountingLength();
if (should_free(buffer)) {
// Destroy the shared pointer, (perhaps) freeing the backing store.
TRACE_BS("ABT:die bs=%p mem=%p (length=%zu) cnt=%ld\n",
it->second.get(), it->second->buffer_start(),
it->second->byte_length(), it->second.use_count());
it = array_buffers_.erase(it);
freed_memory += length;
} else {
++it;
}
}
if (freed_memory > 0) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(freed_memory);
}
}
template <typename MarkingState>
void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
// Callers need to ensure they hold the page lock.
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
tracker->Free([marking_state](JSArrayBuffer buffer) {
return marking_state->IsWhite(buffer);
});
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
}
void LocalArrayBufferTracker::Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
auto length = backing_store->PerIsolateAccountingLength();
page_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
AddInternal(buffer, std::move(backing_store));
}
void LocalArrayBufferTracker::AddInternal(
JSArrayBuffer buffer, std::shared_ptr<BackingStore> backing_store) {
auto ret = array_buffers_.insert({buffer, std::move(backing_store)});
USE(ret);
// Check that we indeed inserted a new value and did not overwrite an existing
// one (which would be a bug).
DCHECK(ret.second);
}
std::shared_ptr<BackingStore> LocalArrayBufferTracker::Remove(
JSArrayBuffer buffer) {
TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed find a key to remove.
DCHECK(it != array_buffers_.end());
// Steal the underlying shared pointer before erasing the entry.
std::shared_ptr<BackingStore> backing_store = std::move(it->second);
TRACE_BS("ABT:rm bs=%p mem=%p (length=%zu) cnt=%ld\n", backing_store.get(),
backing_store->buffer_start(), backing_store->byte_length(),
backing_store.use_count());
// Erase the entry.
array_buffers_.erase(it);
// Update accounting.
auto length = backing_store->PerIsolateAccountingLength();
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
return backing_store;
}
std::shared_ptr<BackingStore> LocalArrayBufferTracker::Lookup(
JSArrayBuffer buffer) {
TrackingData::iterator it = array_buffers_.find(buffer);
if (it != array_buffers_.end()) {
return it->second;
}
return {};
}
#undef TRACE_BS
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include <vector>
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/spaces.h"
#define TRACE_BS(...) \
do { \
if (FLAG_trace_backing_store) PrintF(__VA_ARGS__); \
} while (false)
namespace v8 {
namespace internal {
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
CHECK(array_buffers_.empty());
}
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
std::vector<std::shared_ptr<BackingStore>> backing_stores_to_free;
TrackingData kept_array_buffers;
JSArrayBuffer new_buffer;
JSArrayBuffer old_buffer;
size_t freed_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end(); ++it) {
old_buffer = it->first;
DCHECK_EQ(page_, Page::FromHeapObject(old_buffer));
const CallbackResult result = callback(old_buffer, &new_buffer);
if (result == kKeepEntry) {
kept_array_buffers.insert(std::move(*it));
} else if (result == kUpdateEntry) {
DCHECK_EQ(old_buffer.byte_length(), new_buffer.byte_length());
DCHECK(!new_buffer.is_null());
Page* target_page = Page::FromHeapObject(new_buffer);
{
base::MutexGuard guard(target_page->mutex());
LocalArrayBufferTracker* tracker = target_page->local_tracker();
if (tracker == nullptr) {
target_page->AllocateLocalTracker();
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
const size_t length = it->second->PerIsolateAccountingLength();
// We should decrement before adding to avoid potential overflows in
// the external memory counters.
tracker->AddInternal(new_buffer, std::move(it->second));
MemoryChunk::MoveExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer,
static_cast<MemoryChunk*>(page_),
static_cast<MemoryChunk*>(target_page), length);
}
} else if (result == kRemoveEntry) {
auto backing_store = std::move(it->second);
freed_memory += backing_store->PerIsolateAccountingLength();
TRACE_BS("ABT:queue bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
backing_store->byte_length(), backing_store.use_count());
if (!backing_store->is_shared()) {
// Only retain non-shared backing stores. For shared backing stores,
// drop the shared_ptr right away; this should be cheap, since it only
// updates a refcount, except for the last reference, which destructs
// the backing store (a rare case).
backing_stores_to_free.push_back(backing_store);
}
} else {
UNREACHABLE();
}
}
if (freed_memory) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(freed_memory);
}
array_buffers_.swap(kept_array_buffers);
// Pass the backing stores that need to be freed to the main thread for
// potential later distribution.
page_->heap()->array_buffer_collector()->QueueOrFreeGarbageAllocations(
std::move(backing_stores_to_free));
}
void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
for (Page* page :
PageRange(heap->new_space()->from_space().first_page(), nullptr)) {
bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
CHECK(empty);
}
}
void ArrayBufferTracker::FreeAll(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
tracker->Free([](JSArrayBuffer buffer) { return true; });
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
}
bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return true;
DCHECK(page->SweepingDone());
tracker->Process([mode](JSArrayBuffer old_buffer, JSArrayBuffer* new_buffer) {
MapWord map_word = old_buffer.map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kUpdateEntry;
}
return mode == kUpdateForwardedKeepOthers
? LocalArrayBufferTracker::kKeepEntry
: LocalArrayBufferTracker::kRemoveEntry;
});
return tracker->IsEmpty();
}
bool ArrayBufferTracker::IsTracked(JSArrayBuffer buffer) {
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return false;
return tracker->IsTracked(buffer);
}
}
void ArrayBufferTracker::TearDown(Heap* heap) {
// ArrayBuffers can only be found in NEW_SPACE and OLD_SPACE.
for (Page* p : *heap->old_space()) {
FreeAll(p);
}
NewSpace* new_space = heap->new_space();
if (new_space->to_space().is_committed()) {
for (Page* p : new_space->to_space()) {
FreeAll(p);
}
}
#ifdef DEBUG
if (new_space->from_space().is_committed()) {
for (Page* p : new_space->from_space()) {
DCHECK(!p->contains_array_buffers());
}
}
#endif // DEBUG
}
} // namespace internal
} // namespace v8
#undef TRACE_BS
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_TRACKER_H_
#define V8_HEAP_ARRAY_BUFFER_TRACKER_H_
#include <unordered_map>
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/objects/backing-store.h"
#include "src/objects/js-array-buffer.h"
#include "src/utils/allocation.h"
namespace v8 {
namespace internal {
class MarkingState;
class Page;
class Space;
class ArrayBufferTracker : public AllStatic {
public:
enum ProcessingMode {
kUpdateForwardedRemoveOthers,
kUpdateForwardedKeepOthers,
};
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.
// Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
// access to the tracker by taking the page lock for the corresponding page.
inline static void RegisterNew(Heap* heap, JSArrayBuffer buffer,
std::shared_ptr<BackingStore>);
inline static std::shared_ptr<BackingStore> Unregister(Heap* heap,
JSArrayBuffer buffer);
inline static std::shared_ptr<BackingStore> Lookup(Heap* heap,
JSArrayBuffer buffer);
// Identifies all backing store pointers for dead JSArrayBuffers in new space.
// Does not take any locks and can only be called during Scavenge.
static void PrepareToFreeDeadInNewSpace(Heap* heap);
// Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be
// taken by the caller.
template <typename MarkingState>
static void FreeDead(Page* page, MarkingState* marking_state);
// Frees all remaining, live or dead, array buffers on a page. Only useful
// during tear down.
static void FreeAll(Page* page);
// Processes all array buffers on a given page. |mode| specifies the action
// to perform on the buffers. Returns whether the tracker is empty or not.
static bool ProcessBuffers(Page* page, ProcessingMode mode);
// Returns whether a buffer is currently tracked.
V8_EXPORT_PRIVATE static bool IsTracked(JSArrayBuffer buffer);
// Tears down the tracker and frees up all registered array buffers.
static void TearDown(Heap* heap);
};
// LocalArrayBufferTracker tracks internalized array buffers.
//
// Never use directly but instead always call through |ArrayBufferTracker|.
class LocalArrayBufferTracker {
public:
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Page* page) : page_(page) {}
~LocalArrayBufferTracker();
inline void Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
inline std::shared_ptr<BackingStore> Remove(JSArrayBuffer buffer);
inline std::shared_ptr<BackingStore> Lookup(JSArrayBuffer buffer);
// Frees up array buffers.
//
// Sample usage:
// Free([](HeapObject array_buffer) {
// if (should_free_internal(array_buffer)) return true;
// return false;
// });
template <typename Callback>
void Free(Callback should_free);
// Processes buffers one by one. The CallbackResult of the callback decides
// what action to take on the buffer.
//
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer buffer, JSArrayBuffer* new_buffer);
template <typename Callback>
void Process(Callback callback);
bool IsEmpty() const { return array_buffers_.empty(); }
bool IsTracked(JSArrayBuffer buffer) const {
return array_buffers_.find(buffer) != array_buffers_.end();
}
private:
class Hasher {
public:
size_t operator()(JSArrayBuffer buffer) const {
return static_cast<size_t>(buffer.ptr() >> 3);
}
};
using TrackingData =
std::unordered_map<JSArrayBuffer, std::shared_ptr<BackingStore>, Hasher>;
// Internal version of add that does not update counters. Requires separate
// logic for updating external memory counters.
inline void AddInternal(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
Page* page_;
// The set contains raw heap pointers which are removed by the GC upon
// processing the tracker through its owning page.
TrackingData array_buffers_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
......@@ -27,7 +27,6 @@ class Isolate;
class LargeObjectSpace;
class LargePage;
class LinearAllocationArea;
class LocalArrayBufferTracker;
class Page;
class PagedSpace;
class SemiSpace;
......
......@@ -28,9 +28,7 @@
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/barrier.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/code-stats.h"
......@@ -2888,20 +2886,6 @@ void* Heap::AllocateExternalBackingStore(
return allocate(byte_length);
}
void Heap::RegisterBackingStore(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
ArrayBufferTracker::RegisterNew(this, buffer, std::move(backing_store));
}
std::shared_ptr<BackingStore> Heap::UnregisterBackingStore(
JSArrayBuffer buffer) {
return ArrayBufferTracker::Unregister(this, buffer);
}
std::shared_ptr<BackingStore> Heap::LookupBackingStore(JSArrayBuffer buffer) {
return ArrayBufferTracker::Lookup(this, buffer);
}
void Heap::ConfigureInitialOldGenerationSize() {
if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
const size_t minimum_growing_step =
......@@ -3900,20 +3884,7 @@ void Heap::MemoryPressureNotification(MemoryPressureLevel level,
}
void Heap::EagerlyFreeExternalMemory() {
if (FLAG_array_buffer_extension) {
array_buffer_sweeper()->EnsureFinished();
} else {
CHECK(!FLAG_local_heaps);
for (Page* page : *old_space()) {
if (!page->SweepingDone()) {
base::MutexGuard guard(page->mutex());
if (!page->SweepingDone()) {
ArrayBufferTracker::FreeDead(
page, mark_compact_collector()->non_atomic_marking_state());
}
}
}
}
memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
......@@ -5334,7 +5305,6 @@ void Heap::SetUpSpaces() {
#else
minor_mark_compact_collector_ = nullptr;
#endif // ENABLE_MINOR_MC
array_buffer_collector_.reset(new ArrayBufferCollector(this));
array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
gc_idle_time_handler_.reset(new GCIdleTimeHandler());
memory_measurement_.reset(new MemoryMeasurement(isolate()));
......@@ -5578,7 +5548,6 @@ void Heap::TearDown() {
#endif // ENABLE_MINOR_MC
scavenger_collector_.reset();
array_buffer_collector_.reset();
array_buffer_sweeper_.reset();
incremental_marking_.reset();
concurrent_marking_.reset();
......@@ -5599,11 +5568,6 @@ void Heap::TearDown() {
external_string_table_.TearDown();
// Tear down all ArrayBuffers before tearing down the heap since their
// byte_length may be a HeapNumber which is required for freeing the backing
// store.
ArrayBufferTracker::TearDown(this);
tracer_.reset();
for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) {
......@@ -6182,12 +6146,10 @@ void Heap::RememberUnmappedPage(Address page, bool compacted) {
}
size_t Heap::YoungArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->YoungBytes();
}
size_t Heap::OldArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->OldBytes();
}
......
......@@ -788,10 +788,6 @@ class Heap {
return minor_mark_compact_collector_;
}
ArrayBufferCollector* array_buffer_collector() {
return array_buffer_collector_.get();
}
ArrayBufferSweeper* array_buffer_sweeper() {
return array_buffer_sweeper_.get();
}
......@@ -1348,14 +1344,6 @@ class Heap {
V8_EXPORT_PRIVATE void* AllocateExternalBackingStore(
const std::function<void*(size_t)>& allocate, size_t byte_length);
// ===========================================================================
// ArrayBuffer tracking. =====================================================
// ===========================================================================
void RegisterBackingStore(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
std::shared_ptr<BackingStore> UnregisterBackingStore(JSArrayBuffer buffer);
std::shared_ptr<BackingStore> LookupBackingStore(JSArrayBuffer buffer);
// ===========================================================================
// Allocation site tracking. =================================================
// ===========================================================================
......@@ -2140,7 +2128,6 @@ class Heap {
std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
MinorMarkCompactCollector* minor_mark_compact_collector_ = nullptr;
std::unique_ptr<ScavengerCollector> scavenger_collector_;
std::unique_ptr<ArrayBufferCollector> array_buffer_collector_;
std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
std::unique_ptr<MemoryAllocator> memory_allocator_;
......
......@@ -13,9 +13,7 @@
#include "src/execution/frames-inl.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking-inl.h"
......@@ -3050,7 +3048,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::VisitBlackObjectsNoFail(
chunk, marking_state, &new_space_visitor_,
LiveObjectVisitor::kClearMarkbits);
// ArrayBufferTracker will be updated during pointers updating.
break;
case kPageNewToOld:
LiveObjectVisitor::VisitBlackObjectsNoFail(
......@@ -3058,7 +3055,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::kKeepMarking);
new_to_old_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
// ArrayBufferTracker will be updated during sweeping.
break;
case kPageNewToNew:
LiveObjectVisitor::VisitBlackObjectsNoFail(
......@@ -3066,7 +3062,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::kKeepMarking);
new_to_new_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
// ArrayBufferTracker will be updated during sweeping.
break;
case kObjectsOldToOld: {
const bool success = LiveObjectVisitor::VisitBlackObjects(
......@@ -3076,8 +3071,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
// Aborted compaction page. Actual processing happens on the main
// thread for simplicity reasons.
collector_->ReportAbortedEvacuationCandidate(failed_object, chunk);
} else {
// ArrayBufferTracker will be updated during pointers updating.
}
break;
}
......@@ -3188,7 +3181,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
for (Page* page : new_space_evacuation_pages_) {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
if (live_bytes_on_page == 0) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page,
FLAG_always_promote_young_mc)) {
......@@ -3715,40 +3708,6 @@ UpdatingItem* MarkCompactCollector::CreateRememberedSetUpdatingItem(
heap(), non_atomic_marking_state(), chunk, updating_mode);
}
// Update array buffers on a page that has been evacuated by copying objects.
// Target page exclusivity in old space is guaranteed by the fact that
// evacuation tasks either (a) retrieved a fresh page, or (b) retrieved all
// free list items of a given page. For new space the tracker will update
// using a lock.
class ArrayBufferTrackerUpdatingItem : public UpdatingItem {
public:
enum EvacuationState { kRegular, kAborted };
explicit ArrayBufferTrackerUpdatingItem(Page* page, EvacuationState state)
: page_(page), state_(state) {}
~ArrayBufferTrackerUpdatingItem() override = default;
void Process() override {
TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"ArrayBufferTrackerUpdatingItem::Process", "EvacuationState",
state_);
switch (state_) {
case EvacuationState::kRegular:
ArrayBufferTracker::ProcessBuffers(
page_, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
break;
case EvacuationState::kAborted:
ArrayBufferTracker::ProcessBuffers(
page_, ArrayBufferTracker::kUpdateForwardedKeepOthers);
break;
}
}
private:
Page* const page_;
const EvacuationState state_;
};
int MarkCompactCollectorBase::CollectToSpaceUpdatingItems(
ItemParallelJob* job) {
// Seed to space pages.
......@@ -3799,45 +3758,6 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
return pages;
}
int MarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
return pages;
}
int MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : old_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsOldToOld &&
p->IsEvacuationCandidate()) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
for (auto object_and_page : aborted_evacuation_candidates_) {
Page* p = object_and_page.second;
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kAborted));
}
return pages;
}
class EphemeronTableUpdatingItem : public UpdatingItem {
public:
enum EvacuationState { kRegular, kAborted };
......@@ -3947,8 +3867,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
&page_parallel_job_semaphore_);
int array_buffer_pages = 0;
array_buffer_pages += CollectNewSpaceArrayBufferTrackerItems(&updating_job);
array_buffer_pages += CollectOldSpaceArrayBufferTrackerItems(&updating_job);
int remembered_set_pages = 0;
remembered_set_pages += CollectRememberedSetUpdatingItems(
......@@ -3966,7 +3884,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
}
updating_job.Run();
heap()->array_buffer_collector()->FreeAllocations();
}
}
......@@ -4082,7 +3999,6 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
PrintIsolate(isolate(), "sweeping: released page: %p",
static_cast<void*>(p));
}
ArrayBufferTracker::FreeAll(p);
space->memory_chunk_list().Remove(p);
space->ReleasePage(p);
continue;
......@@ -4449,7 +4365,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
&page_parallel_job_semaphore_);
CollectNewSpaceArrayBufferTrackerItems(&updating_job);
// Create batches of global handles.
const int to_space_tasks = CollectToSpaceUpdatingItems(&updating_job);
int remembered_set_pages = 0;
......@@ -4491,7 +4406,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS);
updating_job.Run();
heap()->array_buffer_collector()->FreeAllocations();
}
{
......@@ -5151,7 +5065,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
LiveObjectVisitor::VisitGreyObjectsNoFail(
chunk, marking_state, &new_space_visitor_,
LiveObjectVisitor::kClearMarkbits);
// ArrayBufferTracker will be updated during pointers updating.
break;
case kPageNewToOld:
LiveObjectVisitor::VisitGreyObjectsNoFail(
......@@ -5160,9 +5073,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
new_to_old_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
if (!chunk->IsLargePage()) {
// TODO(mlippautz): If cleaning array buffers is too slow here we can
// delay it until the next GC.
ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk),
MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
......@@ -5183,9 +5093,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
new_to_new_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
DCHECK(!chunk->IsLargePage());
// TODO(mlippautz): If cleaning array buffers is too slow here we can
// delay it until the next GC.
ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk),
MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
......@@ -5211,7 +5118,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
for (Page* page : new_space_evacuation_pages_) {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
if (live_bytes_on_page == 0) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page, false)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
......@@ -5244,21 +5151,6 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
this, &evacuation_job, &observer, live_bytes);
}
int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
return pages;
}
#endif // ENABLE_MINOR_MC
} // namespace internal
......
......@@ -717,9 +717,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
void ReleaseEvacuationCandidates();
void PostProcessEvacuationCandidates();
void ReportAbortedEvacuationCandidate(HeapObject failed_object,
......@@ -861,8 +858,6 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int NumberOfParallelMarkingTasks(int pages);
void SweepArrayBufferExtensions();
......
......@@ -16,7 +16,6 @@ class Bitmap;
class CodeObjectRegistry;
class FreeListCategory;
class Heap;
class LocalArrayBuferTracker;
class TypedSlotsSet;
class SlotSet;
......@@ -58,7 +57,6 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
FIELD(std::atomic<size_t>[kNumTypes], ExternalBackingStoreBytes),
FIELD(heap::ListNode<MemoryChunk>, ListNode),
FIELD(FreeListCategory**, Categories),
FIELD(LocalArrayBuferTracker*, LocalTracker),
FIELD(std::atomic<intptr_t>, YoungGenerationLiveByteCount),
FIELD(Bitmap*, YoungGenerationBitmap),
FIELD(CodeObjectRegistry*, CodeObjectRegistry),
......
......@@ -5,7 +5,6 @@
#include "src/heap/memory-chunk.h"
#include "src/base/platform/platform.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/memory-allocator.h"
#include "src/heap/memory-chunk-inl.h"
......@@ -123,7 +122,6 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
chunk->write_unprotect_counter_ = 0;
chunk->mutex_ = new base::Mutex();
chunk->young_generation_bitmap_ = nullptr;
chunk->local_tracker_ = nullptr;
chunk->external_backing_store_bytes_[ExternalBackingStoreType::kArrayBuffer] =
0;
......@@ -218,7 +216,6 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseInvalidatedSlots<OLD_TO_NEW>();
ReleaseInvalidatedSlots<OLD_TO_OLD>();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
if (!IsLargePage()) {
......@@ -375,12 +372,6 @@ bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
invalidated_slots<type>()->end();
}
void MemoryChunk::ReleaseLocalTracker() {
DCHECK_NOT_NULL(local_tracker_);
delete local_tracker_;
local_tracker_ = nullptr;
}
void MemoryChunk::AllocateYoungGenerationBitmap() {
DCHECK_NULL(young_generation_bitmap_);
young_generation_bitmap_ = static_cast<Bitmap*>(calloc(1, Bitmap::kSize));
......@@ -429,9 +420,6 @@ void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) {
MemoryChunkLayout::kListNodeOffset);
DCHECK_EQ(reinterpret_cast<Address>(&chunk->categories_) - chunk->address(),
MemoryChunkLayout::kCategoriesOffset);
DCHECK_EQ(
reinterpret_cast<Address>(&chunk->local_tracker_) - chunk->address(),
MemoryChunkLayout::kLocalTrackerOffset);
DCHECK_EQ(
reinterpret_cast<Address>(&chunk->young_generation_live_byte_count_) -
chunk->address(),
......
......@@ -23,7 +23,6 @@ namespace internal {
class CodeObjectRegistry;
class FreeListCategory;
class LocalArrayBufferTracker;
// MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always
......@@ -156,8 +155,6 @@ class MemoryChunk : public BasicMemoryChunk {
return invalidated_slots_[type];
}
void ReleaseLocalTracker();
void AllocateYoungGenerationBitmap();
void ReleaseYoungGenerationBitmap();
......@@ -297,8 +294,6 @@ class MemoryChunk : public BasicMemoryChunk {
FreeListCategory** categories_;
LocalArrayBufferTracker* local_tracker_;
std::atomic<intptr_t> young_generation_live_byte_count_;
Bitmap* young_generation_bitmap_;
......
......@@ -5,7 +5,6 @@
#include "src/heap/new-spaces.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
......@@ -21,7 +20,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) {
chunk->SetFlag(in_to_space ? MemoryChunk::TO_PAGE : MemoryChunk::FROM_PAGE);
Page* page = static_cast<Page*>(chunk);
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->AllocateLocalTracker();
page->list_node().Initialize();
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) {
......@@ -659,13 +657,6 @@ void NewSpace::Verify(Isolate* isolate) {
ExternalString external_string = ExternalString::cast(object);
size_t size = external_string.ExternalPayloadSize();
external_space_bytes[ExternalBackingStoreType::kExternalString] += size;
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = ArrayBufferTracker::Lookup(heap(), array_buffer)
->PerIsolateAccountingLength();
external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
current += size;
......@@ -677,18 +668,14 @@ void NewSpace::Verify(Isolate* isolate) {
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
if (i == ExternalBackingStoreType::kArrayBuffer) continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
size_t bytes = heap()->array_buffer_sweeper()->young().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));
}
// Check semi-spaces.
CHECK_EQ(from_space_.id(), kFromSpace);
......
......@@ -303,8 +303,7 @@ class V8_EXPORT_PRIVATE NewSpace
}
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
if (type == ExternalBackingStoreType::kArrayBuffer)
return heap()->YoungArrayBufferBytes();
DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes(type));
return to_space_.ExternalBackingStoreBytes(type);
......
......@@ -7,7 +7,6 @@
#include "src/heap/objects-visiting.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/mark-compact.h"
#include "src/objects/free-space-inl.h"
......
......@@ -9,7 +9,6 @@
#include "src/execution/isolate.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/memory-allocator.h"
......@@ -755,14 +754,6 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
ExternalString external_string = ExternalString::cast(object);
size_t size = external_string.ExternalPayloadSize();
external_page_bytes[ExternalBackingStoreType::kExternalString] += size;
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size =
ArrayBufferTracker::Lookup(isolate->heap(), array_buffer)
->PerIsolateAccountingLength();
external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
}
for (int i = 0; i < kNumTypes; i++) {
......@@ -772,15 +763,13 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
}
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
if (i == ExternalBackingStoreType::kArrayBuffer) continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
CHECK(allocation_pointer_found_in_space);
if (identity() == OLD_SPACE && V8_ARRAY_BUFFER_EXTENSION_BOOL) {
if (identity() == OLD_SPACE) {
size_t bytes = heap()->array_buffer_sweeper()->old().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));
......
......@@ -478,8 +478,7 @@ class OldSpace : public PagedSpace {
}
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
if (type == ExternalBackingStoreType::kArrayBuffer)
return heap()->OldArrayBufferBytes();
return external_backing_store_bytes_[type];
}
......
......@@ -4,7 +4,6 @@
#include "src/heap/scavenger.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/barrier.h"
#include "src/heap/gc-tracer.h"
......@@ -390,12 +389,6 @@ void ScavengerCollector::CollectGarbage() {
// Set age mark.
heap_->new_space_->set_age_mark(heap_->new_space()->top());
{
TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
}
heap_->array_buffer_collector()->FreeAllocations();
// Since we promote all surviving large objects immediately, all remaining
// large objects must be dead.
// TODO(hpayer): Don't free all as soon as we have an intermediate generation.
......
......@@ -12,7 +12,6 @@
#include "src/base/bounded-page-allocator.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/gc-tracer.h"
......@@ -116,15 +115,6 @@ void Page::MergeOldToNewRememberedSets() {
sweeping_slot_set_ = nullptr;
}
void Page::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(this);
}
bool Page::contains_array_buffers() {
return local_tracker_ != nullptr && !local_tracker_->IsEmpty();
}
size_t Page::AvailableInFreeList() {
size_t sum = 0;
ForAllFreeListCategories([&sum](FreeListCategory* category) {
......
......@@ -38,7 +38,6 @@ class Isolate;
class LargeObjectSpace;
class LargePage;
class LinearAllocationArea;
class LocalArrayBufferTracker;
class Page;
class PagedSpace;
class SemiSpace;
......@@ -267,10 +266,6 @@ class Page : public MemoryChunk {
}
}
void AllocateLocalTracker();
inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
bool contains_array_buffers();
size_t AvailableInFreeList();
size_t AvailableInFreeListFromAllocatedBytes() {
......
......@@ -5,7 +5,6 @@
#include "src/heap/sweeper.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/free-list-inl.h"
#include "src/heap/gc-tracer.h"
......@@ -354,10 +353,6 @@ int Sweeper::RawSweep(
// Phase 1: Prepare the page for sweeping.
// Before we sweep objects on the page, we free dead array buffers which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p, marking_state_);
// Set the allocated_bytes_ counter to area_size and clear the wasted_memory_
// counter. The free operations below will decrease allocated_bytes_ to actual
// live bytes and keep track of wasted_memory_.
......
......@@ -57,7 +57,6 @@ void JSArrayBuffer::SetBackingStoreRefForSerialization(uint32_t ref) {
}
ArrayBufferExtension* JSArrayBuffer::extension() const {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
// With pointer compression the extension-field might not be
// pointer-aligned. However on ARM64 this field needs to be aligned to
......@@ -81,13 +80,9 @@ ArrayBufferExtension* JSArrayBuffer::extension() const {
#else
return base::AsAtomicPointer::Acquire_Load(extension_location());
#endif
} else {
return nullptr;
}
}
void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
if (extension != nullptr) {
uintptr_t address = reinterpret_cast<uintptr_t>(extension);
......@@ -104,9 +99,6 @@ void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
WriteBarrier::Marking(*this, extension);
} else {
CHECK_EQ(extension, nullptr);
}
}
ArrayBufferExtension** JSArrayBuffer::extension_location() const {
......
......@@ -65,16 +65,12 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
set_byte_length(backing_store->byte_length());
if (backing_store->is_wasm_memory()) set_is_detachable(false);
if (!backing_store->free_on_destruct()) set_is_external(true);
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
Heap* heap = isolate->heap();
ArrayBufferExtension* extension = EnsureExtension();
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
heap->AppendArrayBufferExtension(*this, extension);
} else {
isolate->heap()->RegisterBackingStore(*this, std::move(backing_store));
}
Heap* heap = isolate->heap();
ArrayBufferExtension* extension = EnsureExtension();
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
heap->AppendArrayBufferExtension(*this, extension);
}
void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
......@@ -90,11 +86,7 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
Isolate* const isolate = GetIsolate();
if (backing_store()) {
std::shared_ptr<BackingStore> backing_store;
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
backing_store = RemoveExtension();
} else {
backing_store = isolate->heap()->UnregisterBackingStore(*this);
}
CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
}
......@@ -110,16 +102,11 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
}
std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
if (!extension()) return nullptr;
return extension()->backing_store();
} else {
return GetIsolate()->heap()->LookupBackingStore(*this);
}
}
ArrayBufferExtension* JSArrayBuffer::EnsureExtension() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
ArrayBufferExtension* extension = this->extension();
if (extension != nullptr) return extension;
......
......@@ -14,8 +14,7 @@ bitfield struct JSArrayBufferFlags extends uint32 {
extern class JSArrayBuffer extends JSObject {
byte_length: uintptr;
backing_store: ExternalPointer;
@if(V8_ARRAY_BUFFER_EXTENSION_BOOL) extension: RawPtr;
@ifnot(V8_ARRAY_BUFFER_EXTENSION_BOOL) extension: void;
extension: RawPtr;
bit_field: JSArrayBufferFlags;
// Pads header size to be a multiple of kTaggedSize.
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32;
......
......@@ -48,8 +48,6 @@ class BuildFlags : public ContextualClass<BuildFlags> {
build_flags_["V8_SFI_HAS_UNIQUE_ID"] = V8_SFI_HAS_UNIQUE_ID;
build_flags_["TAGGED_SIZE_8_BYTES"] = TAGGED_SIZE_8_BYTES;
build_flags_["V8_DOUBLE_FIELDS_UNBOXING"] = V8_DOUBLE_FIELDS_UNBOXING;
build_flags_["V8_ARRAY_BUFFER_EXTENSION_BOOL"] =
V8_ARRAY_BUFFER_EXTENSION_BOOL;
build_flags_["TRUE_FOR_TESTING"] = true;
build_flags_["FALSE_FOR_TESTING"] = false;
}
......
......@@ -3,7 +3,6 @@
// found in the LICENSE file.
#include "src/execution/isolate.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/factory.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/objects-inl.h"
......@@ -129,100 +128,6 @@ UNINITIALIZED_TEST(PagePromotion_NewToNew) {
isolate->Dispose();
}
UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc ||
i::FLAG_single_generation)
return;
// Test makes sure JSArrayBuffer backing stores are still tracked after
// new-to-new promotion.
v8::Isolate* isolate = NewIsolateForPagePromotion();
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Context::New(isolate)->Enter();
Heap* heap = i_isolate->heap();
// Fill the current page which potentially contains the age mark.
heap::FillCurrentPage(heap->new_space());
// Allocate a buffer we would like to check against.
Handle<JSArrayBuffer> buffer =
i_isolate->factory()
->NewJSArrayBufferAndBackingStore(100,
InitializedFlag::kZeroInitialized)
.ToHandleChecked();
std::vector<Handle<FixedArray>> handles;
// Simulate a full space, filling the interesting page with live objects.
heap::SimulateFullSpace(heap->new_space(), &handles);
CHECK_GT(handles.size(), 0u);
// First object in handles should be on the same page as the allocated
// JSArrayBuffer.
Handle<FixedArray> first_object = handles.front();
Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}
UNINITIALIZED_TEST(PagePromotion_NewToOldJSArrayBuffer) {
if (i::FLAG_single_generation) return;
if (!i::FLAG_page_promotion) return;
// Test makes sure JSArrayBuffer backing stores are still tracked after
// new-to-old promotion.
v8::Isolate* isolate = NewIsolateForPagePromotion();
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Context::New(isolate)->Enter();
Heap* heap = i_isolate->heap();
// Fill the current page which potentially contains the age mark.
heap::FillCurrentPage(heap->new_space());
// Allocate a buffer we would like to check against.
Handle<JSArrayBuffer> buffer =
i_isolate->factory()
->NewJSArrayBufferAndBackingStore(100,
InitializedFlag::kZeroInitialized)
.ToHandleChecked();
std::vector<Handle<FixedArray>> handles;
// Simulate a full space, filling the interesting page with live objects.
heap::SimulateFullSpace(heap->new_space(), &handles);
CHECK_GT(handles.size(), 0u);
// First object in handles should be on the same page as the allocated
// JSArrayBuffer.
Handle<FixedArray> first_object = handles.front();
Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(heap->old_space()->ContainsSlow(first_object->address()));
CHECK(heap->old_space()->ContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}
UNINITIALIZED_HEAP_TEST(Regress658718) {
if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;
......
......@@ -887,12 +887,18 @@ TEST(ReadOnlySpaceMetrics_AlignedAllocations) {
faked_space->AllocateRaw(object_size, kDoubleAligned).ToObjectChecked();
CHECK_EQ(object.address() % alignment, 0);
// Calculate size of allocations based on area_start.
Address area_start = faked_space->pages().back()->GetAreaStart();
Address top = RoundUp(area_start, alignment) + object_size;
top = RoundUp(top, alignment) + object_size;
size_t expected_size = top - area_start;
faked_space->ShrinkPages();
faked_space->Seal(ReadOnlySpace::SealMode::kDoNotDetachFromHeap);
// Allocated object sizes may contain 4 bytes of padding on 32-bit or
// with pointer compression.
CHECK_EQ(faked_space->Size(), object_size + RoundUp(object_size, alignment));
CHECK_EQ(faked_space->Size(), expected_size);
size_t committed_memory = RoundUp(
MemoryChunkLayout::ObjectStartOffsetInDataPage() + faked_space->Size(),
......