Commit bd61a85f authored by hpayer's avatar hpayer Committed by Commit bot

Just visit young array buffers during scavenge. Additionally keep the views in...

Just visit young array buffers during scavenge. Additionally keep the views in new space in a separate global list and move them to the corresponding array buffers when they get promoted.

This reduces young generation garbage collections when many array buffers are allocated.
BUG=

Review URL: https://codereview.chromium.org/904633003

Cr-Commit-Position: refs/heads/master@{#26605}
parent 34c1db2f
......@@ -1795,8 +1795,14 @@ void SetupArrayBufferView(i::Isolate* isolate,
obj->set_buffer(*buffer);
obj->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*obj);
Heap* heap = isolate->heap();
if (heap->InNewSpace(*obj)) {
obj->set_weak_next(heap->new_array_buffer_views_list());
heap->set_new_array_buffer_views_list(*obj);
} else {
obj->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*obj);
}
i::Handle<i::Object> byte_offset_object =
isolate->factory()->NewNumberFromSize(byte_offset);
......
......@@ -144,7 +144,8 @@ Heap::Heap()
external_string_table_(this),
chunks_queued_for_free_(NULL),
gc_callbacks_depth_(0),
deserialization_complete_(false) {
deserialization_complete_(false),
promotion_failure_(false) {
// Allow build-time customization of the max semispace size. Building
// V8 with snapshots and a non-default max semispace size is much
// easier if you can define it as part of the build environment.
......@@ -731,6 +732,7 @@ void Heap::GarbageCollectionEpilogue() {
// Remember the last top pointer so that we can later find out
// whether we allocated in new space since the last GC.
new_space_top_after_last_gc_ = new_space()->top();
set_promotion_failure(false);
}
......@@ -1732,29 +1734,86 @@ void Heap::UpdateReferencesInExternalStringTable(
void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
ProcessArrayBuffers(retainer);
ProcessArrayBuffers(retainer, false);
ProcessNewArrayBufferViews(retainer);
ProcessNativeContexts(retainer);
ProcessAllocationSites(retainer);
}
void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
ProcessArrayBuffers(retainer);
ProcessArrayBuffers(retainer, true);
ProcessNewArrayBufferViews(retainer);
ProcessNativeContexts(retainer);
}
void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
Object* head =
VisitWeakList<Context>(this, native_contexts_list(), retainer, false);
// Update the head of the list of contexts.
set_native_contexts_list(head);
}
void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
Object* array_buffer_obj =
VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
bool stop_after_young) {
Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>(
this, array_buffers_list(), retainer, stop_after_young);
set_array_buffers_list(array_buffer_obj);
#ifdef DEBUG
// Verify invariant that young array buffers come before old array buffers
// in array buffers list if there was no promotion failure.
Object* undefined = undefined_value();
Object* next = array_buffers_list();
bool old_objects_recorded = false;
if (promotion_failure()) return;
while (next != undefined) {
if (!old_objects_recorded) {
old_objects_recorded = !InNewSpace(next);
}
DCHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
next = JSArrayBuffer::cast(next)->weak_next();
}
#endif
}
void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
  // First drop dead views: retain the list of new space views, letting the
  // retainer filter out objects that did not survive the collection.
  // Retain the list of new space views.
  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
      this, new_array_buffer_views_list_, retainer, false);
  set_new_array_buffer_views_list(typed_array_obj);

  // Some objects in the list may be in old space now. Find them
  // and move them to the corresponding array buffer.
  Object* undefined = undefined_value();
  Object* previous = undefined;  // Last view confirmed to still be in new space.
  Object* head = undefined;      // Head of the rebuilt new-space-only list.
  Object* next;
  for (Object* o = new_array_buffer_views_list(); o != undefined;) {
    JSArrayBufferView* view = JSArrayBufferView::cast(o);
    // Read the successor first: promoted views get re-threaded below, which
    // overwrites their weak_next field.
    next = view->weak_next();
    if (!InNewSpace(view)) {
      if (previous != undefined) {
        // We are in the middle of the list, skip the old space element.
        JSArrayBufferView::cast(previous)->set_weak_next(next);
      }
      // Move the promoted view onto its array buffer's per-buffer weak list.
      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
      view->set_weak_next(buffer->weak_first_view());
      buffer->set_weak_first_view(view);
    } else {
      // We found a valid new space view, remember it.
      previous = view;
      if (head == undefined) {
        // We are at the list head.
        head = view;
      }
    }
    o = next;
  }
  // Install the filtered list; undefined if no views remain in new space.
  set_new_array_buffer_views_list(head);
}
......@@ -1770,8 +1829,8 @@ void Heap::TearDownArrayBuffers() {
void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
Object* allocation_site_obj =
VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
Object* allocation_site_obj = VisitWeakList<AllocationSite>(
this, allocation_sites_list(), retainer, false);
set_allocation_sites_list(allocation_site_obj);
}
......@@ -2204,6 +2263,7 @@ class ScavengingVisitor : public StaticVisitorBase {
return;
}
heap->set_promotion_failure(true);
// If promotion failed, we try to copy the object to the other semi-space
if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;
......@@ -5538,6 +5598,7 @@ bool Heap::CreateHeapObjects() {
set_native_contexts_list(undefined_value());
set_array_buffers_list(undefined_value());
set_new_array_buffer_views_list(undefined_value());
set_allocation_sites_list(undefined_value());
weak_object_to_code_table_ = undefined_value();
return true;
......
......@@ -858,6 +858,13 @@ class Heap {
void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
Object* array_buffers_list() const { return array_buffers_list_; }
void set_new_array_buffer_views_list(Object* object) {
new_array_buffer_views_list_ = object;
}
Object* new_array_buffer_views_list() const {
return new_array_buffer_views_list_;
}
void set_allocation_sites_list(Object* object) {
allocation_sites_list_ = object;
}
......@@ -1458,6 +1465,11 @@ class Heap {
bool deserialization_complete() const { return deserialization_complete_; }
bool promotion_failure() const { return promotion_failure_; }
void set_promotion_failure(bool promotion_failure) {
promotion_failure_ = promotion_failure;
}
protected:
// Methods made available to tests.
......@@ -1623,13 +1635,18 @@ class Heap {
bool inline_allocation_disabled_;
// Weak list heads, threaded through the objects.
// List heads are initilized lazily and contain the undefined_value at start.
// List heads are initialized lazily and contain the undefined_value at start.
Object* native_contexts_list_;
Object* array_buffers_list_;
Object* allocation_sites_list_;
// This is a global list of array buffer views in new space. When the views
// get promoted, they are removed from the list and added to the corresponding
// array buffer.
Object* new_array_buffer_views_list_;
// WeakHashTable that maps objects embedded in optimized code to dependent
// code list. It is initilized lazily and contains the undefined_value at
// code list. It is initialized lazily and contains the undefined_value at
// start.
Object* weak_object_to_code_table_;
......@@ -1965,7 +1982,8 @@ class Heap {
void MarkCompactEpilogue();
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessArrayBuffers(WeakObjectRetainer* retainer);
void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer);
void ProcessAllocationSites(WeakObjectRetainer* retainer);
// Deopts all code that contains allocation instruction which are tenured or
......@@ -2138,6 +2156,11 @@ class Heap {
bool deserialization_complete_;
// A promotion failure indicates that old space promotion failed during
// gc, i.e., some objects that should have gotten promoted had to stay in
// the new space (they were copied to the other semi-space).
bool promotion_failure_;
friend class AlwaysAllocateScope;
friend class Deserializer;
friend class Factory;
......
......@@ -1864,6 +1864,7 @@ int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(
TryPromoteObject(object, size)) {
continue;
}
heap()->set_promotion_failure(true);
AllocationResult allocation = new_space->AllocateRaw(size);
if (allocation.IsRetry()) {
......
......@@ -191,15 +191,19 @@ struct WeakListVisitor;
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
bool stop_after_young) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
MarkCompactCollector* collector = heap->mark_compact_collector();
bool record_slots = MustRecordSlots(heap);
while (list != undefined) {
// Check whether to keep the candidate in the list.
T* candidate = reinterpret_cast<T*>(list);
T* original_candidate = candidate;
Object* retained = retainer->RetainAs(list);
if (retained != NULL) {
if (head == undefined) {
......@@ -220,9 +224,19 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
candidate = reinterpret_cast<T*>(retained);
tail = candidate;
// tail is a live object, visit it.
WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
// The list of weak objects is usually ordered. It starts with objects
// recently allocated in the young generation followed by objects
// allocated in the old generation. When a promotion failure happens,
// the list is not ordered until the next GC.
// For young generation collections we just have to visit until the last
// young generation object.
if (stop_after_young && !heap->promotion_failure() &&
!heap->InNewSpace(original_candidate)) {
return head;
}
} else {
WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
}
......@@ -316,7 +330,8 @@ struct WeakListVisitor<Context> {
static void DoWeakList(Heap* heap, Context* context,
WeakObjectRetainer* retainer, int index) {
// Visit the weak list, removing dead intermediate elements.
Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
Object* list_head =
VisitWeakList<T>(heap, context->get(index), retainer, false);
// Update the list head.
context->set(index, list_head, UPDATE_WRITE_BARRIER);
......@@ -368,7 +383,7 @@ struct WeakListVisitor<JSArrayBuffer> {
static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
WeakObjectRetainer* retainer) {
Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
heap, array_buffer->weak_first_view(), retainer);
heap, array_buffer->weak_first_view(), retainer, false);
array_buffer->set_weak_first_view(typed_array_obj);
if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
Object** slot = HeapObject::RawField(array_buffer,
......@@ -399,23 +414,18 @@ struct WeakListVisitor<AllocationSite> {
};
template Object* VisitWeakList<Code>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
WeakObjectRetainer* retainer,
bool stop_after_young);
template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
WeakObjectRetainer* retainer,
bool stop_after_young);
template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
WeakObjectRetainer* retainer);
WeakObjectRetainer* retainer,
bool stop_after_young);
}
} // namespace v8::internal
......@@ -489,7 +489,8 @@ class WeakObjectRetainer;
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
bool stop_after_young);
}
} // namespace v8::internal
......
......@@ -16879,8 +16879,15 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
fixed_typed_array->length(), typed_array->type(),
static_cast<uint8_t*>(buffer->backing_store()));
buffer->set_weak_first_view(*typed_array);
DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
Heap* heap = isolate->heap();
if (heap->InNewSpace(*typed_array)) {
DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
typed_array->set_weak_next(heap->new_array_buffer_views_list());
heap->set_new_array_buffer_views_list(*typed_array);
} else {
buffer->set_weak_first_view(*typed_array);
DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
}
typed_array->set_buffer(*buffer);
JSObject::SetMapAndElements(typed_array, new_map, new_elements);
......
......@@ -88,6 +88,8 @@ bool Runtime::SetupArrayBufferAllocatingData(Isolate* isolate,
void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
Isolate* isolate = array_buffer->GetIsolate();
// Firstly, iterate over the views which are referenced directly by the array
// buffer.
for (Handle<Object> view_obj(array_buffer->weak_first_view(), isolate);
!view_obj->IsUndefined();) {
Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
......@@ -100,6 +102,24 @@ void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
}
view_obj = handle(view->weak_next(), isolate);
}
// Secondly, iterate over the global list of new space views to find views
// that belong to the neutered array buffer.
Heap* heap = isolate->heap();
for (Handle<Object> view_obj(heap->new_array_buffer_views_list(), isolate);
!view_obj->IsUndefined();) {
Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
if (view->buffer() == *array_buffer) {
if (view->IsJSTypedArray()) {
JSTypedArray::cast(*view)->Neuter();
} else if (view->IsJSDataView()) {
JSDataView::cast(*view)->Neuter();
} else {
UNREACHABLE();
}
}
view_obj = handle(view->weak_next(), isolate);
}
array_buffer->Neuter();
}
......@@ -265,11 +285,18 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitialize) {
holder->set_byte_offset(*byte_offset_object);
holder->set_byte_length(*byte_length_object);
Heap* heap = isolate->heap();
if (!maybe_buffer->IsNull()) {
Handle<JSArrayBuffer> buffer = Handle<JSArrayBuffer>::cast(maybe_buffer);
holder->set_buffer(*buffer);
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
if (heap->InNewSpace(*holder)) {
holder->set_weak_next(heap->new_array_buffer_views_list());
heap->set_new_array_buffer_views_list(*holder);
} else {
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
}
Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
static_cast<int>(length), array_type,
......@@ -367,8 +394,15 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) {
isolate->factory()->NewNumberFromSize(byte_length));
holder->set_byte_length(*byte_length_obj);
holder->set_length(*length_obj);
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
Heap* heap = isolate->heap();
if (heap->InNewSpace(*holder)) {
holder->set_weak_next(heap->new_array_buffer_views_list());
heap->set_new_array_buffer_views_list(*holder);
} else {
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
}
Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
static_cast<int>(length), array_type,
......@@ -542,8 +576,14 @@ RUNTIME_FUNCTION(Runtime_DataViewInitialize) {
holder->set_byte_offset(*byte_offset);
holder->set_byte_length(*byte_length);
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
Heap* heap = isolate->heap();
if (heap->InNewSpace(*holder)) {
holder->set_weak_next(heap->new_array_buffer_views_list());
heap->set_new_array_buffer_views_list(*holder);
} else {
holder->set_weak_next(buffer->weak_first_view());
buffer->set_weak_first_view(*holder);
}
return isolate->heap()->undefined_value();
}
......
......@@ -672,6 +672,8 @@ void Deserializer::Deserialize(Isolate* isolate) {
isolate_->heap()->undefined_value());
isolate_->heap()->set_array_buffers_list(
isolate_->heap()->undefined_value());
isolate->heap()->set_new_array_buffer_views_list(
isolate_->heap()->undefined_value());
// The allocation site list is build during root iteration, but if no sites
// were encountered then it needs to be initialized to undefined.
......
......@@ -62,7 +62,20 @@ static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
}
static int CountViews(JSArrayBuffer* array_buffer) {
// Counts how many entries of the heap's global new space view list belong to
// |array_buffer|.
static int CountViewsInNewSpaceList(Heap* heap, JSArrayBuffer* array_buffer) {
  int matches = 0;
  Object* current = heap->new_array_buffer_views_list();
  while (!current->IsUndefined()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(current);
    if (view->buffer() == array_buffer) {
      matches++;
    }
    current = view->weak_next();
  }
  return matches;
}
static int CountViews(Heap* heap, JSArrayBuffer* array_buffer) {
int count = 0;
for (Object* o = array_buffer->weak_first_view();
!o->IsUndefined();
......@@ -70,17 +83,27 @@ static int CountViews(JSArrayBuffer* array_buffer) {
count++;
}
return count;
return count + CountViewsInNewSpaceList(heap, array_buffer);
}
static bool HasViewInWeakList(JSArrayBuffer* array_buffer,
// Returns true if |ta| is present in the heap's global new space view list.
static bool HasViewInNewSpaceList(Heap* heap, JSArrayBufferView* ta) {
  Object* current = heap->new_array_buffer_views_list();
  while (!current->IsUndefined()) {
    if (current == ta) {
      return true;
    }
    current = JSArrayBufferView::cast(current)->weak_next();
  }
  return false;
}
static bool HasViewInWeakList(Heap* heap, JSArrayBuffer* array_buffer,
JSArrayBufferView* ta) {
for (Object* o = array_buffer->weak_first_view();
!o->IsUndefined();
o = JSArrayBufferView::cast(o)->weak_next()) {
if (ta == o) return true;
}
return false;
return HasViewInNewSpaceList(heap, ta);
}
......@@ -200,18 +223,18 @@ void TestViewFromApi() {
Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
Handle<JSArrayBufferView> ita2 = v8::Utils::OpenHandle(*ta2);
CHECK_EQ(2, CountViews(*iab));
CHECK(HasViewInWeakList(*iab, *ita1));
CHECK(HasViewInWeakList(*iab, *ita2));
CHECK_EQ(2, CountViews(isolate->heap(), *iab));
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita2));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, CountViews(*iab));
CHECK_EQ(1, CountViews(isolate->heap(), *iab));
Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
CHECK(HasViewInWeakList(*iab, *ita1));
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(0, CountViews(*iab));
CHECK_EQ(0, CountViews(isolate->heap(), *iab));
}
......@@ -299,10 +322,13 @@ static void TestTypedArrayFromScript(const char* constructor) {
v8::Handle<TypedArray>::Cast(CompileRun("ta3"));
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
CHECK_EQ(3, CountViews(*iab));
CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta1)));
CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta2)));
CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta3)));
CHECK_EQ(3, CountViews(isolate->heap(), *iab));
CHECK(HasViewInWeakList(isolate->heap(), *iab,
*v8::Utils::OpenHandle(*ta1)));
CHECK(HasViewInWeakList(isolate->heap(), *iab,
*v8::Utils::OpenHandle(*ta2)));
CHECK(HasViewInWeakList(isolate->heap(), *iab,
*v8::Utils::OpenHandle(*ta3)));
}
i::SNPrintF(source, "ta%d = null;", i);
......@@ -316,13 +342,14 @@ static void TestTypedArrayFromScript(const char* constructor) {
v8::Handle<v8::ArrayBuffer> ab =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
CHECK_EQ(2, CountViews(*iab));
CHECK_EQ(2, CountViews(isolate->heap(), *iab));
for (int j = 1; j <= 3; j++) {
if (j == i) continue;
i::SNPrintF(source, "ta%d", j);
v8::Handle<TypedArray> ta =
v8::Handle<TypedArray>::Cast(CompileRun(source.start()));
CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta)));
CHECK(HasViewInWeakList(isolate->heap(), *iab,
*v8::Utils::OpenHandle(*ta)));
}
}
......@@ -336,7 +363,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
v8::Handle<v8::ArrayBuffer> ab =
v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
CHECK_EQ(0, CountViews(*iab));
CHECK_EQ(0, CountViews(isolate->heap(), *iab));
}
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment