Commit 67702104 authored by Dominik Inführ, committed by Commit Bot

[heap] Introduce separate young and old list for ArrayBufferExtension

Split the linked list of array buffer extensions into two lists for
young and old JSArrayBuffers. Process young extensions during the
minor GC pause. When promoting JSArrayBuffers into the old gen, move
the extension into the old linked list as well.

Bug: v8:10064
Change-Id: I07275ffe7ba918c9b2d6d0648a6d1b59e4fa4891
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1997438
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65842}
parent 873f66cd
...@@ -887,6 +887,9 @@ DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING_BOOL, ...@@ -887,6 +887,9 @@ DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING_BOOL,
#else #else
#define V8_ARRAY_BUFFER_EXTENSION_BOOL false #define V8_ARRAY_BUFFER_EXTENSION_BOOL false
#endif #endif
// When enabled, each JSArrayBuffer owns a malloc'ed ArrayBufferExtension that
// the GC tracks on per-generation linked lists (see
// Heap::AppendArrayBufferExtension). The implication below forces
// always_promote_young_mc so the full MC promotes surviving young buffers
// instead of copying them within new space.
DEFINE_BOOL_READONLY(array_buffer_extension, V8_ARRAY_BUFFER_EXTENSION_BOOL,
                     "enable array buffer tracking using extension objects")
DEFINE_IMPLICATION(array_buffer_extension, always_promote_young_mc)
DEFINE_BOOL(parallel_marking, true, "use parallel marking in atomic pause") DEFINE_BOOL(parallel_marking, true, "use parallel marking in atomic pause")
DEFINE_INT(ephemeron_fixpoint_iterations, 10, DEFINE_INT(ephemeron_fixpoint_iterations, 10,
"number of fixpoint iterations it takes to switch to linear " "number of fixpoint iterations it takes to switch to linear "
...@@ -1346,6 +1349,9 @@ DEFINE_BOOL(minor_mc_parallel_marking, true, ...@@ -1346,6 +1349,9 @@ DEFINE_BOOL(minor_mc_parallel_marking, true,
DEFINE_BOOL(trace_minor_mc_parallel_marking, false, DEFINE_BOOL(trace_minor_mc_parallel_marking, false,
"trace parallel marking for the young generation") "trace parallel marking for the young generation")
DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs") DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs")
#else
DEFINE_BOOL_READONLY(minor_mc, false,
"perform young generation mark compact GCs")
#endif // ENABLE_MINOR_MC #endif // ENABLE_MINOR_MC
// //
......
...@@ -3790,8 +3790,24 @@ void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback, ...@@ -3790,8 +3790,24 @@ void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
UNREACHABLE(); UNREACHABLE();
} }
// Registers a freshly created ArrayBufferExtension with the heap by
// prepending it to the generation-appropriate linked list: young if the
// owning JSArrayBuffer lives in the young generation, old otherwise.
void Heap::AppendArrayBufferExtension(JSArrayBuffer object,
                                      ArrayBufferExtension* extension) {
  ArrayBufferExtension** head = Heap::InYoungGeneration(object)
                                    ? &young_array_buffer_extensions_
                                    : &old_array_buffer_extensions_;
  extension->set_next(*head);
  *head = extension;
}
void Heap::ReleaseAllArrayBufferExtensions() { void Heap::ReleaseAllArrayBufferExtensions() {
ArrayBufferExtension* current = array_buffer_extensions_; ReleaseAllArrayBufferExtensions(&old_array_buffer_extensions_);
ReleaseAllArrayBufferExtensions(&young_array_buffer_extensions_);
}
void Heap::ReleaseAllArrayBufferExtensions(ArrayBufferExtension** head) {
ArrayBufferExtension* current = *head;
while (current) { while (current) {
ArrayBufferExtension* next = current->next(); ArrayBufferExtension* next = current->next();
...@@ -3799,7 +3815,7 @@ void Heap::ReleaseAllArrayBufferExtensions() { ...@@ -3799,7 +3815,7 @@ void Heap::ReleaseAllArrayBufferExtensions() {
current = next; current = next;
} }
array_buffer_extensions_ = nullptr; *head = nullptr;
} }
void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) { void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
......
...@@ -587,19 +587,25 @@ class Heap { ...@@ -587,19 +587,25 @@ class Heap {
V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit( V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
double threshold_percent); double threshold_percent);
ArrayBufferExtension* array_buffer_extensions() { ArrayBufferExtension* old_array_buffer_extensions() {
return array_buffer_extensions_; return old_array_buffer_extensions_;
} }
void set_array_buffer_extensions(ArrayBufferExtension* head) { ArrayBufferExtension* young_array_buffer_extensions() {
array_buffer_extensions_ = head; return young_array_buffer_extensions_;
} }
void AppendArrayBufferExtension(ArrayBufferExtension* extension) { void set_old_array_buffer_extensions(ArrayBufferExtension* head) {
extension->set_next(array_buffer_extensions_); old_array_buffer_extensions_ = head;
array_buffer_extensions_ = extension;
} }
void set_young_array_buffer_extensions(ArrayBufferExtension* head) {
young_array_buffer_extensions_ = head;
}
void AppendArrayBufferExtension(JSArrayBuffer object,
ArrayBufferExtension* extension);
void ReleaseAllArrayBufferExtensions(); void ReleaseAllArrayBufferExtensions();
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs(); V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs();
...@@ -1413,6 +1419,8 @@ class Heap { ...@@ -1413,6 +1419,8 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object); static Isolate* GetIsolateFromWritableObject(HeapObject object);
private: private:
void ReleaseAllArrayBufferExtensions(ArrayBufferExtension** head);
using ExternalStringTableUpdaterCallback = String (*)(Heap* heap, using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
FullObjectSlot pointer); FullObjectSlot pointer);
...@@ -1925,7 +1933,8 @@ class Heap { ...@@ -1925,7 +1933,8 @@ class Heap {
Space* space_[LAST_SPACE + 1]; Space* space_[LAST_SPACE + 1];
// List for tracking ArrayBufferExtensions // List for tracking ArrayBufferExtensions
ArrayBufferExtension* array_buffer_extensions_ = nullptr; ArrayBufferExtension* old_array_buffer_extensions_ = nullptr;
ArrayBufferExtension* young_array_buffer_extensions_ = nullptr;
// Determines whether code space is write-protected. This is essentially a // Determines whether code space is write-protected. This is essentially a
// race-free copy of the {FLAG_write_protect_code_memory} flag. // race-free copy of the {FLAG_write_protect_code_memory} flag.
......
...@@ -511,8 +511,6 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() { ...@@ -511,8 +511,6 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
#ifdef ENABLE_MINOR_MC #ifdef ENABLE_MINOR_MC
MinorMarkCompactCollector::MarkingState* minor_marking_state = MinorMarkCompactCollector::MarkingState* minor_marking_state =
heap()->minor_mark_compact_collector()->marking_state(); heap()->minor_mark_compact_collector()->marking_state();
#else
void* minor_marking_state = nullptr;
#endif // ENABLE_MINOR_MC #endif // ENABLE_MINOR_MC
collector_->marking_worklists_holder()->Update( collector_->marking_worklists_holder()->Update(
...@@ -521,8 +519,10 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() { ...@@ -521,8 +519,10 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
// this is referred inside DCHECK. // this is referred inside DCHECK.
this, this,
#endif #endif
filler_map, #ifdef ENABLE_MINOR_MC
minor_marking_state](HeapObject obj, HeapObject* out) -> bool { minor_marking_state,
#endif
filler_map](HeapObject obj, HeapObject* out) -> bool {
DCHECK(obj.IsHeapObject()); DCHECK(obj.IsHeapObject());
// Only pointers to from space have to be updated. // Only pointers to from space have to be updated.
if (Heap::InFromPage(obj)) { if (Heap::InFromPage(obj)) {
......
...@@ -932,9 +932,14 @@ void MarkCompactCollector::Finish() { ...@@ -932,9 +932,14 @@ void MarkCompactCollector::Finish() {
} }
void MarkCompactCollector::SweepArrayBufferExtensions() { void MarkCompactCollector::SweepArrayBufferExtensions() {
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return; ArrayBufferExtension* promoted_list = SweepYoungArrayBufferExtensions();
ArrayBufferExtension* current = heap_->array_buffer_extensions(); SweepOldArrayBufferExtensions(promoted_list);
ArrayBufferExtension* last = nullptr; }
void MarkCompactCollector::SweepOldArrayBufferExtensions(
ArrayBufferExtension* promoted_list) {
ArrayBufferExtension* current = heap_->old_array_buffer_extensions();
ArrayBufferExtension* last = promoted_list;
while (current) { while (current) {
ArrayBufferExtension* next = current->next(); ArrayBufferExtension* next = current->next();
...@@ -950,7 +955,29 @@ void MarkCompactCollector::SweepArrayBufferExtensions() { ...@@ -950,7 +955,29 @@ void MarkCompactCollector::SweepArrayBufferExtensions() {
current = next; current = next;
} }
heap_->set_array_buffer_extensions(last); heap_->set_old_array_buffer_extensions(last);
}
// Sweeps the young extension list after full-MC marking: extensions whose
// buffers died are freed; marked survivors are unmarked and collected into a
// "promoted" list that the caller (SweepArrayBufferExtensions) hands to
// SweepOldArrayBufferExtensions for merging into the old-generation list.
// The young list is left empty afterwards.
ArrayBufferExtension* MarkCompactCollector::SweepYoungArrayBufferExtensions() {
  ArrayBufferExtension* promoted_list = nullptr;
  for (ArrayBufferExtension* current = heap_->young_array_buffer_extensions();
       current != nullptr;) {
    ArrayBufferExtension* const next = current->next();
    if (current->IsMarked()) {
      // Survivor: reset the mark bit and prepend to the promoted list.
      current->Unmark();
      current->set_next(promoted_list);
      promoted_list = current;
    } else {
      // Owning buffer is dead; release the extension.
      delete current;
    }
    current = next;
  }
  heap_->set_young_array_buffer_extensions(nullptr);
  return promoted_list;
}
class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor { class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
...@@ -1219,6 +1246,8 @@ class RecordMigratedSlotVisitor : public ObjectVisitor { ...@@ -1219,6 +1246,8 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
inline void VisitRuntimeEntry(Code host, RelocInfo* rinfo) final {} inline void VisitRuntimeEntry(Code host, RelocInfo* rinfo) final {}
inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {} inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {}
virtual void MarkArrayBufferExtensionPromoted(HeapObject object) {}
protected: protected:
inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value, inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value,
Address slot) { Address slot) {
...@@ -1305,6 +1334,9 @@ class EvacuateVisitorBase : public HeapObjectVisitor { ...@@ -1305,6 +1334,9 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
if (mode != MigrationMode::kFast) if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size); base->ExecuteMigrationObservers(dest, src, dst, size);
dst.IterateBodyFast(dst.map(), size, base->record_visitor_); dst.IterateBodyFast(dst.map(), size, base->record_visitor_);
if (V8_UNLIKELY(FLAG_minor_mc)) {
base->record_visitor_->MarkArrayBufferExtensionPromoted(dst);
}
} else if (dest == CODE_SPACE) { } else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space()); DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
base->heap_->CopyBlock(dst_addr, src_addr, size); base->heap_->CopyBlock(dst_addr, src_addr, size);
...@@ -1525,6 +1557,9 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor { ...@@ -1525,6 +1557,9 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
local_pretenuring_feedback_); local_pretenuring_feedback_);
} else if (mode == NEW_TO_OLD) { } else if (mode == NEW_TO_OLD) {
object.IterateBodyFast(record_visitor_); object.IterateBodyFast(record_visitor_);
if (V8_UNLIKELY(FLAG_minor_mc)) {
record_visitor_->MarkArrayBufferExtensionPromoted(object);
}
} }
return true; return true;
} }
...@@ -3134,12 +3169,14 @@ void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks( ...@@ -3134,12 +3169,14 @@ void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
} }
} }
bool MarkCompactCollectorBase::ShouldMovePage(Page* p, intptr_t live_bytes) { bool MarkCompactCollectorBase::ShouldMovePage(Page* p, intptr_t live_bytes,
bool always_promote_young) {
const bool reduce_memory = heap()->ShouldReduceMemory(); const bool reduce_memory = heap()->ShouldReduceMemory();
const Address age_mark = heap()->new_space()->age_mark(); const Address age_mark = heap()->new_space()->age_mark();
return !reduce_memory && !p->NeverEvacuate() && return !reduce_memory && !p->NeverEvacuate() &&
(live_bytes > Evacuator::NewSpacePageEvacuationThreshold()) && (live_bytes > Evacuator::NewSpacePageEvacuationThreshold()) &&
!p->Contains(age_mark) && heap()->CanExpandOldGeneration(live_bytes); (always_promote_young || !p->Contains(age_mark)) &&
heap()->CanExpandOldGeneration(live_bytes);
} }
void MarkCompactCollector::EvacuatePagesInParallel() { void MarkCompactCollector::EvacuatePagesInParallel() {
...@@ -3156,7 +3193,8 @@ void MarkCompactCollector::EvacuatePagesInParallel() { ...@@ -3156,7 +3193,8 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page); intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue; if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
live_bytes += live_bytes_on_page; live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page)) { if (ShouldMovePage(page, live_bytes_on_page,
FLAG_always_promote_young_mc)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) || if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) ||
FLAG_always_promote_young_mc) { FLAG_always_promote_young_mc) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
...@@ -4251,6 +4289,13 @@ class YoungGenerationMarkingVisitor final ...@@ -4251,6 +4289,13 @@ class YoungGenerationMarkingVisitor final
UNREACHABLE(); UNREACHABLE();
} }
// Marks the buffer's native extension as live for the minor GC (extensions
// left unmarked are freed in SweepArrayBufferExtensions), then iterates the
// object body as usual and returns its size.
V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
  object.YoungMarkExtension();
  int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
  JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}
private: private:
template <typename TSlot> template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) { V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
...@@ -4323,6 +4368,33 @@ void MinorMarkCompactCollector::CleanupSweepToIteratePages() { ...@@ -4323,6 +4368,33 @@ void MinorMarkCompactCollector::CleanupSweepToIteratePages() {
sweep_to_iterate_pages_.clear(); sweep_to_iterate_pages_.clear();
} }
// Partitions the young extension list after a minor MC. Extensions of dead
// buffers are freed; extensions of buffers that were promoted to old space
// are moved onto the old list; the remainder stay on the (rebuilt) young
// list. NOTE(review): same logic as
// ScavengerCollector::SweepArrayBufferExtensions.
void MinorMarkCompactCollector::SweepArrayBufferExtensions() {
  ArrayBufferExtension* current = heap_->young_array_buffer_extensions();
  ArrayBufferExtension* last_young = nullptr;  // rebuilt young list
  ArrayBufferExtension* last_old = heap_->old_array_buffer_extensions();
  while (current) {
    ArrayBufferExtension* next = current->next();
    if (!current->IsYoungMarked()) {
      // Owning buffer did not survive the minor GC.
      delete current;
    } else if (current->IsYoungPromoted()) {
      // Buffer promoted to old space: clear state, splice into old list.
      current->YoungUnmark();
      current->set_next(last_old);
      last_old = current;
    } else {
      // Buffer survived but remains in new space.
      current->YoungUnmark();
      current->set_next(last_young);
      last_young = current;
    }
    current = next;
  }
  heap_->set_old_array_buffer_extensions(last_old);
  heap_->set_young_array_buffer_extensions(last_young);
}
class YoungGenerationMigrationObserver final : public MigrationObserver { class YoungGenerationMigrationObserver final : public MigrationObserver {
public: public:
YoungGenerationMigrationObserver(Heap* heap, YoungGenerationMigrationObserver(Heap* heap,
...@@ -4358,6 +4430,11 @@ class YoungGenerationRecordMigratedSlotVisitor final ...@@ -4358,6 +4430,11 @@ class YoungGenerationRecordMigratedSlotVisitor final
UNREACHABLE(); UNREACHABLE();
} }
// Records that a migrated object was promoted to old space. Only
// JSArrayBuffers carry extensions; all other objects are ignored.
void MarkArrayBufferExtensionPromoted(HeapObject object) final {
  if (object.IsJSArrayBuffer()) {
    JSArrayBuffer::cast(object).YoungMarkExtensionPromoted();
  }
}
private: private:
// Only record slots for host objects that are considered as live by the full // Only record slots for host objects that are considered as live by the full
// collector. // collector.
...@@ -4521,6 +4598,8 @@ void MinorMarkCompactCollector::CollectGarbage() { ...@@ -4521,6 +4598,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
} }
heap()->account_external_memory_concurrently_freed(); heap()->account_external_memory_concurrently_freed();
SweepArrayBufferExtensions();
} }
void MinorMarkCompactCollector::MakeIterable( void MinorMarkCompactCollector::MakeIterable(
...@@ -5088,7 +5167,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() { ...@@ -5088,7 +5167,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page); intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue; if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
live_bytes += live_bytes_on_page; live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page)) { if (ShouldMovePage(page, live_bytes_on_page, false)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
} else { } else {
......
...@@ -225,7 +225,7 @@ class MarkCompactCollectorBase { ...@@ -225,7 +225,7 @@ class MarkCompactCollectorBase {
const intptr_t live_bytes); const intptr_t live_bytes);
// Returns whether this page should be moved according to heuristics. // Returns whether this page should be moved according to heuristics.
bool ShouldMovePage(Page* p, intptr_t live_bytes); bool ShouldMovePage(Page* p, intptr_t live_bytes, bool promote_young);
int CollectToSpaceUpdatingItems(ItemParallelJob* job); int CollectToSpaceUpdatingItems(ItemParallelJob* job);
template <typename IterateableSpace> template <typename IterateableSpace>
...@@ -610,6 +610,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -610,6 +610,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Free unmarked ArrayBufferExtensions. // Free unmarked ArrayBufferExtensions.
void SweepArrayBufferExtensions(); void SweepArrayBufferExtensions();
void SweepOldArrayBufferExtensions(ArrayBufferExtension* promoted_list);
ArrayBufferExtension* SweepYoungArrayBufferExtensions();
void MarkLiveObjects() override; void MarkLiveObjects() override;
...@@ -872,6 +874,8 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase { ...@@ -872,6 +874,8 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
int NumberOfParallelMarkingTasks(int pages); int NumberOfParallelMarkingTasks(int pages);
void SweepArrayBufferExtensions();
MarkingWorklist* worklist_; MarkingWorklist* worklist_;
YoungGenerationMarkingVisitor* main_marking_visitor_; YoungGenerationMarkingVisitor* main_marking_visitor_;
......
...@@ -476,6 +476,13 @@ void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start, ...@@ -476,6 +476,13 @@ void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start,
} }
} }
// Marks the buffer's native extension as reachable for this scavenge so the
// subsequent sweep does not free it, then visits the object body normally.
int ScavengeVisitor::VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
  object.YoungMarkExtension();
  const int body_size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
  JSArrayBuffer::BodyDescriptor::IterateBody(map, object, body_size, this);
  return body_size;
}
int ScavengeVisitor::VisitEphemeronHashTable(Map map, int ScavengeVisitor::VisitEphemeronHashTable(Map map,
EphemeronHashTable table) { EphemeronHashTable table) {
// Register table with the scavenger, so it can take care of the weak keys // Register table with the scavenger, so it can take care of the weak keys
......
...@@ -378,10 +378,39 @@ void ScavengerCollector::CollectGarbage() { ...@@ -378,10 +378,39 @@ void ScavengerCollector::CollectGarbage() {
#endif #endif
} }
SweepArrayBufferExtensions();
// Update how much has survived scavenge. // Update how much has survived scavenge.
heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize()); heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedYoungObjectSize());
} }
// Partitions the young extension list after a scavenge. Extensions of dead
// buffers are freed; extensions of buffers promoted to old space move onto
// the old list; the rest stay on the (rebuilt) young list. NOTE(review):
// same logic as MinorMarkCompactCollector::SweepArrayBufferExtensions.
void ScavengerCollector::SweepArrayBufferExtensions() {
  ArrayBufferExtension* current = heap_->young_array_buffer_extensions();
  ArrayBufferExtension* last_young = nullptr;  // rebuilt young list
  ArrayBufferExtension* last_old = heap_->old_array_buffer_extensions();
  while (current) {
    ArrayBufferExtension* next = current->next();
    if (!current->IsYoungMarked()) {
      // Owning buffer did not survive the scavenge.
      delete current;
    } else if (current->IsYoungPromoted()) {
      // Buffer promoted to old space: clear state, splice into old list.
      current->YoungUnmark();
      current->set_next(last_old);
      last_old = current;
    } else {
      // Buffer survived but remains in new space.
      current->YoungUnmark();
      current->set_next(last_young);
      last_young = current;
    }
    current = next;
  }
  heap_->set_old_array_buffer_extensions(last_old);
  heap_->set_young_array_buffer_extensions(last_young);
}
void ScavengerCollector::HandleSurvivingNewLargeObjects() { void ScavengerCollector::HandleSurvivingNewLargeObjects() {
for (SurvivingNewLargeObjectMapEntry update_info : for (SurvivingNewLargeObjectMapEntry update_info :
surviving_new_large_objects_) { surviving_new_large_objects_) {
...@@ -449,8 +478,14 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map, ...@@ -449,8 +478,14 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
const bool record_slots = const bool record_slots =
is_compacting_ && is_compacting_ &&
heap()->incremental_marking()->atomic_marking_state()->IsBlack(target); heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots); IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots);
target.IterateBodyFast(map, size, &visitor); target.IterateBodyFast(map, size, &visitor);
if (map.IsJSArrayBufferMap()) {
DCHECK(!MemoryChunk::FromHeapObject(target)->IsLargePage());
JSArrayBuffer::cast(target).YoungMarkExtensionPromoted();
}
} }
void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) { void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
......
...@@ -51,6 +51,8 @@ class ScavengerCollector { ...@@ -51,6 +51,8 @@ class ScavengerCollector {
void ClearOldEphemerons(); void ClearOldEphemerons();
void HandleSurvivingNewLargeObjects(); void HandleSurvivingNewLargeObjects();
void SweepArrayBufferExtensions();
Isolate* const isolate_; Isolate* const isolate_;
Heap* const heap_; Heap* const heap_;
base::Semaphore parallel_scavenge_semaphore_; base::Semaphore parallel_scavenge_semaphore_;
...@@ -257,6 +259,7 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> { ...@@ -257,6 +259,7 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final; V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final; V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final;
V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object); V8_INLINE int VisitEphemeronHashTable(Map map, EphemeronHashTable object);
V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object);
private: private:
template <typename TSlot> template <typename TSlot>
......
...@@ -120,7 +120,7 @@ ArrayBufferExtension* JSArrayBuffer::EnsureExtension(Heap* heap) { ...@@ -120,7 +120,7 @@ ArrayBufferExtension* JSArrayBuffer::EnsureExtension(Heap* heap) {
ArrayBufferExtension* extension = ArrayBufferExtension* extension =
new ArrayBufferExtension(std::shared_ptr<BackingStore>()); new ArrayBufferExtension(std::shared_ptr<BackingStore>());
set_extension(extension); set_extension(extension);
heap->AppendArrayBufferExtension(extension); heap->AppendArrayBufferExtension(*this, extension);
return extension; return extension;
} }
...@@ -141,6 +141,20 @@ void JSArrayBuffer::MarkExtension() { ...@@ -141,6 +141,20 @@ void JSArrayBuffer::MarkExtension() {
} }
} }
void JSArrayBuffer::YoungMarkExtension() {
ArrayBufferExtension* extension = this->extension();
if (extension) {
extension->YoungMark();
}
}
void JSArrayBuffer::YoungMarkExtensionPromoted() {
ArrayBufferExtension* extension = this->extension();
if (extension) {
extension->YoungMarkPromoted();
}
}
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() { Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
Isolate* isolate = GetIsolate(); Isolate* isolate = GetIsolate();
Handle<JSTypedArray> self(*this, isolate); Handle<JSTypedArray> self(*this, isolate);
......
...@@ -114,6 +114,8 @@ class JSArrayBuffer : public JSObject { ...@@ -114,6 +114,8 @@ class JSArrayBuffer : public JSObject {
// Marks ArrayBufferExtension // Marks ArrayBufferExtension
void MarkExtension(); void MarkExtension();
void YoungMarkExtension();
void YoungMarkExtensionPromoted();
// Dispatched behavior. // Dispatched behavior.
DECL_PRINTER(JSArrayBuffer) DECL_PRINTER(JSArrayBuffer)
...@@ -155,22 +157,44 @@ class JSArrayBuffer : public JSObject { ...@@ -155,22 +157,44 @@ class JSArrayBuffer : public JSObject {
// and frees unmarked ones. // and frees unmarked ones.
// https://docs.google.com/document/d/1-ZrLdlFX1nXT3z-FAgLbKal1gI8Auiaya_My-a0UJ28/edit // https://docs.google.com/document/d/1-ZrLdlFX1nXT3z-FAgLbKal1gI8Auiaya_My-a0UJ28/edit
class ArrayBufferExtension : public Malloced { class ArrayBufferExtension : public Malloced {
enum class GcState : uint8_t { Dead = 0, Copied, Promoted };
std::atomic<bool> marked_; std::atomic<bool> marked_;
std::atomic<GcState> young_gc_state_;
std::shared_ptr<BackingStore> backing_store_; std::shared_ptr<BackingStore> backing_store_;
ArrayBufferExtension* next_; ArrayBufferExtension* next_;
GcState young_gc_state() {
return young_gc_state_.load(std::memory_order_relaxed);
}
void set_young_gc_state(GcState value) {
young_gc_state_.store(value, std::memory_order_relaxed);
}
public: public:
ArrayBufferExtension() ArrayBufferExtension()
: marked_(false), : marked_(false),
young_gc_state_(GcState::Dead),
backing_store_(std::shared_ptr<BackingStore>()), backing_store_(std::shared_ptr<BackingStore>()),
next_(nullptr) {} next_(nullptr) {}
explicit ArrayBufferExtension(std::shared_ptr<BackingStore> backing_store) explicit ArrayBufferExtension(std::shared_ptr<BackingStore> backing_store)
: marked_(false), backing_store_(backing_store), next_(nullptr) {} : marked_(false),
young_gc_state_(GcState::Dead),
backing_store_(backing_store),
next_(nullptr) {}
void Mark() { marked_.store(true, std::memory_order_relaxed); } void Mark() { marked_.store(true, std::memory_order_relaxed); }
void Unmark() { marked_.store(false, std::memory_order_relaxed); } void Unmark() { marked_.store(false, std::memory_order_relaxed); }
bool IsMarked() { return marked_.load(std::memory_order_relaxed); } bool IsMarked() { return marked_.load(std::memory_order_relaxed); }
void YoungMark() { set_young_gc_state(GcState::Copied); }
void YoungMarkPromoted() { set_young_gc_state(GcState::Promoted); }
void YoungUnmark() { set_young_gc_state(GcState::Dead); }
bool IsYoungMarked() { return young_gc_state() != GcState::Dead; }
bool IsYoungPromoted() { return young_gc_state() == GcState::Promoted; }
std::shared_ptr<BackingStore> backing_store() { return backing_store_; } std::shared_ptr<BackingStore> backing_store() { return backing_store_; }
BackingStore* backing_store_raw() { return backing_store_.get(); } BackingStore* backing_store_raw() { return backing_store_.get(); }
......
...@@ -20,6 +20,39 @@ bool IsTracked(i::JSArrayBuffer buf) { ...@@ -20,6 +20,39 @@ bool IsTracked(i::JSArrayBuffer buf) {
return i::ArrayBufferTracker::IsTracked(buf); return i::ArrayBufferTracker::IsTracked(buf);
} }
// Returns true when |extension| appears on the singly-linked list that
// starts at |head|.
bool LookupExtension(i::ArrayBufferExtension* head,
                     i::ArrayBufferExtension* extension) {
  for (i::ArrayBufferExtension* it = head; it != nullptr; it = it->next()) {
    if (it == extension) return true;
  }
  return false;
}
// True when the extension is tracked exclusively on the young list.
bool IsTrackedYoung(i::Heap* heap, i::ArrayBufferExtension* extension) {
  return LookupExtension(heap->young_array_buffer_extensions(), extension) &&
         !LookupExtension(heap->old_array_buffer_extensions(), extension);
}
// True when the extension is tracked exclusively on the old list.
bool IsTrackedOld(i::Heap* heap, i::ArrayBufferExtension* extension) {
  return LookupExtension(heap->old_array_buffer_extensions(), extension) &&
         !LookupExtension(heap->young_array_buffer_extensions(), extension);
}
// True when the extension is tracked on exactly one of the two lists.
bool IsTracked(i::Heap* heap, i::ArrayBufferExtension* extension) {
  const bool in_young =
      LookupExtension(heap->young_array_buffer_extensions(), extension);
  const bool in_old =
      LookupExtension(heap->old_array_buffer_extensions(), extension);
  return in_young ? !in_old : in_old;
}
} // namespace } // namespace
namespace v8 { namespace v8 {
...@@ -58,6 +91,31 @@ TEST(ArrayBuffer_OnlyMC) { ...@@ -58,6 +91,31 @@ TEST(ArrayBuffer_OnlyMC) {
CHECK(!IsTracked(raw_ab)); CHECK(!IsTracked(raw_ab));
} }
// Checks that full (mark-compact) GCs move a live extension from the young
// list to the old list and eventually free it once the buffer dies.
TEST(ArrayBuffer_OnlyMC_Extension) {
  // Extension tracking is only active in builds that enable it.
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    // A freshly allocated buffer starts out on the young list.
    CHECK(IsTrackedYoung(heap, extension));
    // A full GC promotes the live buffer, moving its extension to old.
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    // Further full GCs keep it on the old list.
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  // With the handle gone the buffer dies and its extension is freed.
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}
TEST(ArrayBuffer_OnlyScavenge) { TEST(ArrayBuffer_OnlyScavenge) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return; if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope; ManualGCScope manual_gc_scope;
...@@ -89,6 +147,33 @@ TEST(ArrayBuffer_OnlyScavenge) { ...@@ -89,6 +147,33 @@ TEST(ArrayBuffer_OnlyScavenge) {
CHECK(!IsTracked(raw_ab)); CHECK(!IsTracked(raw_ab));
} }
// Checks that scavenges keep a semispace-copied extension on the young list
// and move it to the old list once the buffer is promoted.
TEST(ArrayBuffer_OnlyScavenge_Extension) {
  // Extension tracking is only active in builds that enable it.
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    // A freshly allocated buffer starts out on the young list.
    CHECK(IsTrackedYoung(heap, extension));
    // First scavenge copies within new space: still young.
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedYoung(heap, extension));
    // Second scavenge promotes the buffer: extension moves to old.
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    // Later scavenges leave the old list untouched.
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  // With the handle gone, a full GC frees the extension.
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}
TEST(ArrayBuffer_ScavengeAndMC) { TEST(ArrayBuffer_ScavengeAndMC) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return; if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope; ManualGCScope manual_gc_scope;
...@@ -122,6 +207,34 @@ TEST(ArrayBuffer_ScavengeAndMC) { ...@@ -122,6 +207,34 @@ TEST(ArrayBuffer_ScavengeAndMC) {
CHECK(!IsTracked(raw_ab)); CHECK(!IsTracked(raw_ab));
} }
// Checks the extension lists across interleaved scavenges and full GCs:
// promotion moves the extension to the old list, where both GC kinds keep it
// until the buffer dies.
TEST(ArrayBuffer_ScavengeAndMC_Extension) {
  // Extension tracking is only active in builds that enable it.
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  ArrayBufferExtension* extension;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    extension = buf->extension();
    // A freshly allocated buffer starts out on the young list.
    CHECK(IsTrackedYoung(heap, extension));
    // First scavenge copies within new space: still young.
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedYoung(heap, extension));
    // Second scavenge promotes the buffer: extension moves to old.
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    // Neither a full GC nor a later scavenge moves it back.
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTrackedOld(heap, extension));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTrackedOld(heap, extension));
  }
  // With the handle gone, a full GC frees the extension.
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(heap, extension));
}
TEST(ArrayBuffer_Compaction) { TEST(ArrayBuffer_Compaction) {
if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return; if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope; ManualGCScope manual_gc_scope;
...@@ -308,6 +421,40 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) { ...@@ -308,6 +421,40 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
} }
} }
TEST(ArrayBuffer_PagePromotion_Extension) {
  if (!i::FLAG_incremental_marking || !V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  i::FLAG_always_promote_young_mc = true;
  ManualGCScope manual_gc_scope;
  // Verifies that when a whole new-space page is promoted during a full GC,
  // the extension of a live buffer on that page moves to the old list.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  heap::SealCurrentObjects(heap);
  {
    v8::HandleScope handle_scope(isolate);
    // Old-space root that keeps the buffer alive across GCs.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    ArrayBufferExtension* extension;
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      extension = buf->extension();
      root->set(0, *buf);  // Buffer that should be promoted as live.
    }
    std::vector<Handle<FixedArray>> handles;
    // Create live objects on page such that the whole page gets promoted
    heap::FillCurrentPage(heap->new_space(), &handles);
    CHECK(IsTrackedYoung(heap, extension));
    heap::SimulateIncrementalMarking(heap, true);
    heap::GcAndSweep(heap, OLD_SPACE);
    // Page promotion must carry the extension onto the old list.
    CHECK(IsTrackedOld(heap, extension));
  }
}
UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) { UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
if (FLAG_optimize_for_size) return; if (FLAG_optimize_for_size) return;
ManualGCScope manual_gc_scope; ManualGCScope manual_gc_scope;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment