Commit 02320548 authored by ulan, committed by Commit bot

Move map retaining to finalization of incremental marking.

Compaction of the retained maps array happens lazily upon adding new maps (a stand-alone sketch of this pattern follows the commit metadata below).

BUG=

Review URL: https://codereview.chromium.org/1481953002

Cr-Commit-Position: refs/heads/master@{#32717}
parent 3950ca1b
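
The lazy compaction described in the commit message can be illustrated with a small stand-alone sketch (hypothetical types and names, not the V8 API): the list of retained entries is only compacted when an insertion finds it full, so cleared entries are reclaimed on demand rather than on every GC.

// Minimal sketch of compact-on-add; the real code is in Heap::AddRetainedMap
// and Heap::CompactRetainedMaps further down. All names here are illustrative.
#include <cstddef>
#include <vector>

struct RetainedEntry {
  bool cleared;  // stands in for WeakCell::cleared()
  int age;       // stands in for the Smi age slot
};

class RetainedList {
 public:
  explicit RetainedList(size_t capacity) : capacity_(capacity) {}

  void Add(const RetainedEntry& entry) {
    if (entries_.size() >= capacity_) Compact();  // lazy: compact only when full
    entries_.push_back(entry);  // may still grow if nothing was cleared
  }

 private:
  void Compact() {
    size_t kept = 0;
    for (const RetainedEntry& e : entries_) {
      if (!e.cleared) entries_[kept++] = e;  // keep live entries, in place
    }
    entries_.resize(kept);
  }

  size_t capacity_;
  std::vector<RetainedEntry> entries_;
};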
......@@ -514,7 +514,6 @@ void GCTracer::PrintNVP() const {
"mark_prepcodeflush=%.1f "
"mark_root=%.1f "
"mark_topopt=%.1f "
"mark_retainmaps=%.1f "
"mark_weakclosure=%.1f "
"mark_stringtable=%.1f "
"mark_weakrefs=%.1f "
......@@ -585,7 +584,6 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
current_.scopes[Scope::MC_MARK_ROOT],
current_.scopes[Scope::MC_MARK_TOPOPT],
current_.scopes[Scope::MC_MARK_RETAIN_MAPS],
current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
current_.scopes[Scope::MC_MARK_STRING_TABLE],
current_.scopes[Scope::MC_MARK_WEAK_REFERENCES],
......
......@@ -103,7 +103,6 @@ class GCTracer {
MC_MARK_PREPARE_CODE_FLUSH,
MC_MARK_ROOT,
MC_MARK_TOPOPT,
MC_MARK_RETAIN_MAPS,
MC_MARK_WEAK_CLOSURE,
MC_MARK_STRING_TABLE,
MC_MARK_WEAK_REFERENCES,
......
......@@ -5332,6 +5332,9 @@ DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<HeapObject> obj) {
void Heap::AddRetainedMap(Handle<Map> map) {
Handle<WeakCell> cell = Map::WeakCellForMap(map);
Handle<ArrayList> array(retained_maps(), isolate());
if (array->IsFull()) {
CompactRetainedMaps(*array);
}
array = ArrayList::Add(
array, cell, handle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()),
ArrayList::kReloadLengthAfterAllocation);
......@@ -5341,6 +5344,35 @@ void Heap::AddRetainedMap(Handle<Map> map) {
}
void Heap::CompactRetainedMaps(ArrayList* retained_maps) {
DCHECK_EQ(retained_maps, this->retained_maps());
int length = retained_maps->Length();
int new_length = 0;
int new_number_of_disposed_maps = 0;
// This loop compacts the array by removing cleared weak cells.
for (int i = 0; i < length; i += 2) {
DCHECK(retained_maps->Get(i)->IsWeakCell());
WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
Object* age = retained_maps->Get(i + 1);
if (cell->cleared()) continue;
if (i != new_length) {
retained_maps->Set(new_length, cell);
retained_maps->Set(new_length + 1, age);
}
if (i < number_of_disposed_maps_) {
new_number_of_disposed_maps += 2;
}
new_length += 2;
}
number_of_disposed_maps_ = new_number_of_disposed_maps;
Object* undefined = undefined_value();
for (int i = new_length; i < length; i++) {
retained_maps->Clear(i, undefined);
}
if (new_length != length) retained_maps->SetLength(new_length);
}
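
For readers following the index bookkeeping above: the array stores pairs, with even slots holding the weak cell and odd slots holding the age, and number_of_disposed_maps_ is a boundary expressed in array slots, so it advances by 2 per surviving disposed pair. A stand-alone analogue of the same compaction (hypothetical, plain integers instead of heap objects; 0 stands for a cleared cell):

#include <vector>

// Analogue of Heap::CompactRetainedMaps over a flat vector of slots:
// [cell0, age0, cell1, age1, ...]. Not V8 code.
void CompactRetainedPairs(std::vector<int>* slots, int* number_of_disposed_maps) {
  int length = static_cast<int>(slots->size());
  int new_length = 0;
  int new_number_of_disposed_maps = 0;
  for (int i = 0; i < length; i += 2) {
    int cell = (*slots)[i];
    int age = (*slots)[i + 1];
    if (cell == 0) continue;                    // drop cleared entries
    if (i != new_length) {                      // slide the surviving pair down
      (*slots)[new_length] = cell;
      (*slots)[new_length + 1] = age;
    }
    if (i < *number_of_disposed_maps) new_number_of_disposed_maps += 2;
    new_length += 2;
  }
  *number_of_disposed_maps = new_number_of_disposed_maps;
  slots->resize(new_length);  // V8 instead overwrites the tail with undefined
                              // and shrinks the logical Length().
}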
void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot);
}
......
......@@ -1814,6 +1814,8 @@ class Heap {
void AddToRingBuffer(const char* string);
void GetFromRingBuffer(char* buffer);
void CompactRetainedMaps(ArrayList* retained_maps);
// Attempt to over-approximate the weak closure by marking object groups and
// implicit references from global handles, but don't atomically complete
// marking. If we continue to mark incrementally, we might have marked
......
......@@ -683,6 +683,69 @@ void IncrementalMarking::ProcessWeakCells() {
}
bool ShouldRetainMap(Map* map, int age) {
if (age == 0) {
// The map has aged. Do not retain this map.
return false;
}
Object* constructor = map->GetConstructor();
if (!constructor->IsHeapObject() ||
Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
}
return true;
}
void IncrementalMarking::RetainMaps() {
// Do not retain dead maps if flag disables it or there is
// - memory pressure (reduce_memory_footprint_),
// - GC is requested by tests or dev-tools (abort_incremental_marking_).
bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
heap()->ShouldAbortIncrementalMarking() ||
FLAG_retain_maps_for_n_gc == 0;
ArrayList* retained_maps = heap()->retained_maps();
int length = retained_maps->Length();
// The number_of_disposed_maps separates maps in the retained_maps
// array that were created before and after context disposal.
// We do not age and retain disposed maps to avoid memory leaks.
int number_of_disposed_maps = heap()->number_of_disposed_maps_;
for (int i = 0; i < length; i += 2) {
DCHECK(retained_maps->Get(i)->IsWeakCell());
WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
if (cell->cleared()) continue;
int age = Smi::cast(retained_maps->Get(i + 1))->value();
int new_age;
Map* map = Map::cast(cell->value());
MarkBit map_mark = Marking::MarkBitFrom(map);
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
Marking::IsWhite(map_mark)) {
if (ShouldRetainMap(map, age)) {
MarkObject(heap(), map);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
}
} else {
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (new_age != age) {
retained_maps->Set(i + 1, Smi::FromInt(new_age));
}
}
}
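
The per-map decision above can be condensed into a small model (a sketch with assumed values; in the real code a map that is already marked, that lies before the disposed-maps boundary, or that is visited while retaining is disabled instead gets its age reset to FLAG_retain_maps_for_n_gc):

#include <cstdio>

// Hypothetical model, not V8 code: one RetainMaps() visit of a single map.
struct MapState {
  bool constructor_alive;
  bool prototype_alive;
};

// Returns the new age; *retained reports whether the map was marked (kept).
int AgeOneCycle(const MapState& map, int age, bool* retained) {
  *retained = age > 0 && map.constructor_alive;         // ShouldRetainMap()
  if (age > 0 && !map.prototype_alive) return age - 1;  // prototype dead: age it
  return age;  // prototype alive: map only keeps the transition tree alive
}

int main() {
  const int kRetainMapsForNGc = 2;  // assumed flag value for illustration
  MapState dying_map = {true /* constructor alive */, false /* prototype dead */};
  int age = kRetainMapsForNGc;
  for (int cycle = 1; cycle <= 3; ++cycle) {
    bool retained;
    age = AgeOneCycle(dying_map, age, &retained);
    std::printf("cycle %d: retained=%d new_age=%d\n", cycle, retained, age);
  }
  // Prints retained=1/age=1, retained=1/age=0, retained=0/age=0: the map is
  // kept alive for kRetainMapsForNGc cycles and can be collected afterwards.
  return 0;
}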
void IncrementalMarking::FinalizeIncrementally() {
DCHECK(!finalize_marking_completed_);
DCHECK(IsMarking());
......@@ -696,10 +759,16 @@ void IncrementalMarking::FinalizeIncrementally() {
// objects to reduce the marking load in the final pause.
// 1) We scan and mark the roots again to find all changes to the root set.
// 2) We mark the object groups.
// 3) Remove weak cell with live values from the list of weak cells, they
// 3) Age and retain maps embedded in optimized code.
// 4) Remove weak cell with live values from the list of weak cells, they
// do not need processing during GC.
MarkRoots();
MarkObjectGroups();
if (incremental_marking_finalization_rounds_ == 0) {
// Map retaining is needed for performance, not correctness,
// so we can do it only once at the beginning of the finalization.
RetainMaps();
}
ProcessWeakCells();
int marking_progress =
......
......@@ -241,6 +241,9 @@ class IncrementalMarking {
void MarkRoots();
void MarkObjectGroups();
void ProcessWeakCells();
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
void RetainMaps();
void ActivateIncrementalWriteBarrier(PagedSpace* space);
static void ActivateIncrementalWriteBarrier(NewSpace* space);
......
......@@ -1845,90 +1845,6 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
}
bool ShouldRetainMap(Map* map, int age) {
if (age == 0) {
// The map has aged. Do not retain this map.
return false;
}
Object* constructor = map->GetConstructor();
if (!constructor->IsHeapObject() ||
Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
}
return true;
}
void MarkCompactCollector::RetainMaps() {
// Do not retain dead maps if flag disables it or there is
// - memory pressure (reduce_memory_footprint_),
// - GC is requested by tests or dev-tools (abort_incremental_marking_).
bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
heap()->ShouldAbortIncrementalMarking() ||
FLAG_retain_maps_for_n_gc == 0;
ArrayList* retained_maps = heap()->retained_maps();
int length = retained_maps->Length();
int new_length = 0;
// The number_of_disposed_maps separates maps in the retained_maps
// array that were created before and after context disposal.
// We do not age and retain disposed maps to avoid memory leaks.
int number_of_disposed_maps = heap()->number_of_disposed_maps_;
int new_number_of_disposed_maps = 0;
// This loop compacts the array by removing cleared weak cells,
// ages and retains dead maps.
for (int i = 0; i < length; i += 2) {
DCHECK(retained_maps->Get(i)->IsWeakCell());
WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
if (cell->cleared()) continue;
int age = Smi::cast(retained_maps->Get(i + 1))->value();
int new_age;
Map* map = Map::cast(cell->value());
MarkBit map_mark = Marking::MarkBitFrom(map);
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
Marking::IsWhite(map_mark)) {
if (ShouldRetainMap(map, age)) {
MarkObject(map, map_mark);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
}
} else {
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (i != new_length) {
retained_maps->Set(new_length, cell);
Object** slot = retained_maps->Slot(new_length);
RecordSlot(retained_maps, slot, cell);
retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
} else if (new_age != age) {
retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
}
if (i < number_of_disposed_maps) {
new_number_of_disposed_maps++;
}
new_length += 2;
}
heap()->number_of_disposed_maps_ = new_number_of_disposed_maps;
Object* undefined = heap()->undefined_value();
for (int i = new_length; i < length; i++) {
retained_maps->Clear(i, undefined);
}
if (new_length != length) retained_maps->SetLength(new_length);
ProcessMarkingDeque();
}
void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
DCHECK(!marking_deque_.in_use());
if (marking_deque_memory_ == NULL) {
......@@ -2062,15 +1978,6 @@ void MarkCompactCollector::MarkLiveObjects() {
ProcessTopOptimizedFrame(&root_visitor);
}
// Retaining dying maps should happen before or during ephemeral marking
// because a map could keep the key of an ephemeron alive. Note that map
// aging is imprecise: maps that are kept alive only by ephemerons will age.
{
GCTracer::Scope gc_scope(heap()->tracer(),
GCTracer::Scope::MC_MARK_RETAIN_MAPS);
RetainMaps();
}
{
GCTracer::Scope gc_scope(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE);
......
......@@ -617,10 +617,6 @@ class MarkCompactCollector {
// otherwise a map can die and deoptimize the code.
void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
void RetainMaps();
// Collects a list of dependent code from maps embedded in optimize code.
DependentCode* DependentCodeListFromNonLiveMaps();
......
......@@ -10393,6 +10393,12 @@ Handle<ArrayList> ArrayList::Add(Handle<ArrayList> array, Handle<Object> obj1,
}
bool ArrayList::IsFull() {
int capacity = length();
return kFirstIndex + Length() == capacity;
}
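
For context on the new predicate: length() is inherited from FixedArray and reports the capacity of the backing store, including the header slot(s) before kFirstIndex, while Length() is the logical number of payload slots in use, so IsFull() means there is no free payload slot left. A hypothetical analogue (assuming a one-slot header; not the real layout constants):

#include <vector>

class FakeArrayList {
 public:
  explicit FakeArrayList(int capacity) : backing_(capacity) {}
  int length() const { return static_cast<int>(backing_.size()); }  // capacity
  int Length() const { return used_; }               // payload slots in use
  bool IsFull() const { return kFirstIndex + Length() == length(); }

  // Appends only if there is still room; mirrors the shape of the real check.
  bool TryAdd(int value) {
    if (IsFull()) return false;
    backing_[kFirstIndex + used_++] = value;
    return true;
  }

 private:
  static const int kFirstIndex = 1;  // assumed header size for illustration
  std::vector<int> backing_;
  int used_ = 0;
};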
Handle<ArrayList> ArrayList::EnsureSpace(Handle<ArrayList> array, int length) {
int capacity = array->length();
bool empty = (capacity == 0);
......
......@@ -2787,6 +2787,7 @@ class ArrayList : public FixedArray {
inline Object** Slot(int index);
inline void Set(int index, Object* obj);
inline void Clear(int index, Object* undefined);
bool IsFull();
DECLARE_CAST(ArrayList)
private:
......
......@@ -5923,9 +5923,11 @@ void CheckMapRetainingFor(int n) {
Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
CHECK(!weak_cell->cleared());
for (int i = 0; i < n; i++) {
SimulateIncrementalMarking(heap);
heap->CollectGarbage(OLD_SPACE);
}
CHECK(!weak_cell->cleared());
SimulateIncrementalMarking(heap);
heap->CollectGarbage(OLD_SPACE);
CHECK(weak_cell->cleared());
}
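
For context, the helper above would typically be driven from a cctest; the exact invocations below are an assumption for illustration, not part of this diff:

// Hypothetical driver for CheckMapRetainingFor(); argument values are
// illustrative only.
TEST(MapRetaining) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(3);
}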
......