Commit e777784f authored by ulan, committed by Commit bot

Optimize ClearNonLiveReferences: collect dependent code only from maps that
are embedded in optimized code.

BUG=chromium:554488
LOG=NO

Review URL: https://codereview.chromium.org/1471703002

Cr-Commit-Position: refs/heads/master@{#32218}
parent 0fb2edd1
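
The old code walked the dependent code array of every dead map it encountered while clearing map transitions; the new code first collects dependent code only from dead maps recorded in retained_maps (which is where maps embedded in optimized code are recorded) and then deoptimizes that list in one pass. A rough standalone sketch of the two-phase idea, using toy types (FakeMap, FakeDependentCode) rather than V8's actual classes:

    #include <cstdio>
    #include <vector>

    // Toy stand-ins for illustration only; these are not V8 types.
    struct FakeDependentCode {
      std::vector<int> code_ids;               // the "weak code group"
      FakeDependentCode* next_link = nullptr;  // threaded through during phase 1
    };

    struct FakeMap {
      bool is_marked;  // mark bit: true means the map is still live
      FakeDependentCode dependent_code;
    };

    // Phase 1: link the dependent code of dead retained maps into a list.
    // No allocation is needed: the list is threaded through existing objects.
    FakeDependentCode* ExtractDependentCodeFromDeadMaps(
        std::vector<FakeMap>& retained_maps) {
      FakeDependentCode* head = nullptr;
      for (FakeMap& map : retained_maps) {
        if (!map.is_marked && !map.dependent_code.code_ids.empty()) {
          map.dependent_code.next_link = head;
          head = &map.dependent_code;
        }
      }
      return head;
    }

    // Phase 2: walk the list once and deoptimize everything on it.
    void DeoptimizeList(FakeDependentCode* head) {
      for (FakeDependentCode* cur = head; cur != nullptr; cur = cur->next_link) {
        for (int id : cur->code_ids) std::printf("deoptimize code object %d\n", id);
      }
    }

    int main() {
      std::vector<FakeMap> retained_maps = {
          {true, {{1, 2}}},   // live map: its optimized code survives
          {false, {{3}}},     // dead map: code 3 is deoptimized
          {false, {{4, 5}}},  // dead map: codes 4 and 5 are deoptimized
      };
      DeoptimizeList(ExtractDependentCodeFromDeadMaps(retained_maps));
      return 0;
    }
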
@@ -528,6 +528,8 @@ void GCTracer::PrintNVP() const {
       "weakcollection_abort=%.1f "
       "weakcells=%.1f "
       "nonlive_refs=%.1f "
+      "extract_dependent_code=%.1f "
+      "deopt_dependent_code=%.1f "
       "steps_count=%d "
       "steps_took=%.1f "
       "longest_step=%.1f "
@@ -595,6 +597,8 @@ void GCTracer::PrintNVP() const {
       current_.scopes[Scope::MC_WEAKCOLLECTION_ABORT],
       current_.scopes[Scope::MC_WEAKCELL],
       current_.scopes[Scope::MC_NONLIVEREFERENCES],
+      current_.scopes[Scope::MC_EXTRACT_DEPENDENT_CODE],
+      current_.scopes[Scope::MC_DEOPT_DEPENDENT_CODE],
       current_.incremental_marking_steps,
       current_.incremental_marking_duration,
       current_.longest_incremental_marking_step,

@@ -131,7 +131,9 @@ class GCTracer {
     MC_WEAKCOLLECTION_CLEAR,
     MC_WEAKCOLLECTION_ABORT,
     MC_WEAKCELL,
+    MC_EXTRACT_DEPENDENT_CODE,
     MC_NONLIVEREFERENCES,
+    MC_DEOPT_DEPENDENT_CODE,
     MC_FLUSH_CODE,
     SCAVENGER_CODE_FLUSH_CANDIDATES,
     SCAVENGER_OBJECT_GROUPS,

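
The two new enum values plug into the same scoped timing as the existing MC_* phases: a GCTracer::Scope object charges the time between its construction and destruction to the named phase, which is what the new extract_dependent_code and deopt_dependent_code fields in PrintNVP report. A minimal sketch of that RAII timing pattern in isolation (generic code, not V8's GCTracer implementation):

    #include <chrono>
    #include <cstdio>

    // Per-phase accumulators, filled in by ScopedTimer on destruction.
    enum Phase { EXTRACT_DEPENDENT_CODE, DEOPT_DEPENDENT_CODE, NUM_PHASES };
    static double phase_ms[NUM_PHASES] = {0.0};

    class ScopedTimer {
     public:
      explicit ScopedTimer(Phase phase)
          : phase_(phase), start_(std::chrono::steady_clock::now()) {}
      ~ScopedTimer() {
        auto end = std::chrono::steady_clock::now();
        phase_ms[phase_] +=
            std::chrono::duration<double, std::milli>(end - start_).count();
      }

     private:
      Phase phase_;
      std::chrono::steady_clock::time_point start_;
    };

    int main() {
      {
        ScopedTimer timer(EXTRACT_DEPENDENT_CODE);  // timing starts here...
        // ... the work of the phase would run here ...
      }  // ...and is charged to the phase when the scope closes.
      std::printf("extract_dependent_code=%.1f\n", phase_ms[EXTRACT_DEPENDENT_CODE]);
      return 0;
    }
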
@@ -91,6 +91,7 @@ Heap::Heap()
       survived_last_scavenge_(0),
       always_allocate_scope_count_(0),
       contexts_disposed_(0),
+      number_of_disposed_maps_(0),
       global_ic_age_(0),
       scan_on_scavenge_pages_(0),
       new_space_(this),
@@ -1037,7 +1038,7 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   AgeInlineCaches();
-  set_retained_maps(ArrayList::cast(empty_fixed_array()));
+  number_of_disposed_maps_ = retained_maps()->Length();
   tracer()->AddContextDisposalTime(MonotonicallyIncreasingTimeInMs());
   return ++contexts_disposed_;
 }
@@ -5366,7 +5367,6 @@ DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<HeapObject> obj) {
 void Heap::AddRetainedMap(Handle<Map> map) {
-  if (FLAG_retain_maps_for_n_gc == 0) return;
   Handle<WeakCell> cell = Map::WeakCellForMap(map);
   Handle<ArrayList> array(retained_maps(), isolate());
   array = ArrayList::Add(

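
Note that the FLAG_retain_maps_for_n_gc early return is removed from AddRetainedMap, while RetainMaps keeps its own flag check (now part of map_retaining_is_disabled). Presumably the array must be populated unconditionally because it now also feeds dependent-code extraction; the diff itself only shows the producer-side check being dropped. A tiny sketch of that producer-records/consumer-filters split (hypothetical names, not V8 API):

    #include <cstdio>
    #include <vector>

    // Producer side: record every map embedded in optimized code, unconditionally.
    struct RecordedMaps {
      std::vector<int> maps;
      void AddRetainedMap(int map_id) { maps.push_back(map_id); }
    };

    // Consumer side: each pass decides for itself whether to act on the records.
    void RetainMaps(const RecordedMaps& r, int retain_for_n_gc) {
      if (retain_for_n_gc == 0) return;  // retention disabled: nothing to do here
      std::printf("would retain up to %zu maps\n", r.maps.size());
    }

    void ExtractDependentCode(const RecordedMaps& r) {
      // Extraction still needs the full record, even when retention is disabled.
      std::printf("scanning %zu maps for dependent code\n", r.maps.size());
    }

    int main() {
      RecordedMaps r;
      r.AddRetainedMap(7);
      RetainMaps(r, 0);         // prints nothing: the flag disables retention
      ExtractDependentCode(r);  // still sees the recorded map
      return 0;
    }
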
@@ -2149,6 +2149,11 @@ class Heap {
   // For keeping track of context disposals.
   int contexts_disposed_;
 
+  // The length of the retained_maps array at the time of context disposal.
+  // This separates maps in the retained_maps array that were created before
+  // and after context disposal.
+  int number_of_disposed_maps_;
+
   int global_ic_age_;
 
   int scan_on_scavenge_pages_;

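
Previously NotifyContextDisposed dropped the whole retained_maps array; with this change it only records the array's current length in number_of_disposed_maps_. That watermark splits the array into maps from already-disposed contexts (indices below it) and maps created afterwards, so the former can die promptly while the latter are still aged and retained. A small illustration of the invariant, using a plain std::vector<int> in place of the real ArrayList of weak cells:

    #include <cassert>
    #include <vector>

    // Entries from oldest to newest; the watermark records how many of them
    // were already present when the last context was disposed.
    struct RetainedMaps {
      std::vector<int> maps;
      int number_of_disposed_maps = 0;

      void NotifyContextDisposed() {
        // Keep the array, just remember where the pre-disposal maps end.
        number_of_disposed_maps = static_cast<int>(maps.size());
      }

      bool CreatedBeforeDisposal(int index) const {
        return index < number_of_disposed_maps;
      }
    };

    int main() {
      RetainedMaps retained;
      retained.maps = {10, 11, 12};  // maps created by the first context
      retained.NotifyContextDisposed();
      retained.maps.push_back(20);   // a map created after disposal
      assert(retained.CreatedBeforeDisposal(0));   // old map: not aged or retained
      assert(!retained.CreatedBeforeDisposal(3));  // new map: eligible for retention
      return 0;
    }
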
@@ -349,16 +349,7 @@ void MarkCompactCollector::CollectGarbage() {
   DCHECK(heap_->incremental_marking()->IsStopped());
 
-  // ClearNonLiveReferences can deoptimize code in dependent code arrays.
-  // Process weak cells before so that weak cells in dependent code
-  // arrays are cleared or contain only live code objects.
-  ProcessAndClearWeakCells();
-  ClearNonLiveReferences();
-  ClearWeakCollections();
-  heap_->set_encountered_weak_cells(Smi::FromInt(0));
+  ProcessWeakReferences();
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
@@ -1821,18 +1812,40 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
 }
 
+bool ShouldRetainMap(Map* map, int age) {
+  if (age == 0) {
+    // The map has aged. Do not retain this map.
+    return false;
+  }
+  Object* constructor = map->GetConstructor();
+  if (!constructor->IsHeapObject() ||
+      Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(constructor)))) {
+    // The constructor is dead, no new objects with this map can
+    // be created. Do not retain this map.
+    return false;
+  }
+  return true;
+}
+
 void MarkCompactCollector::RetainMaps() {
-  if (heap()->ShouldReduceMemory() || heap()->ShouldAbortIncrementalMarking() ||
-      FLAG_retain_maps_for_n_gc == 0) {
   // Do not retain dead maps if flag disables it or there is
   // - memory pressure (reduce_memory_footprint_),
   // - GC is requested by tests or dev-tools (abort_incremental_marking_).
-    return;
-  }
+  bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
+                                   heap()->ShouldAbortIncrementalMarking() ||
+                                   FLAG_retain_maps_for_n_gc == 0;
   ArrayList* retained_maps = heap()->retained_maps();
   int length = retained_maps->Length();
   int new_length = 0;
+  // The number_of_disposed_maps separates maps in the retained_maps
+  // array that were created before and after context disposal.
+  // We do not age and retain disposed maps to avoid memory leaks.
+  int number_of_disposed_maps = heap()->number_of_disposed_maps_;
+  int new_number_of_disposed_maps = 0;
+  // This loop compacts the array by removing cleared weak cells,
+  // ages and retains dead maps.
   for (int i = 0; i < length; i += 2) {
     DCHECK(retained_maps->Get(i)->IsWeakCell());
     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
@@ -1841,20 +1854,13 @@ void MarkCompactCollector::RetainMaps() {
     int new_age;
     Map* map = Map::cast(cell->value());
     MarkBit map_mark = Marking::MarkBitFrom(map);
-    if (Marking::IsWhite(map_mark)) {
-      if (age == 0) {
-        // The map has aged. Do not retain this map.
-        continue;
-      }
-      Object* constructor = map->GetConstructor();
-      if (!constructor->IsHeapObject() || Marking::IsWhite(Marking::MarkBitFrom(
-                                              HeapObject::cast(constructor)))) {
-        // The constructor is dead, no new objects with this map can
-        // be created. Do not retain this map.
-        continue;
+    if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
+        Marking::IsWhite(map_mark)) {
+      if (ShouldRetainMap(map, age)) {
+        MarkObject(map, map_mark);
       }
       Object* prototype = map->prototype();
-      if (prototype->IsHeapObject() &&
+      if (age > 0 && prototype->IsHeapObject() &&
           Marking::IsWhite(Marking::MarkBitFrom(HeapObject::cast(prototype)))) {
         // The prototype is not marked, age the map.
         new_age = age - 1;
@@ -1863,10 +1869,10 @@ void MarkCompactCollector::RetainMaps() {
         // transition tree alive, not JSObjects. Do not age the map.
         new_age = age;
       }
-      MarkObject(map, map_mark);
     } else {
       new_age = FLAG_retain_maps_for_n_gc;
     }
+    // Compact the array and update the age.
     if (i != new_length) {
       retained_maps->Set(new_length, cell);
       Object** slot = retained_maps->Slot(new_length);
@@ -1875,8 +1881,12 @@ void MarkCompactCollector::RetainMaps() {
     } else if (new_age != age) {
       retained_maps->Set(new_length + 1, Smi::FromInt(new_age));
     }
+    if (i < number_of_disposed_maps) {
+      new_number_of_disposed_maps++;
+    }
     new_length += 2;
   }
+  heap()->number_of_disposed_maps_ = new_number_of_disposed_maps;
   Object* undefined = heap()->undefined_value();
   for (int i = new_length; i < length; i++) {
     retained_maps->Clear(i, undefined);
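
The loop above does three things at once: it compacts the array by dropping cleared weak cells, it retains and ages dead maps that were created after the last context disposal, and it recomputes the watermark for the compacted layout. A condensed standalone model of that bookkeeping (one struct per entry instead of WeakCell/Smi slot pairs, and with the constructor/prototype liveness checks elided):

    #include <cstdio>
    #include <vector>

    // One retained_maps entry in this toy model; the real array stores a
    // WeakCell for the map plus a Smi age, i.e. two slots per map.
    struct Entry {
      bool cell_cleared;  // the weak cell no longer points at a map
      bool map_marked;    // mark bit of the map in this GC cycle
      int age;            // how many more GCs a dead map may be kept alive
    };

    // Mirrors the shape of MarkCompactCollector::RetainMaps(): compact away
    // cleared cells, retain and age young dead maps created after the last
    // context disposal, and recompute the disposed-maps watermark.
    void RetainMaps(std::vector<Entry>* retained, int* number_of_disposed_maps,
                    int retain_for_n_gc) {
      std::vector<Entry> compacted;
      int new_watermark = 0;
      for (int i = 0; i < static_cast<int>(retained->size()); i++) {
        Entry e = (*retained)[i];
        if (e.cell_cleared) continue;  // compaction: drop cleared cells
        if (i >= *number_of_disposed_maps && !e.map_marked) {
          if (e.age > 0) {
            e.map_marked = true;  // retain: keep the dead map alive this cycle
            e.age--;              // and age it
          }
          // age == 0: leave the map dead; its cell is cleared and dropped later.
        } else if (e.map_marked) {
          e.age = retain_for_n_gc;  // live maps get a fresh age
        }
        // Maps from disposed contexts (i < watermark) are neither retained nor
        // aged, so their memory can be reclaimed promptly.
        if (i < *number_of_disposed_maps) new_watermark++;
        compacted.push_back(e);
      }
      *number_of_disposed_maps = new_watermark;
      *retained = compacted;
    }

    int main() {
      int watermark = 1;  // the first entry predates the last context disposal
      std::vector<Entry> retained = {
          {false, false, 3},  // disposed-context map: deliberately left dead
          {true, false, 2},   // cleared cell: compacted away
          {false, false, 2},  // young dead map: retained, age drops to 1
          {false, true, 1},   // live map: age reset to retain_for_n_gc
      };
      RetainMaps(&retained, &watermark, 5);
      std::printf("entries=%zu watermark=%d\n", retained.size(), watermark);  // 3 1
      return 0;
    }
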
@@ -1886,6 +1896,33 @@ void MarkCompactCollector::RetainMaps() {
 }
 
+DependentCode* MarkCompactCollector::DependentCodeListFromNonLiveMaps() {
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_EXTRACT_DEPENDENT_CODE);
+  ArrayList* retained_maps = heap()->retained_maps();
+  int length = retained_maps->Length();
+  DependentCode* head = DependentCode::cast(heap()->empty_fixed_array());
+  for (int i = 0; i < length; i += 2) {
+    DCHECK(retained_maps->Get(i)->IsWeakCell());
+    WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
+    DCHECK(!cell->cleared());
+    Map* map = Map::cast(cell->value());
+    MarkBit map_mark = Marking::MarkBitFrom(map);
+    if (Marking::IsWhite(map_mark)) {
+      DependentCode* candidate = map->dependent_code();
+      // We rely on the fact that the weak code group comes first.
+      STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
+      if (candidate->length() > 0 &&
+          candidate->group() == DependentCode::kWeakCodeGroup) {
+        candidate->set_next_link(head);
+        head = candidate;
+      }
+    }
+  }
+  return head;
+}
+
 void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
   DCHECK(!marking_deque_.in_use());
   if (marking_deque_memory_ == NULL) {
@@ -2192,6 +2229,26 @@ void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
 }
 
+void MarkCompactCollector::ProcessWeakReferences() {
+  // This should be done before processing weak cells because it checks
+  // mark bits of maps in weak cells.
+  DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps();
+  // Process weak cells before MarkCodeForDeoptimization and
+  // ClearNonLiveReferences so that weak cells in dependent code arrays are
+  // cleared or contain only live code objects.
+  ProcessAndClearWeakCells();
+  MarkDependentCodeListForDeoptimization(dependent_code_list);
+  ClearNonLiveReferences();
+  ClearWeakCollections();
+  heap_->set_encountered_weak_cells(Smi::FromInt(0));
+}
+
 void MarkCompactCollector::ClearNonLiveReferences() {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_NONLIVEREFERENCES);
@@ -2209,10 +2266,6 @@ void MarkCompactCollector::ClearNonLiveReferences() {
       ClearNonLivePrototypeTransitions(map);
     } else {
       ClearNonLiveMapTransitions(map);
-      have_code_to_deoptimize_ |=
-          map->dependent_code()->MarkCodeForDeoptimization(
-              isolate(), DependentCode::kWeakCodeGroup);
-      map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }
@@ -2237,6 +2290,20 @@ void MarkCompactCollector::ClearNonLiveReferences() {
 }
 
+void MarkCompactCollector::MarkDependentCodeListForDeoptimization(
+    DependentCode* list_head) {
+  GCTracer::Scope gc_scope(heap()->tracer(),
+                           GCTracer::Scope::MC_DEOPT_DEPENDENT_CODE);
+  Isolate* isolate = this->isolate();
+  DependentCode* current = list_head;
+  while (current->length() > 0) {
+    have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
+        isolate, DependentCode::kWeakCodeGroup);
+    current = current->next_link();
+  }
+}
+
 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   FixedArray* prototype_transitions =
       TransitionArray::GetPrototypeTransitions(map);

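
One detail in the walk above: the extracted list is terminated by the empty fixed array rather than by null, so MarkDependentCodeListForDeoptimization stops when it reaches a node whose length() is zero. A minimal sketch of that sentinel-terminated traversal, with a toy CodeList type standing in for DependentCode:

    #include <cstdio>

    // Toy stand-in for a dependent code array: just a length and a next link.
    struct CodeList {
      int length;  // 0 only for the shared sentinel (the "empty fixed array")
      CodeList* next_link;
    };

    // Termination is a length check instead of a null check because every list
    // ends in the same zero-length sentinel object.
    void MarkListForDeoptimization(CodeList* head) {
      for (CodeList* current = head; current->length > 0;
           current = current->next_link) {
        std::printf("deoptimizing %d dependent code entries\n", current->length);
      }
    }

    int main() {
      CodeList sentinel{0, nullptr};
      CodeList second{2, &sentinel};
      CodeList first{1, &second};
      MarkListForDeoptimization(&first);  // visits first, then second, then stops
      return 0;
    }
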
@@ -627,6 +627,9 @@ class MarkCompactCollector {
   // increase chances of reusing of map transition tree in future.
   void RetainMaps();
 
+  // Collects a list of dependent code from maps embedded in optimized code.
+  DependentCode* DependentCodeListFromNonLiveMaps();
+
   // Mark objects reachable (transitively) from objects in the marking
   // stack. This function empties the marking stack, but may leave
   // overflowed objects in the heap, in which case the marking stack's
@@ -657,6 +660,7 @@ class MarkCompactCollector {
   void ClearNonLiveMapTransitions(Map* map);
   void ClearMapTransitions(Map* map, Map* dead_transition);
   bool ClearMapBackPointer(Map* map);
+  void MarkDependentCodeListForDeoptimization(DependentCode* list_head);
   void TrimDescriptorArray(Map* map, DescriptorArray* descriptors,
                            int number_of_own_descriptors);
   void TrimEnumCache(Map* map, DescriptorArray* descriptors);
@@ -683,6 +687,9 @@ class MarkCompactCollector {
   // trimming or clearing out the entire optimized code map.
   void ProcessAndClearOptimizedCodeMaps();
 
+  // Process non-live references in maps and optimized code.
+  void ProcessWeakReferences();
+
   // -----------------------------------------------------------------------
   // Phase 2: Sweeping to clear mark bits and free non-live objects for
   // a non-compacting collection.