Commit 8c376b46 authored by ulan, committed by Commit bot

Optimize clearing of map transitions.

Instead of iterating the whole map space to find dead transitions,
look in the weak cell list and the transition array list.

Simple transitions are recorded in the weak cell list.

Full transitions are recorded in the transition array list.

BUG=chromium:554488
LOG=NO

Review URL: https://codereview.chromium.org/1488593003

Cr-Commit-Position: refs/heads/master@{#32684}
parent 14613c16
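As an illustration of the new clearing scheme, here is a minimal, self-contained C++ sketch (the types below are simplified stand-ins, not the real V8 classes): instead of scanning every map in the map space, clearing walks only the weak cells that marking recorded, and drops a simple transition whose target map did not survive marking.

  #include <cstdio>

  // Simplified stand-ins for V8's Map and WeakCell; the field names are
  // illustrative only and do not match the real object layouts.
  struct Map {
    bool is_live;            // result of the marking phase
    Map* parent;             // back pointer to the transition source
    Map* simple_transition;  // target of a simple transition, if any
  };

  struct WeakCell {
    Map* value;      // the (dead) transition target this cell points to
    WeakCell* next;  // list of weak cells encountered during marking
  };

  // Walk only the recorded weak cells; cost is proportional to the number of
  // transitions encountered during marking, not to the size of the map space.
  void ClearSimpleMapTransitions(WeakCell* non_live_map_list) {
    for (WeakCell* cell = non_live_map_list; cell != nullptr; cell = cell->next) {
      Map* dead_target = cell->value;
      Map* parent = dead_target->parent;
      if (parent != nullptr && parent->is_live &&
          parent->simple_transition == dead_target) {
        parent->simple_transition = nullptr;  // kill the dead transition
      }
    }
  }

  int main() {
    Map parent{true, nullptr, nullptr};
    Map dead_child{false, &parent, nullptr};
    parent.simple_transition = &dead_child;
    WeakCell cell{&dead_child, nullptr};
    ClearSimpleMapTransitions(&cell);
    std::printf("transition cleared: %s\n",
                parent.simple_transition == nullptr ? "yes" : "no");
    return 0;
  }

Full transition arrays are handled analogously by ClearFullMapTransitions in the patch below, which walks the global list of encountered transition arrays and compacts each one in place.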
@@ -541,10 +541,10 @@ void GCTracer::PrintNVP() const {
        "weakcollection_process=%.1f "
        "weakcollection_clear=%.1f "
        "weakcollection_abort=%.1f "
-       "weakcells=%.1f "
-       "nonlive_refs=%.1f "
-       "extract_dependent_code=%.1f "
-       "deopt_dependent_code=%.1f "
+       "clear=%1f"
+       "clear_weakcell=%.1f "
+       "clear_map=%.1f "
+       "clear_dependent_code=%.1f "
        "steps_count=%d "
        "steps_took=%.1f "
        "longest_step=%.1f "
@@ -613,10 +613,10 @@ void GCTracer::PrintNVP() const {
        current_.scopes[Scope::MC_WEAKCOLLECTION_PROCESS],
        current_.scopes[Scope::MC_WEAKCOLLECTION_CLEAR],
        current_.scopes[Scope::MC_WEAKCOLLECTION_ABORT],
-       current_.scopes[Scope::MC_WEAKCELL],
-       current_.scopes[Scope::MC_NONLIVEREFERENCES],
-       current_.scopes[Scope::MC_EXTRACT_DEPENDENT_CODE],
-       current_.scopes[Scope::MC_DEOPT_DEPENDENT_CODE],
+       current_.scopes[Scope::MC_CLEAR],
+       current_.scopes[Scope::MC_CLEAR_WEAKCELL],
+       current_.scopes[Scope::MC_CLEAR_MAP],
+       current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
        current_.incremental_marking_steps,
        current_.incremental_marking_duration,
        current_.longest_incremental_marking_step,
......
@@ -130,10 +130,10 @@ class GCTracer {
     MC_WEAKCOLLECTION_PROCESS,
     MC_WEAKCOLLECTION_CLEAR,
     MC_WEAKCOLLECTION_ABORT,
-    MC_WEAKCELL,
-    MC_EXTRACT_DEPENDENT_CODE,
-    MC_NONLIVEREFERENCES,
-    MC_DEOPT_DEPENDENT_CODE,
+    MC_CLEAR,
+    MC_CLEAR_WEAKCELL,
+    MC_CLEAR_MAP,
+    MC_CLEAR_DEPENDENT_CODE,
     MC_FLUSH_CODE,
     SCAVENGER_CODE_FLUSH_CANDIDATES,
     SCAVENGER_OBJECT_GROUPS,
......
@@ -2486,7 +2486,7 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   }
   result->set_map_no_write_barrier(weak_cell_map());
   WeakCell::cast(result)->initialize(value);
-  WeakCell::cast(result)->clear_next(this);
+  WeakCell::cast(result)->clear_next(the_hole_value());
   return result;
 }
......
@@ -649,6 +649,7 @@ void IncrementalMarking::ProcessWeakCells() {
   DCHECK(!finalize_marking_completed_);
   DCHECK(IsMarking());

+  Object* the_hole_value = heap()->the_hole_value();
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   Object* weak_cell_head = Smi::FromInt(0);
   WeakCell* prev_weak_cell_obj = NULL;
@@ -668,7 +669,7 @@ void IncrementalMarking::ProcessWeakCells() {
         prev_weak_cell_obj->set_next(weak_cell->next());
       }
       weak_cell_obj = weak_cell->next();
-      weak_cell->clear_next(heap());
+      weak_cell->clear_next(the_hole_value);
     } else {
       if (weak_cell_head == Smi::FromInt(0)) {
         weak_cell_head = weak_cell;
......
@@ -362,7 +362,9 @@ void MarkCompactCollector::CollectGarbage() {
   DCHECK(heap_->incremental_marking()->IsStopped());

-  ProcessWeakReferences();
+  ClearNonLiveReferences();
+
+  ClearWeakCollections();

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
@@ -1937,33 +1939,6 @@ void MarkCompactCollector::RetainMaps() {
 }

-DependentCode* MarkCompactCollector::DependentCodeListFromNonLiveMaps() {
-  GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_EXTRACT_DEPENDENT_CODE);
-  ArrayList* retained_maps = heap()->retained_maps();
-  int length = retained_maps->Length();
-  DependentCode* head = DependentCode::cast(heap()->empty_fixed_array());
-  for (int i = 0; i < length; i += 2) {
-    DCHECK(retained_maps->Get(i)->IsWeakCell());
-    WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
-    DCHECK(!cell->cleared());
-    Map* map = Map::cast(cell->value());
-    MarkBit map_mark = Marking::MarkBitFrom(map);
-    if (Marking::IsWhite(map_mark)) {
-      DependentCode* candidate = map->dependent_code();
-      // We rely on the fact that the weak code group comes first.
-      STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
-      if (candidate->length() > 0 &&
-          candidate->group() == DependentCode::kWeakCodeGroup) {
-        candidate->set_next_link(head);
-        head = candidate;
-      }
-    }
-  }
-  return head;
-}
-
 void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
   DCHECK(!marking_deque_.in_use());
   if (marking_deque_memory_ == NULL) {
@@ -2270,42 +2245,34 @@ void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
 }

-void MarkCompactCollector::ProcessWeakReferences() {
-  // This should be done before processing weak cells because it checks
-  // mark bits of maps in weak cells.
-  DependentCode* dependent_code_list = DependentCodeListFromNonLiveMaps();
-
-  // Process weak cells before MarkCodeForDeoptimization and
-  // ClearNonLiveReferences so that weak cells in dependent code arrays are
-  // cleared or contain only live code objects.
-  ProcessAndClearWeakCells();
-
-  MarkDependentCodeListForDeoptimization(dependent_code_list);
-
-  ClearNonLiveReferences();
-
-  ClearWeakCollections();
-}
+void MarkCompactCollector::ClearNonLiveReferences() {
+  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
+
+  DependentCode* dependent_code_list;
+  Object* non_live_map_list;
+  ClearWeakCells(&non_live_map_list, &dependent_code_list);
+
+  {
+    GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAP);
+    ClearSimpleMapTransitions(non_live_map_list);
+    ClearFullMapTransitions();
+  }
+
+  MarkDependentCodeForDeoptimization(dependent_code_list);
+}

-void MarkCompactCollector::ClearNonLiveReferences() {
+void MarkCompactCollector::MarkDependentCodeForDeoptimization(
+    DependentCode* list_head) {
   GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_NONLIVEREFERENCES);
-  ProcessAndClearTransitionArrays();
-
-  // Iterate over the map space, setting map transitions that go from
-  // a marked map to an unmarked map to null transitions. This action
-  // is carried out only on maps of JSObjects and related subtypes.
-  HeapObjectIterator map_iterator(heap()->map_space());
-  for (HeapObject* obj = map_iterator.Next(); obj != NULL;
-       obj = map_iterator.Next()) {
-    Map* map = Map::cast(obj);
-    if (!map->CanTransition()) continue;
-    MarkBit map_mark = Marking::MarkBitFrom(map);
-    if (Marking::IsWhite(map_mark)) {
-      ClearNonLiveMapTransitions(map);
-    }
+                           GCTracer::Scope::MC_CLEAR_DEPENDENT_CODE);
+  Isolate* isolate = this->isolate();
+  DependentCode* current = list_head;
+  while (current->length() > 0) {
+    have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
+        isolate, DependentCode::kWeakCodeGroup);
+    current = current->next_link();
   }

   WeakHashTable* table = heap_->weak_object_to_code_table();
@@ -2320,7 +2287,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
     if (WeakCell::cast(key)->cleared()) {
       have_code_to_deoptimize_ |=
           DependentCode::cast(value)->MarkCodeForDeoptimization(
-              isolate(), DependentCode::kWeakCodeGroup);
+              isolate, DependentCode::kWeakCodeGroup);
       table->set(key_index, heap_->the_hole_value());
       table->set(value_index, heap_->the_hole_value());
       table->ElementRemoved();
@@ -2329,133 +2296,129 @@ void MarkCompactCollector::ClearNonLiveReferences() {
 }

-void MarkCompactCollector::MarkDependentCodeListForDeoptimization(
-    DependentCode* list_head) {
-  GCTracer::Scope gc_scope(heap()->tracer(),
-                           GCTracer::Scope::MC_DEOPT_DEPENDENT_CODE);
-  Isolate* isolate = this->isolate();
-  DependentCode* current = list_head;
-  while (current->length() > 0) {
-    have_code_to_deoptimize_ |= current->MarkCodeForDeoptimization(
-        isolate, DependentCode::kWeakCodeGroup);
-    current = current->next_link();
-  }
-}
-
-void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map) {
-  Object* potential_parent = map->GetBackPointer();
-  if (!potential_parent->IsMap()) return;
-  Map* parent = Map::cast(potential_parent);
-
-  // Follow back pointer, check whether we are dealing with a map transition
-  // from a live map to a dead path and in case clear transitions of parent.
-  DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(map)));
-  bool parent_is_alive = Marking::IsBlack(Marking::MarkBitFrom(parent));
-  if (parent_is_alive) {
-    ClearMapTransitions(parent, map);
-  }
-}
-
-// Clear a possible back pointer in case the transition leads to a dead map.
-// Return true in case a back pointer has been cleared and false otherwise.
-bool MarkCompactCollector::ClearMapBackPointer(Map* target) {
-  DCHECK(!Marking::IsGrey(Marking::MarkBitFrom(target)));
-  if (Marking::IsBlack(Marking::MarkBitFrom(target))) return false;
-  target->SetBackPointer(heap_->undefined_value(), SKIP_WRITE_BARRIER);
-  return true;
-}
-
-void MarkCompactCollector::ClearMapTransitions(Map* map, Map* dead_transition) {
-  Object* transitions = map->raw_transitions();
-  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
-
-  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
-  DescriptorArray* descriptors = map->instance_descriptors();
-
-  // A previously existing simple transition (stored in a WeakCell) may have
-  // been cleared. Clear the useless cell pointer, and take ownership
-  // of the descriptor array.
-  if (transitions->IsWeakCell() && WeakCell::cast(transitions)->cleared()) {
-    map->set_raw_transitions(Smi::FromInt(0));
-  }
-  if (num_transitions == 0 &&
-      descriptors == dead_transition->instance_descriptors() &&
-      number_of_own_descriptors > 0) {
-    TrimDescriptorArray(map, descriptors, number_of_own_descriptors);
-    DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
-    map->set_owns_descriptors(true);
-    return;
-  }
-
-  int transition_index = 0;
-  bool descriptors_owner_died = false;
-
-  // Compact all live descriptors to the left.
-  for (int i = 0; i < num_transitions; ++i) {
-    Map* target = TransitionArray::GetTarget(transitions, i);
-    if (ClearMapBackPointer(target)) {
-      if (target->instance_descriptors() == descriptors) {
-        descriptors_owner_died = true;
-      }
-    } else {
-      if (i != transition_index) {
-        DCHECK(TransitionArray::IsFullTransitionArray(transitions));
-        TransitionArray* t = TransitionArray::cast(transitions);
-        Name* key = t->GetKey(i);
-        t->SetKey(transition_index, key);
-        Object** key_slot = t->GetKeySlot(transition_index);
-        RecordSlot(t, key_slot, key);
-        // Target slots do not need to be recorded since maps are not compacted.
-        t->SetTarget(transition_index, t->GetTarget(i));
-      }
-      transition_index++;
-    }
-  }
-
-  // If there are no transitions to be cleared, return.
-  // TODO(verwaest) Should be an assert, otherwise back pointers are not
-  // properly cleared.
-  if (transition_index == num_transitions) return;
-
-  if (descriptors_owner_died) {
-    if (number_of_own_descriptors > 0) {
-      TrimDescriptorArray(map, descriptors, number_of_own_descriptors);
-      DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
-      map->set_owns_descriptors(true);
-    } else {
-      DCHECK(descriptors == heap_->empty_descriptor_array());
-    }
-  }
-
-  // Note that we never eliminate a transition array, though we might right-trim
-  // such that number_of_transitions() == 0. If this assumption changes,
-  // TransitionArray::Insert() will need to deal with the case that a transition
-  // array disappeared during GC.
-  int trim = TransitionArray::Capacity(transitions) - transition_index;
-  if (trim > 0) {
-    // Non-full-TransitionArray cases can never reach this point.
-    DCHECK(TransitionArray::IsFullTransitionArray(transitions));
-    TransitionArray* t = TransitionArray::cast(transitions);
-    heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-        t, trim * TransitionArray::kTransitionSize);
-    t->SetNumberOfTransitions(transition_index);
-    // The map still has a full transition array.
-    DCHECK(TransitionArray::IsFullTransitionArray(map->raw_transitions()));
-  }
-}
+void MarkCompactCollector::ClearSimpleMapTransitions(
+    Object* non_live_map_list) {
+  Object* the_hole_value = heap()->the_hole_value();
+  Object* weak_cell_obj = non_live_map_list;
+  while (weak_cell_obj != Smi::FromInt(0)) {
+    WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
+    Map* map = Map::cast(weak_cell->value());
+    DCHECK(Marking::IsWhite(Marking::MarkBitFrom(map)));
+    Object* potential_parent = map->constructor_or_backpointer();
+    if (potential_parent->IsMap()) {
+      Map* parent = Map::cast(potential_parent);
+      if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent)) &&
+          parent->raw_transitions() == weak_cell) {
+        ClearSimpleMapTransition(parent, map);
+      }
+    }
+    weak_cell->clear();
+    weak_cell_obj = weak_cell->next();
+    weak_cell->clear_next(the_hole_value);
+  }
+}
+
+void MarkCompactCollector::ClearSimpleMapTransition(Map* map,
+                                                    Map* dead_transition) {
+  // A previously existing simple transition (stored in a WeakCell) is going
+  // to be cleared. Clear the useless cell pointer, and take ownership
+  // of the descriptor array.
+  map->set_raw_transitions(Smi::FromInt(0));
+  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+  DescriptorArray* descriptors = map->instance_descriptors();
+  if (descriptors == dead_transition->instance_descriptors() &&
+      number_of_own_descriptors > 0) {
+    TrimDescriptorArray(map, descriptors);
+    DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
+    map->set_owns_descriptors(true);
+  }
+}
+
+void MarkCompactCollector::ClearFullMapTransitions() {
+  HeapObject* undefined = heap()->undefined_value();
+  Object* obj = heap()->encountered_transition_arrays();
+  while (obj != Smi::FromInt(0)) {
+    TransitionArray* array = TransitionArray::cast(obj);
+    int num_transitions = array->number_of_entries();
+    DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions);
+    if (num_transitions > 0) {
+      Map* map = array->GetTarget(0);
+      Map* parent = Map::cast(map->constructor_or_backpointer());
+      bool parent_is_alive =
+          Marking::IsBlackOrGrey(Marking::MarkBitFrom(parent));
+      DescriptorArray* descriptors =
+          parent_is_alive ? parent->instance_descriptors() : nullptr;
+      bool descriptors_owner_died =
+          CompactTransitionArray(parent, array, descriptors);
+      if (descriptors_owner_died) {
+        TrimDescriptorArray(parent, descriptors);
+      }
+    }
+    obj = array->next_link();
+    array->set_next_link(undefined, SKIP_WRITE_BARRIER);
+  }
+  heap()->set_encountered_transition_arrays(Smi::FromInt(0));
+}
+
+bool MarkCompactCollector::CompactTransitionArray(
+    Map* map, TransitionArray* transitions, DescriptorArray* descriptors) {
+  int num_transitions = transitions->number_of_entries();
+  bool descriptors_owner_died = false;
+  int transition_index = 0;
+  // Compact all live transitions to the left.
+  for (int i = 0; i < num_transitions; ++i) {
+    Map* target = transitions->GetTarget(i);
+    DCHECK_EQ(target->constructor_or_backpointer(), map);
+    if (Marking::IsWhite(Marking::MarkBitFrom(target))) {
+      if (descriptors != nullptr &&
+          target->instance_descriptors() == descriptors) {
+        descriptors_owner_died = true;
+      }
+    } else {
+      if (i != transition_index) {
+        Name* key = transitions->GetKey(i);
+        transitions->SetKey(transition_index, key);
+        Object** key_slot = transitions->GetKeySlot(transition_index);
+        RecordSlot(transitions, key_slot, key);
+        // Target slots do not need to be recorded since maps are not compacted.
+        transitions->SetTarget(transition_index, transitions->GetTarget(i));
+      }
+      transition_index++;
+    }
+  }
+  // If there are no transitions to be cleared, return.
+  if (transition_index == num_transitions) {
+    DCHECK(!descriptors_owner_died);
+    return false;
+  }
+  // Note that we never eliminate a transition array, though we might right-trim
+  // such that number_of_transitions() == 0. If this assumption changes,
+  // TransitionArray::Insert() will need to deal with the case that a transition
+  // array disappeared during GC.
+  int trim = TransitionArray::Capacity(transitions) - transition_index;
+  if (trim > 0) {
+    heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
+        transitions, trim * TransitionArray::kTransitionSize);
+    transitions->SetNumberOfTransitions(transition_index);
+  }
+  return descriptors_owner_died;
+}

 void MarkCompactCollector::TrimDescriptorArray(Map* map,
-                                               DescriptorArray* descriptors,
-                                               int number_of_own_descriptors) {
+                                               DescriptorArray* descriptors) {
+  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+  if (number_of_own_descriptors == 0) {
+    DCHECK(descriptors == heap_->empty_descriptor_array());
+    return;
+  }
   int number_of_descriptors = descriptors->number_of_descriptors_storage();
   int to_trim = number_of_descriptors - number_of_own_descriptors;
-  if (to_trim == 0) return;
+  if (to_trim > 0) {
     heap_->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
         descriptors, to_trim * DescriptorArray::kDescriptorSize);
     descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
@@ -2469,6 +2432,9 @@ void MarkCompactCollector::TrimDescriptorArray(Map* map,
                                                 number_of_own_descriptors);
     SLOW_DCHECK(layout_descriptor->IsConsistentWithMap(map, true));
   }
+  }
+  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
+  map->set_owns_descriptors(true);
 }
@@ -2560,11 +2526,20 @@ void MarkCompactCollector::AbortWeakCollections() {
 }

-void MarkCompactCollector::ProcessAndClearWeakCells() {
-  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCELL);
-  Object* weak_cell_obj = heap()->encountered_weak_cells();
+void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
+                                          DependentCode** dependent_code_list) {
+  Heap* heap = this->heap();
+  GCTracer::Scope gc_scope(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAKCELL);
+  Object* weak_cell_obj = heap->encountered_weak_cells();
+  Object* the_hole_value = heap->the_hole_value();
+  DependentCode* dependent_code_head =
+      DependentCode::cast(heap->empty_fixed_array());
+  Object* non_live_map_head = Smi::FromInt(0);
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
+    Object* next_weak_cell = weak_cell->next();
+    bool clear_value = true;
+    bool clear_next = true;
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
@@ -2585,47 +2560,59 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
           RecordSlot(value, slot, *slot);
           slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
           RecordSlot(weak_cell, slot, *slot);
-        } else {
-          weak_cell->clear();
+          clear_value = false;
         }
-      } else {
-        weak_cell->clear();
+      }
+      if (value->IsMap()) {
+        // The map is non-live.
+        Map* map = Map::cast(value);
+        // Add dependent code to the dependent_code_list.
+        DependentCode* candidate = map->dependent_code();
+        // We rely on the fact that the weak code group comes first.
+        STATIC_ASSERT(DependentCode::kWeakCodeGroup == 0);
+        if (candidate->length() > 0 &&
+            candidate->group() == DependentCode::kWeakCodeGroup) {
+          candidate->set_next_link(dependent_code_head);
+          dependent_code_head = candidate;
+        }
+        // Add the weak cell to the non_live_map list.
+        weak_cell->set_next(non_live_map_head);
+        non_live_map_head = weak_cell;
+        clear_value = false;
+        clear_next = false;
       }
     } else {
+      // The value of the weak cell is alive.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      RecordSlot(weak_cell, slot, *slot);
+      clear_value = false;
     }
-    weak_cell_obj = weak_cell->next();
-    weak_cell->clear_next(heap());
+    if (clear_value) {
+      weak_cell->clear();
+    }
+    if (clear_next) {
+      weak_cell->clear_next(the_hole_value);
+    }
+    weak_cell_obj = next_weak_cell;
   }
-  heap()->set_encountered_weak_cells(Smi::FromInt(0));
+  heap->set_encountered_weak_cells(Smi::FromInt(0));
+  *non_live_map_list = non_live_map_head;
+  *dependent_code_list = dependent_code_head;
 }

 void MarkCompactCollector::AbortWeakCells() {
+  Object* the_hole_value = heap()->the_hole_value();
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     weak_cell_obj = weak_cell->next();
-    weak_cell->clear_next(heap());
+    weak_cell->clear_next(the_hole_value);
   }
   heap()->set_encountered_weak_cells(Smi::FromInt(0));
 }

-void MarkCompactCollector::ProcessAndClearTransitionArrays() {
-  HeapObject* undefined = heap()->undefined_value();
-  Object* obj = heap()->encountered_transition_arrays();
-  while (obj != Smi::FromInt(0)) {
-    TransitionArray* array = TransitionArray::cast(obj);
-    // TODO(ulan): move logic from ClearMapTransitions here.
-    obj = array->next_link();
-    array->set_next_link(undefined, SKIP_WRITE_BARRIER);
-  }
-  heap()->set_encountered_transition_arrays(Smi::FromInt(0));
-}
-
 void MarkCompactCollector::AbortTransitionArrays() {
   HeapObject* undefined = heap()->undefined_value();
   Object* obj = heap()->encountered_transition_arrays();
......
@@ -649,15 +649,20 @@ class MarkCompactCollector {
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);

-  // Map transitions from a live map to a dead map must be killed.
-  // We replace them with a null descriptor, with the same key.
+  // Clear non-live references in weak cells, transition and descriptor arrays,
+  // and deoptimize dependent code of non-live maps.
   void ClearNonLiveReferences();
-  void ClearNonLiveMapTransitions(Map* map);
-  void ClearMapTransitions(Map* map, Map* dead_transition);
-  bool ClearMapBackPointer(Map* map);
-  void MarkDependentCodeListForDeoptimization(DependentCode* list_head);
-  void TrimDescriptorArray(Map* map, DescriptorArray* descriptors,
-                           int number_of_own_descriptors);
+  void MarkDependentCodeForDeoptimization(DependentCode* list);
+  // Find non-live targets of simple transitions in the given list. Clear
+  // transitions to non-live targets and if needed trim descriptors arrays.
+  void ClearSimpleMapTransitions(Object* non_live_map_list);
+  void ClearSimpleMapTransition(Map* map, Map* dead_transition);
+  // Compact every array in the global list of transition arrays and
+  // trim the corresponding descriptor array if a transition target is non-live.
+  void ClearFullMapTransitions();
+  bool CompactTransitionArray(Map* map, TransitionArray* transitions,
+                              DescriptorArray* descriptors);
+  void TrimDescriptorArray(Map* map, DescriptorArray* descriptors);
   void TrimEnumCache(Map* map, DescriptorArray* descriptors);

   // Mark all values associated with reachable keys in weak collections
@@ -674,10 +679,10 @@ class MarkCompactCollector {
   // collections when incremental marking is aborted.
   void AbortWeakCollections();

-  void ProcessAndClearWeakCells();
+  void ClearWeakCells(Object** non_live_map_list,
+                      DependentCode** dependent_code_list);
   void AbortWeakCells();
-  void ProcessAndClearTransitionArrays();
   void AbortTransitionArrays();

   // After all reachable objects have been marked, those entries within
@@ -685,9 +690,6 @@ class MarkCompactCollector {
   // trimming or clearing out the entire optimized code map.
   void ProcessAndClearOptimizedCodeMaps();

-  // Process non-live references in maps and optimized code.
-  void ProcessWeakReferences();
-
   // -----------------------------------------------------------------------
   // Phase 2: Sweeping to clear mark bits and free non-live objects for
   // a non-compacting collection.
......
@@ -359,9 +359,17 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
     Map* map, HeapObject* object) {
-  typedef FlexibleBodyVisitor<StaticVisitor, TransitionArray::BodyDescriptor,
-                              int> TransitionArrayBodyVisitor;
   TransitionArray* array = TransitionArray::cast(object);
   Heap* heap = array->GetHeap();
+  // Visit strong references.
+  if (array->HasPrototypeTransitions()) {
+    StaticVisitor::VisitPointer(heap, array,
+                                array->GetPrototypeTransitionsSlot());
+  }
+  int num_transitions = TransitionArray::NumberOfTransitions(array);
+  for (int i = 0; i < num_transitions; ++i) {
+    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
+  }
   // Enqueue the array in linked list of encountered transition arrays if it is
   // not already in the list.
   if (array->next_link()->IsUndefined()) {
@@ -370,8 +378,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
                          UPDATE_WEAK_WRITE_BARRIER);
     heap->set_encountered_transition_arrays(array);
   }
-  // TODO(ulan): Move MarkTransitionArray logic here.
-  TransitionArrayBodyVisitor::Visit(map, object);
 }

@@ -540,11 +546,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                           Map* map) {
-  Object* raw_transitions = map->raw_transitions();
-  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
-    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
-  }
-
   // Since descriptor arrays are potentially shared, ensure that only the
   // descriptors that belong to this map are marked. The first time a non-empty
   // descriptor array is marked, its header is also visited. The slot holding
@@ -577,23 +578,6 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
 }

-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
-    Heap* heap, TransitionArray* transitions) {
-  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
-
-  if (transitions->HasPrototypeTransitions()) {
-    StaticVisitor::VisitPointer(heap, transitions,
-                                transitions->GetPrototypeTransitionsSlot());
-  }
-
-  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
-  for (int i = 0; i < num_transitions; ++i) {
-    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
-  }
-}
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
     Heap* heap, FixedArray* code_map) {
......
@@ -372,10 +372,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
   INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
   INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));

-  // Mark pointers in a Map and its TransitionArray together, possibly
-  // treating transitions or back pointers weak.
+  // Mark pointers in a Map treating some elements of the descriptor array weak.
   static void MarkMapContents(Heap* heap, Map* map);
-  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

   // Mark pointers in the optimized code map that should act as strong
   // references, possibly treating some entries weak.
......
@@ -2060,8 +2060,9 @@ void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
 }

-void WeakCell::clear_next(Heap* heap) {
-  set_next(heap->the_hole_value(), SKIP_WRITE_BARRIER);
+void WeakCell::clear_next(Object* the_hole_value) {
+  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
+  set_next(the_hole_value, SKIP_WRITE_BARRIER);
 }

@@ -5489,8 +5490,7 @@ void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
 void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
   DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
-  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
-         (value->IsMap() && GetBackPointer()->IsUndefined()));
+  DCHECK((value->IsMap() && GetBackPointer()->IsUndefined()));
   DCHECK(!value->IsMap() ||
          Map::cast(value)->GetConstructor() == constructor_or_backpointer());
   set_constructor_or_backpointer(value, mode);
......
@@ -9463,7 +9463,7 @@ class WeakCell : public HeapObject {
   DECL_ACCESSORS(next, Object)

-  inline void clear_next(Heap* heap);
+  inline void clear_next(Object* the_hole_value);

   inline bool next_cleared();
......
@@ -1919,7 +1919,7 @@ class UnlinkWeakCellScope {
     if (object->IsWeakCell()) {
       weak_cell_ = WeakCell::cast(object);
       next_ = weak_cell_->next();
-      weak_cell_->clear_next(object->GetHeap());
+      weak_cell_->clear_next(object->GetHeap()->the_hole_value());
     }
   }
......