Commit 2bc756e4 authored by ulan, committed by Commit bot

Retain maps for several garbage collections

This keeps dying maps alive for FLAG_retain_maps_for_n_gc garbage collections to increase the chances of them being reused for new objects in the future.

BUG=v8:3664
LOG=N
TEST=cctest/test-heap/MapRetaining

Review URL: https://codereview.chromium.org/794583003

Cr-Commit-Position: refs/heads/master@{#25887}
parent 40f45ab2
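
The mechanism in one paragraph: each map carries a counter that starts at Map::kRetainingCounterStart. When a mark-compact collection finds a map unmarked, it decrements the counter and artificially marks the map instead of letting it die; once the counter leaves the retaining window (which FLAG_retain_maps_for_n_gc narrows), the map is allowed to die. A map reached by regular marking gets its counter reset. The standalone C++ sketch below illustrates just this aging scheme; every name and constant in it is made up for illustration and is not V8's actual code (the real implementation is the MarkCompactCollector::RetainMaps hunk further down).

// map_retaining_sketch.cc -- illustrative only, not V8 source.
#include <algorithm>
#include <cassert>
#include <vector>

// Assumed stand-ins for Map::kRetainingCounterStart/End and
// FLAG_retain_maps_for_n_gc; the real values live in V8's sources.
const int kCounterStart = 15;
const int kCounterEnd = 0;
const int kRetainMapsForNGC = 2;

struct FakeMap {
  int counter = kCounterStart;
  bool referenced = false;  // would regular marking reach this map?
  bool alive = true;
};

// One mark cycle: an unreferenced map survives as long as its counter
// stays above counter_end, i.e. inside the retaining window.
void MarkCycle(std::vector<FakeMap>* maps) {
  int counter_end = std::max(kCounterEnd, kCounterStart - kRetainMapsForNGC);
  for (FakeMap& map : *maps) {
    if (map.referenced) {
      map.counter = kCounterStart;  // live map: reset the counter
    } else if (map.counter > counter_end) {
      map.counter--;  // dying map: age it, but retain it this cycle
    } else {
      map.alive = false;  // retaining budget exhausted: let it die
    }
  }
}

int main() {
  std::vector<FakeMap> maps(1);  // one unreferenced ("dying") map
  MarkCycle(&maps);              // GC 1: counter 15 -> 14, retained
  MarkCycle(&maps);              // GC 2: counter 14 -> 13, retained
  assert(maps[0].alive);
  MarkCycle(&maps);              // GC 3: counter at window end, map dies
  assert(!maps[0].alive);
  return 0;
}

This mirrors what the new MapRetaining test checks: a map survives exactly FLAG_retain_maps_for_n_gc collections after its last reference dies, and the following collection clears it.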
@@ -2455,7 +2455,7 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
   // Perform GC to get unreferenced scripts evicted from the cache before
   // returning the content.
-  isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+  isolate_->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
                                       "Debug::GetLoadedScripts");
   // Get the scripts from the cache.
......
@@ -560,6 +560,8 @@ DEFINE_INT(max_old_space_size, 0, "max size of the old space (in Mbytes)")
 DEFINE_INT(max_executable_size, 0, "max size of executable memory (in Mbytes)")
 DEFINE_BOOL(gc_global, false, "always perform global GCs")
 DEFINE_INT(gc_interval, -1, "garbage collect after <n> allocations")
+DEFINE_INT(retain_maps_for_n_gc, 1,
+           "keeps maps alive for <n> old space garbage collections")
 DEFINE_BOOL(trace_gc, false,
             "print one trace line following each garbage collection")
 DEFINE_BOOL(trace_gc_nvp, false,
......
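Since this is an ordinary DEFINE_INT flag, it should also be settable from the command line in the usual way, e.g. d8 --retain_maps_for_n_gc=4 (invocation assumed, following V8's standard flag-to-switch mapping).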
@@ -2123,6 +2123,52 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
 }
+
+void MarkCompactCollector::RetainMaps(ObjectVisitor* visitor) {
+  if (reduce_memory_footprint_ || abort_incremental_marking_ ||
+      FLAG_retain_maps_for_n_gc == 0) {
+    // Do not retain dead maps if the flag disables it, or if there is
+    // - memory pressure (reduce_memory_footprint_), or
+    // - a GC requested by tests or dev-tools (abort_incremental_marking_).
+    return;
+  }
+
+  HeapObjectIterator map_iterator(heap()->map_space());
+  // The retaining counter goes from Map::kRetainingCounterStart
+  // down to Map::kRetainingCounterEnd. This range can be narrowed
+  // by the FLAG_retain_maps_for_n_gc flag.
+  int retaining_counter_end =
+      Max(Map::kRetainingCounterEnd,
+          Map::kRetainingCounterStart - FLAG_retain_maps_for_n_gc);
+
+  for (HeapObject* obj = map_iterator.Next(); obj != NULL;
+       obj = map_iterator.Next()) {
+    Map* map = Map::cast(obj);
+    MarkBit map_mark = Marking::MarkBitFrom(map);
+    int counter = map->counter();
+    if (!map_mark.Get()) {
+      if (counter > Map::kRetainingCounterStart ||
+          counter <= retaining_counter_end) {
+        // The counter is outside of the retaining range. Do not retain this
+        // map.
+        continue;
+      }
+      Object* constructor = map->constructor();
+      if (!constructor->IsHeapObject() ||
+          !Marking::MarkBitFrom(HeapObject::cast(constructor)).Get()) {
+        // The constructor is dead, so no new objects with this map can
+        // be created. Do not retain this map.
+        continue;
+      }
+      map->set_counter(counter - 1);
+      SetMark(map, map_mark);
+      MarkCompactMarkingVisitor::IterateBody(map->map(), map);
+    } else if (counter < Map::kRetainingCounterStart) {
+      // Reset the counter for live maps.
+      map->set_counter(Map::kRetainingCounterStart);
+    }
+  }
+  ProcessMarkingDeque();
+}
+
 void MarkCompactCollector::EnsureMarkingDequeIsCommittedAndInitialize() {
   if (marking_deque_memory_ == NULL) {
     marking_deque_memory_ = new base::VirtualMemory(4 * MB);
@@ -2222,6 +2268,11 @@ void MarkCompactCollector::MarkLiveObjects() {
   ProcessTopOptimizedFrame(&root_visitor);
 
+  // Retaining dying maps should happen before or during ephemeral marking
+  // because a map could keep the key of an ephemeron alive. Note that map
+  // aging is imprecise: maps that are kept alive only by ephemerons will age.
+  RetainMaps(&root_visitor);
+
   {
     GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCLOSURE);
......
@@ -780,6 +780,10 @@ class MarkCompactCollector {
   // otherwise a map can die and deoptimize the code.
   void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
 
+  // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
+  // increase the chances of reusing the map transition tree in the future.
+  void RetainMaps(ObjectVisitor* visitor);
+
   // Mark objects reachable (transitively) from objects in the marking
   // stack. This function empties the marking stack, but may leave
   // overflowed objects in the heap, in which case the marking stack's
......
@@ -19353,6 +19353,7 @@ THREADED_TEST(SpaghettiStackReThrow) {
 TEST(Regress528) {
   v8::V8::Initialize();
   v8::Isolate* isolate = CcTest::isolate();
+  i::FLAG_retain_maps_for_n_gc = 0;
   v8::HandleScope scope(isolate);
   v8::Local<Context> other_context;
   int gc_count;
......
@@ -1499,6 +1499,7 @@ TEST(TestInternalWeakLists) {
   // Some flags turn Scavenge collections into Mark-sweep collections
   // and hence are incompatible with this test case.
   if (FLAG_gc_global || FLAG_stress_compaction) return;
+  FLAG_retain_maps_for_n_gc = 0;
 
   static const int kNumTestContexts = 10;
@@ -2921,6 +2922,7 @@ TEST(Regress1465) {
   i::FLAG_stress_compaction = false;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_trace_incremental_marking = true;
+  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   static const int transitions_count = 256;
@@ -2983,6 +2985,7 @@ static void AddPropertyTo(
   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
   i::FLAG_gc_interval = gc_count;
   i::FLAG_gc_global = true;
+  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::heap()->set_allocation_timeout(gc_count);
   JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
 }
@@ -4205,7 +4208,7 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
   // Now make sure that a gc should get rid of the function, even though we
   // still have the allocation site alive.
   for (int i = 0; i < 4; i++) {
-    heap->CollectAllGarbage(Heap::kNoGCFlags);
+    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
   }
 
   // TODO(mvstanton): this test fails when FLAG_vector_ics is true because
@@ -4316,6 +4319,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
   i::FLAG_weak_embedded_objects_in_optimized_code = true;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_compilation_cache = false;
+  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   v8::internal::Heap* heap = CcTest::heap();
@@ -5111,6 +5115,40 @@ TEST(Regress442710) {
 }
 
+
+void CheckMapRetainingFor(int n) {
+  FLAG_retain_maps_for_n_gc = n;
+  Isolate* isolate = CcTest::i_isolate();
+  Heap* heap = isolate->heap();
+  Handle<WeakCell> weak_cell;
+  {
+    HandleScope inner_scope(isolate);
+    Handle<Map> map = Map::Create(isolate, 1);
+    weak_cell = inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
+  }
+  CHECK(!weak_cell->cleared());
+  int retaining_count =
+      Min(FLAG_retain_maps_for_n_gc,
+          Map::kRetainingCounterStart - Map::kRetainingCounterEnd);
+  for (int i = 0; i < retaining_count; i++) {
+    heap->CollectGarbage(OLD_POINTER_SPACE);
+  }
+  CHECK(!weak_cell->cleared());
+  heap->CollectGarbage(OLD_POINTER_SPACE);
+  CHECK(weak_cell->cleared());
+}
+
+
+TEST(MapRetaining) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
+  CheckMapRetainingFor(0);
+  CheckMapRetainingFor(Map::kRetainingCounterStart - Map::kRetainingCounterEnd);
+  CheckMapRetainingFor(Map::kRetainingCounterStart - Map::kRetainingCounterEnd +
+                       1);
+}
+
+
 #ifdef DEBUG
 TEST(PathTracer) {
   CcTest::InitializeVM();
......
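As a quick check, the new test can presumably be run in isolation with the cctest runner from the build output directory, e.g. cctest test-heap/MapRetaining (exact binary path depends on the build configuration).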
@@ -127,6 +127,7 @@ TEST(NoPromotion) {
 TEST(MarkCompactCollector) {
   FLAG_incremental_marking = false;
+  FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   TestHeap* heap = CcTest::test_heap();
......