Commit 64e1b561 authored by ulan, committed by Commit bot

Revert "Retain maps for several garbage collections"

This reverts commit 2bc756e4
because of a performance regression in Kraken.

BUG=chromium:444232
LOG=N
TBR=hpayer@chromium.org

Review URL: https://codereview.chromium.org/816353002

Cr-Commit-Position: refs/heads/master@{#25918}
parent c37e09d7
@@ -2473,7 +2473,7 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
   // Perform GC to get unreferenced scripts evicted from the cache before
   // returning the content.
-  isolate_->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
+  isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
                                       "Debug::GetLoadedScripts");
   // Get the scripts from the cache.
......
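A note on the flag change in the hunk above: the Heap GC flags form a bitmask passed to CollectAllGarbage, and kAbortIncrementalMarkingMask asks the collector to discard any in-progress incremental marking before it runs, while kNoGCFlags leaves it alone. A minimal sketch of that bitmask pattern; the constant values and the helper below are illustrative assumptions, not quotes from heap.h:

    #include <iostream>

    // Illustrative stand-ins for the Heap GC-flag bitmask (values assumed).
    static const int kNoGCFlags = 0;
    static const int kReduceMemoryFootprintMask = 1;
    static const int kAbortIncrementalMarkingMask = 2;

    // CollectAllGarbage-style entry points test individual bits of the mask.
    bool ShouldAbortIncrementalMarking(int gc_flags) {
      return (gc_flags & kAbortIncrementalMarkingMask) != 0;
    }

    int main() {
      std::cout << ShouldAbortIncrementalMarking(kNoGCFlags) << "\n";                    // 0
      std::cout << ShouldAbortIncrementalMarking(kAbortIncrementalMarkingMask) << "\n";  // 1
      return 0;
    }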
@@ -560,8 +560,6 @@ DEFINE_INT(initial_old_space_size, 0, "initial old space size (in Mbytes)")
 DEFINE_INT(max_executable_size, 0, "max size of executable memory (in Mbytes)")
 DEFINE_BOOL(gc_global, false, "always perform global GCs")
 DEFINE_INT(gc_interval, -1, "garbage collect after <n> allocations")
-DEFINE_INT(retain_maps_for_n_gc, 1,
-           "keeps maps alive for <n> old space garbage collections")
 DEFINE_BOOL(trace_gc, false,
             "print one trace line following each garbage collection")
 DEFINE_BOOL(trace_gc_nvp, false,
......
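For readers unfamiliar with flag-definitions.h: each DEFINE_* line both declares a mutable FLAG_* global with a default value and registers a matching command-line switch, which is why the tests touched by this commit can simply assign to i::FLAG_retain_maps_for_n_gc. A simplified sketch of the pattern; this is not the real macro, which also records help text and other flag metadata:

    // Simplified stand-in for DEFINE_INT: a global that both the
    // command-line parser and tests can overwrite directly.
    #define DEFINE_INT(nam, def, cmt) int FLAG_##nam = def;

    DEFINE_INT(gc_interval, -1, "garbage collect after <n> allocations")

    int main() {
      FLAG_gc_interval = 100;  // tests override flags by plain assignment
      return FLAG_gc_interval == 100 ? 0 : 1;
    }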
@@ -2125,52 +2125,6 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
 }
 
-void MarkCompactCollector::RetainMaps(ObjectVisitor* visitor) {
-  if (reduce_memory_footprint_ || abort_incremental_marking_ ||
-      FLAG_retain_maps_for_n_gc == 0) {
-    // Do not retain dead maps if flag disables it or there is
-    // - memory pressure (reduce_memory_footprint_),
-    // - GC is requested by tests or dev-tools (abort_incremental_marking_).
-    return;
-  }
-
-  HeapObjectIterator map_iterator(heap()->map_space());
-  // The retaining counter goes from Map::kRetainingCounterStart
-  // down to Map::kRetainingCounterEnd. This range can be narrowed
-  // by the FLAG_retain_maps_for_n_gc flag.
-  int retaining_counter_end =
-      Max(Map::kRetainingCounterEnd,
-          Map::kRetainingCounterStart - FLAG_retain_maps_for_n_gc);
-
-  for (HeapObject* obj = map_iterator.Next(); obj != NULL;
-       obj = map_iterator.Next()) {
-    Map* map = Map::cast(obj);
-    MarkBit map_mark = Marking::MarkBitFrom(map);
-    int counter = map->counter();
-    if (!map_mark.Get()) {
-      if (counter > Map::kRetainingCounterStart ||
-          counter <= retaining_counter_end) {
-        // The counter is outside of retaining range. Do not retain this map.
-        continue;
-      }
-      Object* constructor = map->constructor();
-      if (!constructor->IsHeapObject() ||
-          !Marking::MarkBitFrom(HeapObject::cast(constructor)).Get()) {
-        // The constructor is dead, no new objects with this map can
-        // be created. Do not retain this map.
-        continue;
-      }
-      map->set_counter(counter - 1);
-      SetMark(map, map_mark);
-      MarkCompactMarkingVisitor::IterateBody(map->map(), map);
-    } else if (counter < Map::kRetainingCounterStart) {
-      // Reset the counter for live maps.
-      map->set_counter(Map::kRetainingCounterStart);
-    }
-  }
-  ProcessMarkingDeque();
-}
-
-
 void MarkCompactCollector::EnsureMarkingDequeIsCommittedAndInitialize() {
   if (marking_deque_memory_ == NULL) {
     marking_deque_memory_ = new base::VirtualMemory(4 * MB);
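To summarize the logic deleted above: each Map carries an aging counter that starts at Map::kRetainingCounterStart; every mark-compact cycle in which the map is otherwise unreachable decrements it, and once the counter leaves the retaining window (narrowed by FLAG_retain_maps_for_n_gc) the map is allowed to die. A map whose constructor is already dead dies immediately, since no new instances of it can be created. A standalone sketch of that per-map decision, with illustrative constants and a made-up MapState in place of V8's real types:

    #include <algorithm>

    // Illustrative stand-ins for Map::kRetainingCounterStart/End.
    const int kRetainingCounterStart = 7;
    const int kRetainingCounterEnd = 0;

    struct MapState {
      int counter;            // aging counter stored on the map
      bool marked;            // already marked live this cycle?
      bool constructor_live;  // is the map's constructor still reachable?
    };

    // Mirrors the per-map decision in the deleted RetainMaps(): returns
    // true if the map stays alive this cycle, aging or resetting its counter.
    bool RetainMap(MapState* map, int retain_maps_for_n_gc) {
      const int retaining_counter_end =
          std::max(kRetainingCounterEnd,
                   kRetainingCounterStart - retain_maps_for_n_gc);
      if (map->marked) {
        map->counter = kRetainingCounterStart;  // live maps start aging over
        return true;
      }
      if (map->counter > kRetainingCounterStart ||
          map->counter <= retaining_counter_end) {
        return false;  // counter outside the retaining window: let it die
      }
      if (!map->constructor_live) {
        return false;  // no new instances possible: let it die
      }
      map->counter--;  // age the map and keep it one more cycle
      return true;
    }

    int main() {
      MapState m = {kRetainingCounterStart, false, true};
      // With the flag at 2, a dead map with a live constructor survives twice.
      bool first = RetainMap(&m, 2);   // true, counter 7 -> 6
      bool second = RetainMap(&m, 2);  // true, counter 6 -> 5
      bool third = RetainMap(&m, 2);   // false: 5 is outside the window
      return (first && second && !third) ? 0 : 1;
    }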
@@ -2270,11 +2224,6 @@ void MarkCompactCollector::MarkLiveObjects() {
   ProcessTopOptimizedFrame(&root_visitor);
 
-  // Retaining dying maps should happen before or during ephemeral marking
-  // because a map could keep the key of an ephemeron alive. Note that map
-  // aging is imprecise: maps that are kept alive only by ephemerons will age.
-  RetainMaps(&root_visitor);
-
   {
     GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_WEAKCLOSURE);
......
@@ -780,10 +780,6 @@ class MarkCompactCollector {
   // otherwise a map can die and deoptimize the code.
   void ProcessTopOptimizedFrame(ObjectVisitor* visitor);
 
-  // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
-  // increase chances of reusing of map transition tree in future.
-  void RetainMaps(ObjectVisitor* visitor);
-
   // Mark objects reachable (transitively) from objects in the marking
   // stack. This function empties the marking stack, but may leave
   // overflowed objects in the heap, in which case the marking stack's
......
@@ -19444,7 +19444,6 @@ THREADED_TEST(SpaghettiStackReThrow) {
 TEST(Regress528) {
   v8::V8::Initialize();
   v8::Isolate* isolate = CcTest::isolate();
-  i::FLAG_retain_maps_for_n_gc = 0;
   v8::HandleScope scope(isolate);
   v8::Local<Context> other_context;
   int gc_count;
......
@@ -1499,7 +1499,6 @@ TEST(TestInternalWeakLists) {
   // Some flags turn Scavenge collections into Mark-sweep collections
   // and hence are incompatible with this test case.
   if (FLAG_gc_global || FLAG_stress_compaction) return;
-  FLAG_retain_maps_for_n_gc = 0;
 
   static const int kNumTestContexts = 10;
@@ -2922,7 +2921,6 @@ TEST(Regress1465) {
   i::FLAG_stress_compaction = false;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_trace_incremental_marking = true;
-  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   static const int transitions_count = 256;
@@ -2985,7 +2983,6 @@ static void AddPropertyTo(
   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
   i::FLAG_gc_interval = gc_count;
   i::FLAG_gc_global = true;
-  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::heap()->set_allocation_timeout(gc_count);
   JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
 }
@@ -4202,7 +4199,7 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
   // Now make sure that a gc should get rid of the function, even though we
   // still have the allocation site alive.
   for (int i = 0; i < 4; i++) {
-    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+    heap->CollectAllGarbage(Heap::kNoGCFlags);
   }
 
   // The site still exists because of our global handle, but the code is no
@@ -4303,7 +4300,6 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
   i::FLAG_weak_embedded_objects_in_optimized_code = true;
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_compilation_cache = false;
-  i::FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   v8::internal::Heap* heap = CcTest::heap();
@@ -5093,40 +5089,6 @@ TEST(Regress442710) {
 }
 
-void CheckMapRetainingFor(int n) {
-  FLAG_retain_maps_for_n_gc = n;
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  Handle<WeakCell> weak_cell;
-  {
-    HandleScope inner_scope(isolate);
-    Handle<Map> map = Map::Create(isolate, 1);
-    weak_cell = inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
-  }
-  CHECK(!weak_cell->cleared());
-  int retaining_count =
-      Min(FLAG_retain_maps_for_n_gc,
-          Map::kRetainingCounterStart - Map::kRetainingCounterEnd);
-  for (int i = 0; i < retaining_count; i++) {
-    heap->CollectGarbage(OLD_POINTER_SPACE);
-  }
-  CHECK(!weak_cell->cleared());
-  heap->CollectGarbage(OLD_POINTER_SPACE);
-  CHECK(weak_cell->cleared());
-}
-
-
-TEST(MapRetaining) {
-  CcTest::InitializeVM();
-  v8::HandleScope scope(CcTest::isolate());
-  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
-  CheckMapRetainingFor(0);
-  CheckMapRetainingFor(Map::kRetainingCounterStart - Map::kRetainingCounterEnd);
-  CheckMapRetainingFor(Map::kRetainingCounterStart - Map::kRetainingCounterEnd +
-                       1);
-}
-
-
 #ifdef DEBUG
 TEST(PathTracer) {
   CcTest::InitializeVM();
......
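The deleted MapRetaining test pins down the intended lifetime: a map that is only weakly referenced survives exactly Min(FLAG_retain_maps_for_n_gc, Map::kRetainingCounterStart - Map::kRetainingCounterEnd) old-space collections and is cleared by the next one. A toy check of that arithmetic, reusing the same illustrative constants as the sketch above:

    #include <algorithm>
    #include <cassert>

    // Illustrative counter range (same assumed values as the earlier sketch).
    const int kRetainingCounterStart = 7;
    const int kRetainingCounterEnd = 0;

    // How many full GCs an otherwise-dead map survives when maps are
    // retained for |n| collections.
    int SurvivedGCs(int n) {
      int retaining_counter_end =
          std::max(kRetainingCounterEnd, kRetainingCounterStart - n);
      int counter = kRetainingCounterStart;
      int gcs = 0;
      while (counter > retaining_counter_end) {
        counter--;  // one GC cycle ages the map
        gcs++;
      }
      return gcs;
    }

    int main() {
      // Mirrors the expectations in CheckMapRetainingFor().
      assert(SurvivedGCs(0) == 0);  // flag of 0: cleared by the first GC
      assert(SurvivedGCs(3) == 3);  // survives three GCs, cleared by the fourth
      assert(SurvivedGCs(100) ==    // clamped to the full counter range
             kRetainingCounterStart - kRetainingCounterEnd);
      return 0;
    }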
@@ -127,7 +127,6 @@ TEST(NoPromotion) {
 TEST(MarkCompactCollector) {
   FLAG_incremental_marking = false;
-  FLAG_retain_maps_for_n_gc = 0;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   TestHeap* heap = CcTest::test_heap();
......