Commit ee59bde7 authored by hpayer, committed by Commit bot

Reland Force full GC whenever CollectAllGarbage is meant to trigger a full GC.

BUG=

Review URL: https://codereview.chromium.org/1099783003

Cr-Commit-Position: refs/heads/master@{#28024}
parent ed688523
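The substance of the change is the new default argument on Heap::CollectAllGarbage (see the heap.h hunk below): a bare CollectAllGarbage() now passes kFinalizeIncrementalMarkingMask, so any in-flight incremental marking is finished and the collection really is a full mark-compact GC. A minimal standalone sketch of the flag plumbing, with names taken from the diff; the Heap class here is a simplified stand-in, not the real V8 implementation:

```cpp
#include <cstdio>

// Simplified model of the GC flag constants from src/heap/heap.h.
class Heap {
 public:
  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;  // added by this CL
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // After this CL the flags default to kFinalizeIncrementalMarkingMask, so a
  // bare CollectAllGarbage() always asks for incremental marking to be
  // finalized, i.e. a genuine full GC.
  void CollectAllGarbage(int flags = kFinalizeIncrementalMarkingMask,
                         const char* gc_reason = nullptr) {
    std::printf("full GC: flags=%d reason=%s\n", flags,
                gc_reason ? gc_reason : "(none)");
  }
};

int main() {
  Heap heap;
  heap.CollectAllGarbage();                             // finalize marking
  heap.CollectAllGarbage(Heap::kMakeHeapIterableMask);  // abort marking
  heap.CollectAllGarbage(Heap::kReduceMemoryFootprintMask |
                             Heap::kFinalizeIncrementalMarkingMask,
                         "memory pressure");
}
```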
......@@ -851,6 +851,7 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
}
if (collector == MARK_COMPACTOR &&
!mark_compact_collector()->finalize_incremental_marking() &&
!mark_compact_collector()->abort_incremental_marking() &&
!incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() &&
......
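The heap.cc hunk above adds finalize_incremental_marking() to the guard that previously allowed a requested mark-compact to be deferred while incremental marking was still in progress. A hedged sketch of just the visible condition (the body of the if is elided in the diff, so what happens inside it is not modeled here):

```cpp
// Models only the condition shown in the hunk: a MARK_COMPACTOR request may
// be treated specially only if the caller asked neither to finalize nor to
// abort incremental marking, and marking is in progress and not hurrying.
struct CollectorState {
  bool is_mark_compactor;
  bool finalize_incremental_marking;  // new flag introduced by this CL
  bool abort_incremental_marking;
  bool incremental_marking_stopped;
  bool incremental_marking_should_hurry;
};

bool MayDeferFullGC(const CollectorState& s) {
  return s.is_mark_compactor &&
         !s.finalize_incremental_marking &&
         !s.abort_incremental_marking &&
         !s.incremental_marking_stopped &&
         !s.incremental_marking_should_hurry;
}
```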
......@@ -777,6 +777,7 @@ class Heap {
static const int kNoGCFlags = 0;
static const int kReduceMemoryFootprintMask = 1;
static const int kAbortIncrementalMarkingMask = 2;
static const int kFinalizeIncrementalMarkingMask = 4;
// Making the heap iterable requires us to abort incremental marking.
static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
......@@ -794,7 +795,7 @@ class Heap {
// non-zero, then the slower precise sweeper is used, which leaves the heap
// in a state where we can iterate over the heap visiting all objects.
void CollectAllGarbage(
int flags, const char* gc_reason = NULL,
int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Last hope GC, should try to squeeze as much as possible.
......
......@@ -23,6 +23,9 @@ void MarkCompactCollector::SetFlags(int flags) {
reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
abort_incremental_marking_ =
((flags & Heap::kAbortIncrementalMarkingMask) != 0);
finalize_incremental_marking_ =
((flags & Heap::kFinalizeIncrementalMarkingMask) != 0);
DCHECK(!finalize_incremental_marking_ || !abort_incremental_marking_);
}
......
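MarkCompactCollector::SetFlags decodes the flag bitmask into per-cycle booleans and, with the new DCHECK, insists that finalizing and aborting incremental marking are never requested together. A self-contained sketch of that decoding, using the flag values from the heap.h hunk above (the class is illustrative and assert stands in for DCHECK):

```cpp
#include <cassert>

namespace {
// Flag values as declared in src/heap/heap.h.
const int kReduceMemoryFootprintMask = 1;
const int kAbortIncrementalMarkingMask = 2;
const int kFinalizeIncrementalMarkingMask = 4;
}  // namespace

class MarkCompactCollectorSketch {
 public:
  void SetFlags(int flags) {
    reduce_memory_footprint_ = (flags & kReduceMemoryFootprintMask) != 0;
    abort_incremental_marking_ = (flags & kAbortIncrementalMarkingMask) != 0;
    finalize_incremental_marking_ =
        (flags & kFinalizeIncrementalMarkingMask) != 0;
    // A single GC cycle cannot both finish and throw away the marking work.
    assert(!finalize_incremental_marking_ || !abort_incremental_marking_);
  }

 private:
  bool reduce_memory_footprint_ = false;
  bool abort_incremental_marking_ = false;
  bool finalize_incremental_marking_ = false;
};

int main() {
  MarkCompactCollectorSketch collector;
  collector.SetFlags(kFinalizeIncrementalMarkingMask);  // OK
  collector.SetFlags(kReduceMemoryFootprintMask |
                     kAbortIncrementalMarkingMask);     // OK
  // Requesting both finalize and abort would trip the assert.
}
```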
......@@ -42,6 +42,7 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
#endif
reduce_memory_footprint_(false),
abort_incremental_marking_(false),
finalize_incremental_marking_(false),
marking_parity_(ODD_MARKING_PARITY),
compacting_(false),
was_marked_incrementally_(false),
......
......@@ -659,6 +659,10 @@ class MarkCompactCollector {
bool abort_incremental_marking() const { return abort_incremental_marking_; }
bool finalize_incremental_marking() const {
return finalize_incremental_marking_;
}
bool is_compacting() const { return compacting_; }
MarkingParity marking_parity() { return marking_parity_; }
......@@ -749,6 +753,8 @@ class MarkCompactCollector {
bool abort_incremental_marking_;
bool finalize_incremental_marking_;
MarkingParity marking_parity_;
// True if we are collecting slots to perform evacuation from evacuation
......
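The remainder of the diff is mechanical: test call sites drop their explicit i::Heap::kNoGCFlags (and, in several files, Heap::kAbortIncrementalMarkingMask) arguments in favor of the new default. A compact sketch of the migration, reusing the simplified Heap stand-in from the first sketch above:

```cpp
// Sketch only; "Heap" is the simplified stand-in defined earlier,
// not v8::internal::Heap.
void ForceFullGCInTest(Heap* heap) {
  // Old form in the tests: heap->CollectAllGarbage(Heap::kNoGCFlags);
  // New form: the default kFinalizeIncrementalMarkingMask already requests a
  // genuine full GC, so no flag needs to be spelled out.
  heap->CollectAllGarbage();
}
```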
......@@ -277,7 +277,7 @@ static void CheckAccessorArgsCorrect(
CHECK(info.This() == info.Holder());
CHECK(
info.Data()->Equals(v8::String::NewFromUtf8(CcTest::isolate(), "data")));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(info.GetIsolate() == CcTest::isolate());
CHECK(info.This() == info.Holder());
CHECK(
......
......@@ -312,7 +312,7 @@ void InterceptorHasOwnPropertyGetter(
void InterceptorHasOwnPropertyGetterGC(
Local<Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
ApiTestFuzzer::Fuzz();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
} // namespace
......
......@@ -329,7 +329,7 @@ TEST(ConstantPoolCompacting) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_NE(old_ptr, *object);
CHECK_EQ(*object, array->get_heap_ptr_entry(0));
......
......@@ -1178,7 +1178,7 @@ TEST(FunctionCallSample) {
// Collect garbage that might have been generated while installing
// extensions.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CompileRun(call_function_test_source);
v8::Local<v8::Function> function = GetFunction(*env, "start");
......
......@@ -397,7 +397,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
CHECK(!CcTest::i_isolate()->debug()->debug_info_list_);
// Collect garbage to ensure weak handles are cleared.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
// Iterate the heap and check that there are no debugger related objects left.
......@@ -870,7 +870,7 @@ static void DebugEventBreakPointCollectGarbage(
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
} else {
// Mark sweep compact.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
}
}
......@@ -1376,7 +1376,7 @@ static void CallAndGC(v8::Local<v8::Object> recv,
CHECK_EQ(2 + i * 3, break_point_hit_count);
// Mark sweep (and perhaps compact) and call function.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
f->Call(recv, 0, NULL);
CHECK_EQ(3 + i * 3, break_point_hit_count);
}
......@@ -2220,7 +2220,7 @@ TEST(ScriptBreakPointLineTopLevel) {
f = v8::Local<v8::Function>::Cast(
env->Global()->Get(v8::String::NewFromUtf8(env->GetIsolate(), "f")));
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
SetScriptBreakPointByNameFromJS(env->GetIsolate(), "test.html", 3, -1);
......
......@@ -99,7 +99,7 @@ class AllowNativesSyntaxNoInlining {
// Abort any ongoing incremental marking to make sure that all weak global
// handle callbacks are processed.
static void NonIncrementalGC(i::Isolate* isolate) {
isolate->heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
}
......
......@@ -195,7 +195,7 @@ TEST(VectorICProfilerStatistics) {
CHECK_EQ(1, feedback_vector->ic_generic_count());
// A collection will not affect the site.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(0, feedback_vector->ic_with_type_info_count());
CHECK_EQ(1, feedback_vector->ic_generic_count());
......@@ -209,7 +209,7 @@ TEST(VectorICProfilerStatistics) {
CHECK(nexus.GetFeedback()->IsAllocationSite());
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(1, feedback_vector->ic_with_type_info_count());
CHECK_EQ(0, feedback_vector->ic_generic_count());
CHECK(nexus.GetFeedback()->IsAllocationSite());
......@@ -243,7 +243,7 @@ TEST(VectorCallICStates) {
CHECK_EQ(GENERIC, nexus.StateFromFeedback());
// After a collection, state should remain GENERIC.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(GENERIC, nexus.StateFromFeedback());
// A call to Array is special, it contains an AllocationSite as feedback.
......@@ -253,7 +253,7 @@ TEST(VectorCallICStates) {
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
CHECK(nexus.GetFeedback()->IsAllocationSite());
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
}
......@@ -307,7 +307,7 @@ TEST(VectorLoadICStates) {
CHECK(!nexus.FindFirstMap());
// After a collection, state should not be reset to PREMONOMORPHIC.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(MEGAMORPHIC, nexus.StateFromFeedback());
}
......
......@@ -667,7 +667,7 @@ TEST(HeapSnapshotAddressReuse) {
CompileRun(
"for (var i = 0; i < 10000; ++i)\n"
" a[i] = new A();\n");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
......@@ -709,7 +709,7 @@ TEST(HeapEntryIdsAndArrayShift) {
"for (var i = 0; i < 1; ++i)\n"
" a.shift();\n");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
......@@ -750,7 +750,7 @@ TEST(HeapEntryIdsAndGC) {
const v8::HeapSnapshot* snapshot1 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot1));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
......@@ -1059,7 +1059,7 @@ TEST(HeapSnapshotObjectsStats) {
// We have to call GC 6 times. Otherwise leftover garbage will be
// the source of flakiness.
for (int i = 0; i < 6; ++i) {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
v8::SnapshotObjectId initial_id;
......
......@@ -92,8 +92,8 @@ TEST(Promotion) {
CHECK(heap->InSpace(*array, NEW_SPACE));
// Call mark compact GC, so array becomes an old object.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
// Array now sits in the old space
CHECK(heap->InSpace(*array, OLD_SPACE));
......
......@@ -36,7 +36,7 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() {
NewSpace* new_space = heap->new_space();
// Make sure we can allocate some objects without causing a GC later.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Allocate a string, the GC may suspect a memento behind the string.
Handle<SeqOneByteString> string =
......
......@@ -429,7 +429,7 @@ TEST(ObservationWeakMap) {
CHECK_EQ(1, NumberOfElements(callbackInfoMap));
CHECK_EQ(1, NumberOfElements(objectInfoMap));
CHECK_EQ(1, NumberOfElements(notifierObjectInfoMap));
i_isolate->heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
i_isolate->heap()->CollectAllGarbage();
CHECK_EQ(0, NumberOfElements(callbackInfoMap));
CHECK_EQ(0, NumberOfElements(objectInfoMap));
CHECK_EQ(0, NumberOfElements(notifierObjectInfoMap));
......@@ -682,7 +682,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
// the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG
......
......@@ -297,8 +297,8 @@ UNINITIALIZED_TEST(PartialSerialization) {
isolate->bootstrapper()->NativesSourceLookup(i);
}
}
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
Object* raw_foo;
{
......@@ -422,7 +422,7 @@ UNINITIALIZED_TEST(ContextSerialization) {
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of env.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
......
......@@ -64,7 +64,7 @@ TEST(Create) {
}
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// All symbols should be distinct.
for (int i = 0; i < kNumSymbols; ++i) {
......
......@@ -97,7 +97,7 @@ class ThreadB : public v8::base::Thread {
v8::Context::Scope context_scope(context);
// Clear the caches by forcing major GC.
CcTest::heap()->CollectAllGarbage(v8::internal::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
turn = SECOND_TIME_FILL_CACHE;
break;
}
......
......@@ -909,7 +909,7 @@ TEST(Regress436816) {
CHECK(object->map()->HasFastPointerLayout());
// Trigger GCs and heap verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
......@@ -966,7 +966,7 @@ TEST(DescriptorArrayTrimming) {
// Call GC that should trim both |map|'s descriptor array and layout
// descriptor.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// The unused tail of the layout descriptor is now "clean" again.
CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
......@@ -1390,7 +1390,7 @@ TEST(StoreBufferScanOnScavenge) {
chunk->set_scan_on_scavenge(true);
// Trigger GCs and force evacuation. Should not crash there.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
}
......
......@@ -199,7 +199,7 @@ TEST(Regress2060a) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
......@@ -241,9 +241,9 @@ TEST(Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}
......@@ -260,5 +260,5 @@ TEST(Regress399527) {
// The weak map is marked black here but leaving the handle scope will make
// the object unreachable. Aborting incremental marking will clear all the
// marking bits which makes the weak map garbage.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
......@@ -199,7 +199,7 @@ TEST(WeakSet_Regress2060a) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
......@@ -241,7 +241,7 @@ TEST(WeakSet_Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}
......@@ -128,7 +128,7 @@ TEST(WeakArrayBuffersFromApi) {
CHECK(HasArrayBufferInWeakList(isolate->heap(), *iab1));
CHECK(HasArrayBufferInWeakList(isolate->heap(), *iab2));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
{
HandleScope scope2(isolate);
......@@ -138,7 +138,7 @@ TEST(WeakArrayBuffersFromApi) {
}
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
}
......@@ -180,7 +180,7 @@ TEST(WeakArrayBuffersFromScript) {
i::ScopedVector<char> source(1024);
i::SNPrintF(source, "ab%d = null;", i);
CompileRun(source.start());
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(2, CountArrayBuffersInWeakList(isolate->heap()) - start);
......@@ -199,7 +199,7 @@ TEST(WeakArrayBuffersFromScript) {
CompileRun("ab1 = null; ab2 = null; ab3 = null;");
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
}
}
......@@ -227,12 +227,12 @@ void TestViewFromApi() {
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita2));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountViews(isolate->heap(), *iab));
Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(0, CountViews(isolate->heap(), *iab));
}
......@@ -333,7 +333,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
i::SNPrintF(source, "ta%d = null;", i);
CompileRun(source.start());
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
......@@ -354,7 +354,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
}
CompileRun("ta1 = null; ta2 = null; ta3 = null;");
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
......