Commit a56f5373 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Cleanup and fix GC flags

GC flags are now part of the {Heap} and should be respected by all
sub-components.

Also add infrastructure for writing tests that access private methods.

Review URL: https://codereview.chromium.org/1301183002

Cr-Commit-Position: refs/heads/master@{#30281}
parent eaba98d9
......@@ -132,6 +132,7 @@ Heap::Heap()
ring_buffer_end_(0),
promotion_queue_(this),
configured_(false),
current_gc_flags_(Heap::kNoGCFlags),
external_string_table_(this),
chunks_queued_for_free_(NULL),
gc_callbacks_depth_(0),
......@@ -795,9 +796,9 @@ void Heap::CollectAllGarbage(int flags, const char* gc_reason,
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
mark_compact_collector_.SetFlags(flags);
set_current_gc_flags(flags);
CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
mark_compact_collector_.SetFlags(kNoGCFlags);
set_current_gc_flags(kNoGCFlags);
}
......@@ -819,8 +820,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
isolate()->optimizing_compile_dispatcher()->Flush();
}
isolate()->ClearSerializerData();
mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
kReduceMemoryFootprintMask);
set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
isolate_->compilation_cache()->Clear();
const int kMaxNumberOfAttempts = 7;
const int kMinNumberOfAttempts = 2;
......@@ -831,7 +831,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
break;
}
}
mark_compact_collector()->SetFlags(kNoGCFlags);
set_current_gc_flags(kNoGCFlags);
new_space_.Shrink();
UncommitFromSpace();
}
......@@ -879,10 +879,8 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
}
}
if (collector == MARK_COMPACTOR &&
!mark_compact_collector()->finalize_incremental_marking() &&
!mark_compact_collector()->abort_incremental_marking() &&
!incremental_marking()->IsStopped() &&
if (collector == MARK_COMPACTOR && !ShouldFinalizeIncrementalMarking() &&
!ShouldAbortIncrementalMarking() && !incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() && FLAG_incremental_marking) {
// Make progress in incremental marking.
const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
......@@ -953,8 +951,7 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
// Start incremental marking for the next cycle. The heap snapshot
// generator needs incremental marking to stay off after it aborted.
if (!mark_compact_collector()->abort_incremental_marking() &&
incremental_marking()->IsStopped() &&
if (!ShouldAbortIncrementalMarking() && incremental_marking()->IsStopped() &&
incremental_marking()->ShouldActivateEvenWithoutIdleNotification()) {
incremental_marking()->Start(kNoGCFlags, kNoGCCallbackFlags, "GC epilogue");
}
......@@ -5652,8 +5649,7 @@ void Heap::SetOldGenerationAllocationLimit(intptr_t old_gen_size,
factor = Min(factor, kConservativeHeapGrowingFactor);
}
if (FLAG_stress_compaction ||
mark_compact_collector()->reduce_memory_footprint_) {
if (FLAG_stress_compaction || ShouldReduceMemory()) {
factor = kMinHeapGrowingFactor;
}
......
......@@ -1681,6 +1681,25 @@ class Heap {
private:
Heap();
// Returns the GC flags set for the current collection cycle.
int current_gc_flags() { return current_gc_flags_; }
// Sets the GC flags that all GC sub-components respect for the current
// cycle. Requesting both finalization and abortion of incremental marking
// at the same time is contradictory, so the DCHECK rejects that combination.
void set_current_gc_flags(int flags) {
current_gc_flags_ = flags;
DCHECK(!ShouldFinalizeIncrementalMarking() ||
!ShouldAbortIncrementalMarking());
}
// True if the current GC was requested with kReduceMemoryFootprintMask set.
inline bool ShouldReduceMemory() const {
return current_gc_flags_ & kReduceMemoryFootprintMask;
}
// True if the current GC was requested with kAbortIncrementalMarkingMask set.
inline bool ShouldAbortIncrementalMarking() const {
return current_gc_flags_ & kAbortIncrementalMarkingMask;
}
// True if the current GC was requested with kFinalizeIncrementalMarkingMask
// set. Mutually exclusive with ShouldAbortIncrementalMarking (see the DCHECK
// in set_current_gc_flags).
inline bool ShouldFinalizeIncrementalMarking() const {
return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
}
// The amount of external memory registered through the API that is kept
// alive by global handles.
int64_t amount_of_external_allocated_memory_;
......@@ -2271,6 +2290,9 @@ class Heap {
// configured through the API until it is set up.
bool configured_;
// Currently set GC flags that are respected by all GC components.
int current_gc_flags_;
ExternalStringTable external_string_table_;
VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
......@@ -2314,12 +2336,16 @@ class Heap {
friend class GCCallbacksScope;
friend class GCTracer;
friend class HeapIterator;
friend class IncrementalMarking;
friend class Isolate;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class MapCompact;
friend class Page;
// Used in cctest.
friend class HeapTester;
DISALLOW_COPY_AND_ASSIGN(Heap);
};
......
......@@ -469,7 +469,7 @@ static void PatchIncrementalMarkingRecordWriteStubs(
}
void IncrementalMarking::Start(int mark_compact_flags,
void IncrementalMarking::Start(int flags,
const GCCallbackFlags gc_callback_flags,
const char* reason) {
if (FLAG_trace_incremental_marking) {
......@@ -487,9 +487,8 @@ void IncrementalMarking::Start(int mark_compact_flags,
was_activated_ = true;
if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
heap_->mark_compact_collector()->SetFlags(mark_compact_flags);
heap_->set_current_gc_flags(flags);
StartMarking();
heap_->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
} else {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start sweeping.\n");
......
......@@ -81,7 +81,7 @@ class IncrementalMarking {
bool WasActivated();
void Start(int mark_compact_flags,
void Start(int flags,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags,
const char* reason = nullptr);
......
......@@ -11,17 +11,6 @@
namespace v8 {
namespace internal {
// Translates the heap-level GC flag mask into the collector's per-cycle
// booleans. Finalizing and aborting incremental marking must never be
// requested together.
void MarkCompactCollector::SetFlags(int flags) {
  const bool reduce_memory = (flags & Heap::kReduceMemoryFootprintMask) != 0;
  const bool abort_marking = (flags & Heap::kAbortIncrementalMarkingMask) != 0;
  const bool finalize_marking =
      (flags & Heap::kFinalizeIncrementalMarkingMask) != 0;
  reduce_memory_footprint_ = reduce_memory;
  abort_incremental_marking_ = abort_marking;
  finalize_incremental_marking_ = finalize_marking;
  DCHECK(!finalize_incremental_marking_ || !abort_incremental_marking_);
}
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK(Marking::IsBlack(Marking::MarkBitFrom(obj)));
if (marking_deque_.Push(obj)) {
......
......@@ -43,9 +43,6 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
#ifdef DEBUG
state_(IDLE),
#endif
reduce_memory_footprint_(false),
abort_incremental_marking_(false),
finalize_incremental_marking_(false),
marking_parity_(ODD_MARKING_PARITY),
compacting_(false),
was_marked_incrementally_(false),
......@@ -667,7 +664,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
int total_live_bytes = 0;
bool reduce_memory =
reduce_memory_footprint_ || heap()->HasLowAllocationRate();
heap()->ShouldReduceMemory() || heap()->HasLowAllocationRate();
if (FLAG_manual_evacuation_candidates_selection) {
for (size_t i = 0; i < pages.size(); i++) {
Page* p = pages[i].second;
......@@ -789,7 +786,7 @@ void MarkCompactCollector::Prepare() {
}
// Clear marking bits if incremental marking is aborted.
if (was_marked_incrementally_ && abort_incremental_marking_) {
if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
heap()->incremental_marking()->Stop();
ClearMarkbits();
AbortWeakCollections();
......@@ -2061,7 +2058,7 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
void MarkCompactCollector::RetainMaps() {
if (reduce_memory_footprint_ || abort_incremental_marking_ ||
if (heap()->ShouldReduceMemory() || heap()->ShouldAbortIncrementalMarking() ||
FLAG_retain_maps_for_n_gc == 0) {
// Do not retain dead maps if flag disables it or there is
// - memory pressure (reduce_memory_footprint_),
......
......@@ -504,9 +504,6 @@ class ThreadLocalTop;
// Mark-Compact collector
class MarkCompactCollector {
public:
// Sets the global flags; must be called before Prepare to take effect.
inline void SetFlags(int flags);
static void Initialize();
void SetUp();
......@@ -600,12 +597,6 @@ class MarkCompactCollector {
void ClearMarkbits();
bool abort_incremental_marking() const { return abort_incremental_marking_; }
bool finalize_incremental_marking() const {
return finalize_incremental_marking_;
}
bool is_compacting() const { return compacting_; }
MarkingParity marking_parity() { return marking_parity_; }
......@@ -703,12 +694,6 @@ class MarkCompactCollector {
CollectorState state_;
#endif
bool reduce_memory_footprint_;
bool abort_incremental_marking_;
bool finalize_incremental_marking_;
MarkingParity marking_parity_;
// True if we are collecting slots to perform evacuation from evacuation
......
......@@ -7313,7 +7313,12 @@ TEST(Regress131642) {
// Import from test-heap.cc
namespace v8 {
namespace internal {
int CountNativeContexts();
}
}
static void NopListener(const v8::Debug::EventDetails& event_details) {
......@@ -7323,15 +7328,15 @@ static void NopListener(const v8::Debug::EventDetails& event_details) {
// Checks that attaching a debug event listener is what triggers creation of
// an additional (debug) native context, and that merely hitting a `debugger;`
// statement without an active listener does not.
TEST(DebuggerCreatesContextIffActive) {
  DebugLocalContext env;
  v8::HandleScope scope(env->GetIsolate());
  // Only the test context exists before any debugger activity.
  CHECK_EQ(1, v8::internal::CountNativeContexts());

  // No listener installed: the debugger statement must not create a context.
  v8::Debug::SetDebugEventListener(NULL);
  CompileRun("debugger;");
  CHECK_EQ(1, v8::internal::CountNativeContexts());

  // Listener installed: hitting the debugger creates the debug context.
  v8::Debug::SetDebugEventListener(NopListener);
  CompileRun("debugger;");
  CHECK_EQ(2, v8::internal::CountNativeContexts());

  v8::Debug::SetDebugEventListener(NULL);
}
......
......@@ -40,9 +40,32 @@
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
using namespace v8::internal;
using v8::Just;
namespace v8 {
namespace internal {
// Tests that should have access to private methods of {v8::internal::Heap}.
// Those tests need to be defined using HEAP_TEST(Name) { ... }.
#define HEAP_TEST_METHODS(V) \
V(GCFlags)
#define HEAP_TEST(Name) \
CcTest register_test_##Name(HeapTester::Test##Name, __FILE__, #Name, NULL, \
true, true); \
void HeapTester::Test##Name()
// Friend of {Heap} (see "friend class HeapTester" there) that gives cctest
// cases access to private heap methods. Each test listed in
// HEAP_TEST_METHODS gets a static Test##Name declaration here and is defined
// with HEAP_TEST(Name) { ... }.
class HeapTester {
 public:
#define DECLARE_STATIC(Name) static void Test##Name();
  HEAP_TEST_METHODS(DECLARE_STATIC)
#undef HEAP_TEST_METHODS
// Undefine the helper as well so it does not leak past this class.
#undef DECLARE_STATIC
};
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
......@@ -2767,6 +2790,36 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
}
// Verifies the lifecycle of Heap::current_gc_flags_: flags passed to a full
// GC are reset to kNoGCFlags afterwards, while flags passed to
// IncrementalMarking::Start stay set across scavenges until the marking
// cycle finishes with a full GC.
HEAP_TEST(GCFlags) {
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
heap->set_current_gc_flags(Heap::kNoGCFlags);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
// Set the flags to check whether we appropriately reset them after the GC.
heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
// Finish any in-progress sweeping so incremental marking can be restarted.
MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
IncrementalMarking* marking = heap->incremental_marking();
marking->Stop();
marking->Start(Heap::kReduceMemoryFootprintMask);
CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
// A scavenge must not clear flags set for the ongoing marking cycle.
heap->Scavenge();
CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
// The full GC that finishes the cycle resets the flags again.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
}
TEST(IdleNotificationFinishMarking) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
......@@ -6386,3 +6439,6 @@ TEST(ContextMeasure) {
CHECK_LE(measure.Count(), count_upper_limit);
CHECK_LE(measure.Size(), size_upper_limit);
}
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment