Commit 7e5755cb authored by mlippautz, committed by Commit bot

[heap] Minor MC: Add marking

Adds the logic for marking the young generation.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2498583002
Cr-Commit-Position: refs/heads/master@{#41104}
parent 46a4156a
......@@ -719,7 +719,7 @@ void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
}
}
template <GlobalHandles::IterationMode mode>
void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
......@@ -728,18 +728,35 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
node->IsWeakRetainer()) {
// Pending weak phantom handles die immediately. Everything else survives.
if (node->IsPendingPhantomResetHandle()) {
node->ResetPhantomHandle();
++number_of_phantom_handle_resets_;
if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
node->ResetPhantomHandle();
++number_of_phantom_handle_resets_;
}
} else if (node->IsPendingPhantomCallback()) {
node->CollectPhantomCallbackData(isolate(),
&pending_phantom_callbacks_);
if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
node->CollectPhantomCallbackData(isolate(),
&pending_phantom_callbacks_);
}
} else {
v->VisitPointer(node->location());
if (mode == IterationMode::VISIT_OTHERS ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
v->VisitPointer(node->location());
}
}
}
}
}
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES>(ObjectVisitor* v);
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::VISIT_OTHERS>(ObjectVisitor* v);
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(ObjectVisitor* v);
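The explicit instantiations keep the template definition in global-handles.cc while exposing exactly these three modes. They factor apart what the previous code always did together: HANDLE_PHANTOM_NODES resets pending phantom handles and collects their callback data, VISIT_OTHERS forwards the remaining weak roots to the visitor, and HANDLE_PHANTOM_NODES_VISIT_OTHERS preserves the old combined behavior. A sketch of the resulting call sites (the scavenger call appears verbatim later in this diff; the minor-MC call is an assumption, since the mark-compact.cc diff is collapsed):

```cpp
// Scavenge: handle phantom nodes and visit the remaining weak roots in one pass.
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots<
    GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(&scavenge_visitor);

// Minor MC marking (assumed): visit weak roots only, leaving phantom-node
// processing to a later phase.
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots<
    GlobalHandles::VISIT_OTHERS>(&marking_visitor);
```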
DISABLE_CFI_PERF
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
......
......@@ -113,6 +113,12 @@ enum WeaknessType {
class GlobalHandles {
public:
enum IterationMode {
HANDLE_PHANTOM_NODES_VISIT_OTHERS,
VISIT_OTHERS,
HANDLE_PHANTOM_NODES
};
~GlobalHandles();
// Creates a new global handle that is alive until Destroy is called.
......@@ -227,6 +233,7 @@ class GlobalHandles {
// Iterates over weak independent or unmodified handles.
// See the note above.
template <IterationMode mode>
void IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v);
// Identifies unmodified objects that are in a weak state and marks them
......@@ -290,7 +297,7 @@ class GlobalHandles {
#ifdef DEBUG
void PrintStats();
void Print();
#endif
#endif // DEBUG
private:
explicit GlobalHandles(Isolate* isolate);
......
......@@ -146,6 +146,7 @@ GCIdleTimeAction GCIdleTimeHandler::Compute(double idle_time_in_ms,
return GCIdleTimeAction::IncrementalStep();
}
bool GCIdleTimeHandler::Enabled() { return FLAG_incremental_marking; }
} // namespace internal
} // namespace v8
......@@ -125,6 +125,8 @@ class V8_EXPORT_PRIVATE GCIdleTimeHandler {
GCIdleTimeAction Compute(double idle_time_in_ms,
GCIdleTimeHeapState heap_state);
bool Enabled();
void ResetNoProgressCounter() { idle_times_which_made_no_progress_ = 0; }
static size_t EstimateMarkingStepSize(double idle_time_in_ms,
......
......@@ -509,9 +509,14 @@ void GCTracer::PrintNVP() const {
"pause=%.1f "
"mutator=%.1f "
"gc=%s "
"reduce_memory=%d\n",
duration, spent_in_mutator, current_.TypeName(true),
current_.reduce_memory);
"reduce_memory=%d "
"mark=%.2f "
"mark.roots=%.2f "
"mark.old_to_new=%.2f\n",
duration, spent_in_mutator, "mmc", current_.reduce_memory,
current_.scopes[Scope::MINOR_MC_MARK],
current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
current_.scopes[Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS]);
break;
case Event::MARK_COMPACTOR:
case Event::INCREMENTAL_MARK_COMPACTOR:
......
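For reference, the minor-MC branch above emits a trace line of the following shape (timings are illustrative placeholders, not measured values):

```
pause=0.4 mutator=12.3 gc=mmc reduce_memory=0 mark=0.25 mark.roots=0.10 mark.old_to_new=0.08
```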
......@@ -79,6 +79,12 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
F(MC_SWEEP_CODE) \
F(MC_SWEEP_MAP) \
F(MC_SWEEP_OLD) \
F(MINOR_MC_MARK) \
F(MINOR_MC_MARK_CODE_FLUSH_CANDIDATES) \
F(MINOR_MC_MARK_GLOBAL_HANDLES) \
F(MINOR_MC_MARK_OLD_TO_NEW_POINTERS) \
F(MINOR_MC_MARK_ROOTS) \
F(MINOR_MC_MARK_WEAK) \
F(SCAVENGER_CODE_FLUSH_CANDIDATES) \
F(SCAVENGER_OLD_TO_NEW_POINTERS) \
F(SCAVENGER_ROOTS) \
......
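Each new scope is intended to be opened via the tracer's TRACE_GC macro around the corresponding minor-MC phase, so the timings surface in the PrintNVP() output above. A minimal sketch (the call site is assumed; the mark-compact.cc changes are collapsed in this view):

```cpp
{
  // Attributes the time spent in young-generation marking to the
  // MINOR_MC_MARK scope.
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
  MarkLiveObjectsInYoungGeneration();
}
```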
......@@ -1502,21 +1502,6 @@ static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
!HeapObject::cast(*p)->map_word().IsForwardingAddress();
}
static bool IsUnmodifiedHeapObject(Object** p) {
Object* object = *p;
if (object->IsSmi()) return false;
HeapObject* heap_object = HeapObject::cast(object);
if (!object->IsJSObject()) return false;
JSObject* js_object = JSObject::cast(object);
if (!js_object->WasConstructedFromApiFunction()) return false;
JSFunction* constructor =
JSFunction::cast(js_object->map()->GetConstructor());
return constructor->initial_map() == heap_object->map();
}
void PromotionQueue::Initialize() {
// The last to-space page may be used for the promotion queue. On promotion
// conflict, we use the emergency stack.
......@@ -1691,8 +1676,10 @@ void Heap::Scavenge() {
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
&IsUnscavengedHeapObject);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&scavenge_visitor);
isolate()
->global_handles()
->IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(&scavenge_visitor);
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
......@@ -2912,6 +2899,18 @@ bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
!InNewSpace(root(root_index));
}
bool Heap::IsUnmodifiedHeapObject(Object** p) {
Object* object = *p;
if (object->IsSmi()) return false;
HeapObject* heap_object = HeapObject::cast(object);
// Only objects constructed through the API can be considered unmodified.
if (!object->IsJSObject()) return false;
JSObject* js_object = JSObject::cast(object);
if (!js_object->WasConstructedFromApiFunction()) return false;
JSFunction* constructor =
JSFunction::cast(js_object->map()->GetConstructor());
// Unmodified means the object's map is still the constructor's initial
// map, i.e. no property or elements-kind transitions have occurred.
return constructor->initial_map() == heap_object->map();
}
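Promoting this helper from a file-local static in heap.cc to a static member of Heap lets the young-generation marker reuse it as well. A hedged sketch of the call shape, assuming GlobalHandles::IdentifyWeakUnmodifiedObjects as the consumer (the scavenger already passes such a predicate there):

```cpp
// Identify weak, API-constructed objects that were never modified; these can
// be handled specially during young-generation GC.
isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
    &Heap::IsUnmodifiedHeapObject);
```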
int Heap::FullSizeNumberStringCacheLength() {
// Compute the size of the number string cache based on the max newspace size.
......
......@@ -669,6 +669,8 @@ class Heap {
// they are in new space.
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
static bool IsUnmodifiedHeapObject(Object** p);
// Zapping is needed for verify heap, and always done in debug builds.
static inline bool ShouldZapGarbage() {
#ifdef DEBUG
......@@ -782,6 +784,9 @@ class Heap {
Object* encountered_weak_collections() const {
return encountered_weak_collections_;
}
// Lets a visitor process the head of the list of weak collections
// encountered during marking.
void VisitEncounteredWeakCollections(ObjectVisitor* visitor) {
visitor->VisitPointer(&encountered_weak_collections_);
}
void set_encountered_weak_cells(Object* weak_cell) {
encountered_weak_cells_ = weak_cell;
......
This diff is collapsed.
......@@ -17,6 +17,8 @@
namespace v8 {
namespace internal {
enum class MarkCompactMode { FULL, YOUNG_GENERATION };
// Callback function, returns whether an object is alive. The heap size
// of the object is returned in size. It optionally updates the offset
// to the first live object in the page (only used for old and map objects).
......@@ -29,6 +31,7 @@ typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object);
class CodeFlusher;
class MarkCompactCollector;
class MarkingVisitor;
template <MarkCompactMode mode>
class RootMarkingVisitor;
class ObjectMarking : public AllStatic {
......@@ -416,6 +419,9 @@ class MarkCompactCollector {
static void Initialize();
static SlotCallbackResult CheckAndMarkObject(Heap* heap,
Address slot_address);
void SetUp();
void TearDown();
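CheckAndMarkObject (declared above) has the shape of a remembered-set slot callback: given a slot address, it marks the pointed-to young-generation object and returns whether the slot should be kept. A hedged sketch of the pass that the MINOR_MC_MARK_OLD_TO_NEW_POINTERS scope times, with the RememberedSet call shape assumed:

```cpp
RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address slot) {
  // Mark the young-generation object this old-to-new slot points to (if any)
  // and tell the remembered set whether to keep the slot.
  return MarkCompactCollector::CheckAndMarkObject(heap(), slot);
});
```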
......@@ -435,12 +441,6 @@ class MarkCompactCollector {
void AbortCompaction();
#ifdef DEBUG
// Checks whether performing mark-compact collection.
bool in_use() { return state_ > PREPARE_GC; }
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif
// Determine type of object and emit deletion log event.
static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate);
......@@ -458,15 +458,6 @@ class MarkCompactCollector {
CodeFlusher* code_flusher() { return code_flusher_; }
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
#ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedObjectsInCode();
void VerifyOmittedMapChecks();
#endif
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
return Page::FromAddress(reinterpret_cast<Address>(host))
->ShouldSkipEvacuationSlotRecording();
......@@ -525,6 +516,21 @@ class MarkCompactCollector {
Sweeper& sweeper() { return sweeper_; }
#ifdef DEBUG
// Checks whether performing mark-compact collection.
bool in_use() { return state_ > PREPARE_GC; }
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif
#ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedObjectsInCode();
void VerifyOmittedMapChecks();
#endif
private:
template <PageEvacuationMode mode>
class EvacuateNewSpacePageVisitor;
......@@ -564,8 +570,10 @@ class MarkCompactCollector {
friend class MarkCompactMarkingVisitor;
friend class MarkingVisitor;
friend class RecordMigratedSlotVisitor;
template <MarkCompactMode mode>
friend class RootMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
friend class StaticYoungGenerationMarkingVisitor;
// Mark code objects that are active on the stack to prevent them
// from being flushed.
......@@ -575,6 +583,8 @@ class MarkCompactCollector {
// Marking operations for objects reachable from roots.
void MarkLiveObjects();
// Mark the young generation.
void MarkLiveObjectsInYoungGeneration();
// Pushes a black object onto the marking stack and accounts for live bytes.
// Note that this assumes live bytes have not yet been counted.
......@@ -593,14 +603,15 @@ class MarkCompactCollector {
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
void MarkRoots(RootMarkingVisitor<MarkCompactMode::FULL>* visitor);
// Mark the string table specially. References to internalized strings from
// the string table are weak.
void MarkStringTable(RootMarkingVisitor* visitor);
void MarkStringTable(RootMarkingVisitor<MarkCompactMode::FULL>* visitor);
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap.
template <MarkCompactMode mode>
void ProcessMarkingDeque();
// Mark objects reachable (transitively) from objects in the marking stack
......@@ -624,11 +635,13 @@ class MarkCompactCollector {
// stack. This function empties the marking stack, but may leave
// overflowed objects in the heap, in which case the marking stack's
// overflow flag will be set.
template <MarkCompactMode mode>
void EmptyMarkingDeque();
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
// flag on the marking stack.
template <MarkCompactMode mode>
void RefillMarkingDeque();
// Helper methods for refilling the marking stack by discovering grey objects
......
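Taken together, the comments above describe the usual marking-deque overflow protocol, now parameterized by MarkCompactMode. A sketch consistent with these declarations (assumed bodies; the real implementations are in the collapsed mark-compact.cc diff):

```cpp
template <MarkCompactMode mode>
void MarkCompactCollector::ProcessMarkingDeque() {
  // Drain the deque; if it overflowed, rediscover grey objects on the heap
  // and drain again until the overflow flag stays clear.
  EmptyMarkingDeque<mode>();
  while (marking_deque()->overflowed()) {
    RefillMarkingDeque<mode>();
    EmptyMarkingDeque<mode>();
  }
}
```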
......@@ -142,6 +142,7 @@ void SimulateFullSpace(v8::internal::NewSpace* space,
}
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
CHECK(FLAG_incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
......
......@@ -188,6 +188,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
}
TEST(ArrayBuffer_NonLivePromotion) {
if (!FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
......@@ -223,6 +224,7 @@ TEST(ArrayBuffer_NonLivePromotion) {
}
TEST(ArrayBuffer_LivePromotion) {
if (!FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
......@@ -257,6 +259,7 @@ TEST(ArrayBuffer_LivePromotion) {
}
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
if (!i::FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved across semispace
// copy.
CcTest::InitializeVM();
......
......@@ -1362,6 +1362,7 @@ TEST(TestCodeFlushingPreAged) {
TEST(TestCodeFlushingIncremental) {
if (!i::FLAG_incremental_marking) return;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1434,6 +1435,7 @@ TEST(TestCodeFlushingIncremental) {
TEST(TestCodeFlushingIncrementalScavenge) {
if (!FLAG_incremental_marking) return;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1505,6 +1507,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
TEST(TestCodeFlushingIncrementalAbort) {
if (!i::FLAG_incremental_marking) return;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1571,6 +1574,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
}
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
if (!i::FLAG_incremental_marking) return;
// Turn off always_opt because it interferes with running the built-in for
// the last call to g().
i::FLAG_always_opt = false;
......@@ -2605,6 +2609,7 @@ TEST(LeakNativeContextViaMapProto) {
TEST(InstanceOfStubWriteBarrier) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
......@@ -2671,6 +2676,7 @@ int GetProfilerTicks(SharedFunctionInfo* shared) {
} // namespace
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
......@@ -2762,6 +2768,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
HEAP_TEST(GCFlags) {
if (!i::FLAG_incremental_marking) return;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
......@@ -2794,6 +2801,7 @@ HEAP_TEST(GCFlags) {
TEST(IdleNotificationFinishMarking) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
const int initial_gc_count = CcTest::heap()->gc_count();
......@@ -3293,6 +3301,7 @@ static int CountMapTransitions(Map* map) {
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
i::FLAG_trace_incremental_marking = true;
......@@ -3668,6 +3677,7 @@ TEST(PrintSharedFunctionInfo) {
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
if (!FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3743,6 +3753,7 @@ static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
}
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3766,6 +3777,7 @@ TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
}
TEST(IncrementalMarkingPreservesMonomorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3787,6 +3799,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
}
TEST(IncrementalMarkingPreservesPolymorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3824,6 +3837,7 @@ TEST(IncrementalMarkingPreservesPolymorphicIC) {
}
TEST(ContextDisposeDoesntClearPolymorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3979,6 +3993,7 @@ UNINITIALIZED_TEST(ReleaseStackTraceData) {
TEST(Regress159140) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4039,6 +4054,7 @@ TEST(Regress159140) {
TEST(Regress165495) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4085,6 +4101,7 @@ TEST(Regress165495) {
TEST(Regress169209) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_always_opt = false;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
......@@ -4301,6 +4318,7 @@ TEST(Regress513507) {
TEST(Regress514122) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_flush_optimized_code_cache = false;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
......@@ -4562,6 +4580,7 @@ TEST(Regress513496) {
TEST(LargeObjectSlotRecording) {
if (!i::FLAG_incremental_marking) return;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4634,6 +4653,7 @@ TEST(DeferredHandles) {
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CompileRun("function f(n) {"
......@@ -5445,6 +5465,7 @@ TEST(WeakCell) {
TEST(WeakCellsWithIncrementalMarking) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
......@@ -5716,6 +5737,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
TEST(Regress388880) {
if (!FLAG_incremental_marking) return;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -5763,6 +5785,7 @@ TEST(Regress388880) {
TEST(Regress3631) {
if (!FLAG_incremental_marking) return;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -5896,6 +5919,7 @@ void CheckMapRetainingFor(int n) {
TEST(MapRetaining) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
......@@ -6395,6 +6419,7 @@ static UniqueId MakeUniqueId(const Persistent<T>& p) {
TEST(Regress519319) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
v8::HandleScope scope(isolate);
......@@ -6463,6 +6488,7 @@ HEAP_TEST(Regress587004) {
}
HEAP_TEST(Regress589413) {
if (!FLAG_incremental_marking) return;
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_parallel_compaction = false;
......@@ -6538,6 +6564,7 @@ HEAP_TEST(Regress589413) {
}
TEST(Regress598319) {
if (!FLAG_incremental_marking) return;
// This test ensures that no white objects can cross the progress bar of large
// objects during incremental marking. It checks this by using Shift() during
// incremental marking.
......@@ -6654,6 +6681,7 @@ TEST(Regress609761) {
}
TEST(Regress615489) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6708,6 +6736,7 @@ class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
};
TEST(Regress631969) {
if (!FLAG_incremental_marking) return;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_parallel_compaction = false;
FLAG_concurrent_sweeping = false;
......@@ -6753,6 +6782,7 @@ TEST(Regress631969) {
}
TEST(LeftTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6791,6 +6821,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
}
TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6856,6 +6887,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
}
TEST(ContinuousRightTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6914,6 +6946,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
}
TEST(Regress618958) {
if (!i::FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
......
......@@ -42,6 +42,7 @@ namespace v8 {
namespace internal {
UNINITIALIZED_TEST(PagePromotion_NewToOld) {
if (!i::FLAG_incremental_marking) return;
v8::Isolate* isolate = NewIsolateForPagePromotion();
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
{
......
......@@ -1821,6 +1821,7 @@ TEST(CodeSerializerEagerCompilationAndPreAge) {
}
TEST(Regress503552) {
if (!FLAG_incremental_marking) return;
// Test that the code serializer can deal with weak cells that form a linked
// list during incremental marking.
CcTest::InitializeVM();
......
......@@ -249,6 +249,7 @@ TEST(Regress2060b) {
TEST(Regress399527) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
......
......@@ -89,6 +89,7 @@ TEST_F(GCIdleTimeHandlerTest, DontDoFinalIncrementalMarkCompact) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.incremental_marking_stopped = true;
......@@ -99,6 +100,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -111,6 +113,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate = 1.0;
......@@ -122,6 +125,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -134,6 +138,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -146,6 +151,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
double idle_time_ms = 10;
GCIdleTimeAction action = handler()->Compute(idle_time_ms, heap_state);
......@@ -154,6 +160,7 @@ TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
size_t speed = kMarkCompactSpeed;
......@@ -164,6 +171,7 @@ TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
......@@ -173,6 +181,7 @@ TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
......@@ -185,6 +194,7 @@ TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
for (int i = 0; i < kMaxNotifications; i++) {
GCIdleTimeAction action = handler()->Compute(0, heap_state);
......@@ -194,6 +204,7 @@ TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
for (int i = 0; i < kMaxNotifications; i++) {
......@@ -204,6 +215,8 @@ TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, DoneIfNotMakingProgressOnIncrementalMarking) {
if (!handler()->Enabled()) return;
// Regression test for crbug.com/489323.
GCIdleTimeHeapState heap_state = DefaultHeapState();
......