Commit 7e5755cb authored by mlippautz, committed by Commit bot

[heap] Minor MC: Add marking

Adds the logic for marking the young generation.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2498583002
Cr-Commit-Position: refs/heads/master@{#41104}
parent 46a4156a
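For orientation, the following is a minimal, self-contained sketch of the phase ordering that the new MarkLiveObjectsInYoungGeneration (further down, in mark-compact.cc) implements. All names here are illustrative stubs, not V8 APIs; in the real code each step is followed by a ProcessMarkingDeque<YOUNG_GENERATION>() fixpoint before the next root set is visited.

// sketch.cc -- illustrative only; stub names, not V8 code.
#include <iostream>

void MarkRoots() { std::cout << "1. mark strong roots (scavenge root set)\n"; }
void MarkOldToNewSlots() { std::cout << "2. mark targets of old-to-new slots\n"; }
void MarkWeakCollections() { std::cout << "3. visit encountered weak collections\n"; }
void MarkCodeFlushCandidates() { std::cout << "4. mark code-flush candidates (if enabled)\n"; }
void MarkGlobalHandles() { std::cout << "5. visit surviving weak unmodified global handles\n"; }

// In the real collector, a marking-deque fixpoint follows each step.
int main() {
  MarkRoots();
  MarkOldToNewSlots();
  MarkWeakCollections();
  MarkCodeFlushCandidates();
  MarkGlobalHandles();
}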
......@@ -719,7 +719,7 @@ void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
}
}
template <GlobalHandles::IterationMode mode>
void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
......@@ -728,18 +728,35 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) {
node->IsWeakRetainer()) {
// Pending weak phantom handles die immediately. Everything else survives.
if (node->IsPendingPhantomResetHandle()) {
if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
node->ResetPhantomHandle();
++number_of_phantom_handle_resets_;
}
} else if (node->IsPendingPhantomCallback()) {
if (mode == IterationMode::HANDLE_PHANTOM_NODES ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
node->CollectPhantomCallbackData(isolate(),
&pending_phantom_callbacks_);
}
} else {
if (mode == IterationMode::VISIT_OTHERS ||
mode == IterationMode::HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
v->VisitPointer(node->location());
}
}
}
}
}
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES>(ObjectVisitor* v);
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::VISIT_OTHERS>(ObjectVisitor* v);
template void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(ObjectVisitor* v);
DISABLE_CFI_PERF
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
......
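The hunk above relies on a compile-time dispatch pattern worth calling out: the iteration mode is an enum template parameter, the branches on mode fold away per instantiation, and the three explicit instantiations at the end keep the template definition in the .cc file. A standalone sketch of the same pattern (simplified types, not the V8 classes):

// mode_dispatch.cc -- illustrative sketch of enum-templated dispatch.
#include <cstdio>

enum IterationMode {
  HANDLE_PHANTOM_NODES_VISIT_OTHERS,
  VISIT_OTHERS,
  HANDLE_PHANTOM_NODES
};

template <IterationMode mode>
void IterateRoots() {
  // Both branches run for HANDLE_PHANTOM_NODES_VISIT_OTHERS; the dead branch
  // in the other two instantiations is folded away at compile time.
  if (mode == HANDLE_PHANTOM_NODES ||
      mode == HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
    std::printf("reset or collect pending phantom handles\n");
  }
  if (mode == VISIT_OTHERS || mode == HANDLE_PHANTOM_NODES_VISIT_OTHERS) {
    std::printf("visit the remaining weak retainers\n");
  }
}

// Explicit instantiations, mirroring the three in the hunk above.
template void IterateRoots<HANDLE_PHANTOM_NODES>();
template void IterateRoots<VISIT_OTHERS>();
template void IterateRoots<HANDLE_PHANTOM_NODES_VISIT_OTHERS>();

int main() { IterateRoots<HANDLE_PHANTOM_NODES_VISIT_OTHERS>(); }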
......@@ -113,6 +113,12 @@ enum WeaknessType {
class GlobalHandles {
public:
enum IterationMode {
HANDLE_PHANTOM_NODES_VISIT_OTHERS,
VISIT_OTHERS,
HANDLE_PHANTOM_NODES
};
~GlobalHandles();
// Creates a new global handle that is alive until Destroy is called.
......@@ -227,6 +233,7 @@ class GlobalHandles {
// Iterates over weak independent or unmodified handles.
// See the note above.
template <IterationMode mode>
void IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v);
// Identifies unmodified objects that are in weak state and marks them
......@@ -290,7 +297,7 @@ class GlobalHandles {
#ifdef DEBUG
void PrintStats();
void Print();
#endif
#endif // DEBUG
private:
explicit GlobalHandles(Isolate* isolate);
......
......@@ -146,6 +146,7 @@ GCIdleTimeAction GCIdleTimeHandler::Compute(double idle_time_in_ms,
return GCIdleTimeAction::IncrementalStep();
}
bool GCIdleTimeHandler::Enabled() { return FLAG_incremental_marking; }
} // namespace internal
} // namespace v8
......@@ -125,6 +125,8 @@ class V8_EXPORT_PRIVATE GCIdleTimeHandler {
GCIdleTimeAction Compute(double idle_time_in_ms,
GCIdleTimeHeapState heap_state);
bool Enabled();
void ResetNoProgressCounter() { idle_times_which_made_no_progress_ = 0; }
static size_t EstimateMarkingStepSize(double idle_time_in_ms,
......
......@@ -509,9 +509,14 @@ void GCTracer::PrintNVP() const {
"pause=%.1f "
"mutator=%.1f "
"gc=%s "
"reduce_memory=%d\n",
duration, spent_in_mutator, current_.TypeName(true),
current_.reduce_memory);
"reduce_memory=%d "
"mark=%.2f "
"mark.roots=%.2f "
"mark.old_to_new=%.2f\n",
duration, spent_in_mutator, "mmc", current_.reduce_memory,
current_.scopes[Scope::MINOR_MC_MARK],
current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
current_.scopes[Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS]);
break;
case Event::MARK_COMPACTOR:
case Event::INCREMENTAL_MARK_COMPACTOR:
......
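The new mark=/mark.roots=/mark.old_to_new= fields are read out of current_.scopes, which the TRACE_GC macros fill via RAII timers. A simplified stand-in (not the actual GCTracer types) showing how a scoped timer accumulates elapsed milliseconds into per-scope buckets:

// scoped_timer.cc -- simplified model of TRACE_GC scope accounting.
#include <chrono>
#include <cstdio>

enum ScopeId { MINOR_MC_MARK, MINOR_MC_MARK_ROOTS, NUMBER_OF_SCOPES };

struct Tracer {
  double scopes[NUMBER_OF_SCOPES] = {0.0};  // milliseconds per scope
};

class ScopedTimer {
 public:
  ScopedTimer(Tracer* tracer, ScopeId id)
      : tracer_(tracer), id_(id), start_(std::chrono::steady_clock::now()) {}
  ~ScopedTimer() {
    std::chrono::duration<double, std::milli> elapsed =
        std::chrono::steady_clock::now() - start_;
    tracer_->scopes[id_] += elapsed.count();  // accumulate on scope exit
  }

 private:
  Tracer* tracer_;
  ScopeId id_;
  std::chrono::steady_clock::time_point start_;
};

int main() {
  Tracer tracer;
  {
    ScopedTimer mark(&tracer, MINOR_MC_MARK);
    ScopedTimer roots(&tracer, MINOR_MC_MARK_ROOTS);  // nested scope
  }
  std::printf("mark=%.2f mark.roots=%.2f\n", tracer.scopes[MINOR_MC_MARK],
              tracer.scopes[MINOR_MC_MARK_ROOTS]);
}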
......@@ -79,6 +79,12 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
F(MC_SWEEP_CODE) \
F(MC_SWEEP_MAP) \
F(MC_SWEEP_OLD) \
F(MINOR_MC_MARK) \
F(MINOR_MC_MARK_CODE_FLUSH_CANDIDATES) \
F(MINOR_MC_MARK_GLOBAL_HANDLES) \
F(MINOR_MC_MARK_OLD_TO_NEW_POINTERS) \
F(MINOR_MC_MARK_ROOTS) \
F(MINOR_MC_MARK_WEAK) \
F(SCAVENGER_CODE_FLUSH_CANDIDATES) \
F(SCAVENGER_OLD_TO_NEW_POINTERS) \
F(SCAVENGER_ROOTS) \
......
......@@ -1502,21 +1502,6 @@ static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
!HeapObject::cast(*p)->map_word().IsForwardingAddress();
}
static bool IsUnmodifiedHeapObject(Object** p) {
Object* object = *p;
if (object->IsSmi()) return false;
HeapObject* heap_object = HeapObject::cast(object);
if (!object->IsJSObject()) return false;
JSObject* js_object = JSObject::cast(object);
if (!js_object->WasConstructedFromApiFunction()) return false;
JSFunction* constructor =
JSFunction::cast(js_object->map()->GetConstructor());
return constructor->initial_map() == heap_object->map();
}
void PromotionQueue::Initialize() {
// The last to-space page may be used for promotion queue. On promotion
// conflict, we use the emergency stack.
......@@ -1691,8 +1676,10 @@ void Heap::Scavenge() {
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
&IsUnscavengedHeapObject);
isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
&scavenge_visitor);
isolate()
->global_handles()
->IterateNewSpaceWeakUnmodifiedRoots<
GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(&scavenge_visitor);
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
......@@ -2912,6 +2899,18 @@ bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
!InNewSpace(root(root_index));
}
bool Heap::IsUnmodifiedHeapObject(Object** p) {
Object* object = *p;
if (object->IsSmi()) return false;
HeapObject* heap_object = HeapObject::cast(object);
if (!object->IsJSObject()) return false;
JSObject* js_object = JSObject::cast(object);
if (!js_object->WasConstructedFromApiFunction()) return false;
JSFunction* constructor =
JSFunction::cast(js_object->map()->GetConstructor());
return constructor->initial_map() == heap_object->map();
}
int Heap::FullSizeNumberStringCacheLength() {
// Compute the size of the number string cache based on the max newspace size.
......
......@@ -669,6 +669,8 @@ class Heap {
// they are in new space.
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
static bool IsUnmodifiedHeapObject(Object** p);
// Zapping is needed for heap verification and is always done in debug builds.
static inline bool ShouldZapGarbage() {
#ifdef DEBUG
......@@ -782,6 +784,9 @@ class Heap {
Object* encountered_weak_collections() const {
return encountered_weak_collections_;
}
void VisitEncounteredWeakCollections(ObjectVisitor* visitor) {
visitor->VisitPointer(&encountered_weak_collections_);
}
void set_encountered_weak_cells(Object* weak_cell) {
encountered_weak_cells_ = weak_cell;
......
......@@ -322,7 +322,6 @@ void MarkCompactCollector::CollectGarbage() {
Finish();
}
#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
for (Page* p : *space) {
......@@ -354,7 +353,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
}
}
void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
HeapObjectIterator code_iterator(heap()->code_space());
for (HeapObject* obj = code_iterator.Next(); obj != NULL;
......@@ -1083,6 +1081,39 @@ void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
}
}
class StaticYoungGenerationMarkingVisitor
: public StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor> {
public:
static void Initialize(Heap* heap) {
StaticNewSpaceVisitor<StaticYoungGenerationMarkingVisitor>::Initialize();
}
inline static void VisitPointer(Heap* heap, HeapObject* object, Object** p) {
Object* target = *p;
if (heap->InNewSpace(target)) {
if (MarkRecursively(heap, HeapObject::cast(target))) return;
PushOnMarkingDeque(heap, target);
}
}
protected:
inline static void PushOnMarkingDeque(Heap* heap, Object* obj) {
HeapObject* object = HeapObject::cast(obj);
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
heap->mark_compact_collector()->MarkObject(object, mark_bit);
}
inline static bool MarkRecursively(Heap* heap, HeapObject* object) {
StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false;
MarkBit mark = ObjectMarking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark)) return true;
heap->mark_compact_collector()->SetMark(object, mark);
IterateBody(object->map(), object);
return true;
}
};
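StaticYoungGenerationMarkingVisitor marks recursively while stack headroom remains (the StackLimitCheck) and falls back to the marking deque otherwise. A self-contained model of that strategy, with an explicit depth budget standing in for the stack limit (all types here are illustrative):

// recursive_marking.cc -- recursion with worklist fallback, simplified.
#include <cstdio>
#include <vector>

struct Object {
  bool marked = false;
  std::vector<Object*> children;
};

std::vector<Object*> worklist;  // stands in for the marking deque

// Returns false when the depth budget (the stand-in for a stack limit
// check) is exhausted; the caller then defers the object to the worklist.
bool MarkRecursively(Object* object, int depth) {
  if (depth > 64) return false;
  if (object->marked) return true;  // already black or grey
  object->marked = true;
  for (Object* child : object->children) {
    if (!MarkRecursively(child, depth + 1)) worklist.push_back(child);
  }
  return true;
}

int main() {
  Object a, b;
  a.children.push_back(&b);
  if (!MarkRecursively(&a, 0)) worklist.push_back(&a);
  // Deferred objects are drained iteratively, restarting with a fresh budget.
  while (!worklist.empty()) {
    Object* object = worklist.back();
    worklist.pop_back();
    MarkRecursively(object, 0);
  }
  std::printf("b marked: %d\n", b.marked);
}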
class MarkCompactMarkingVisitor
: public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
......@@ -1336,11 +1367,12 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
}
// Visitor class for marking heap roots.
template <MarkCompactMode mode>
class RootMarkingVisitor : public ObjectVisitor {
public:
explicit RootMarkingVisitor(Heap* heap)
......@@ -1362,6 +1394,10 @@ class RootMarkingVisitor : public ObjectVisitor {
HeapObject* object = HeapObject::cast(*p);
if (mode == MarkCompactMode::YOUNG_GENERATION &&
!collector_->heap()->InNewSpace(object))
return;
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark_bit)) return;
......@@ -1369,14 +1405,21 @@ class RootMarkingVisitor : public ObjectVisitor {
// Mark the object.
collector_->SetMark(object, mark_bit);
switch (mode) {
case MarkCompactMode::FULL: {
// Mark the map pointer and body, and push them on the marking stack.
MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
collector_->MarkObject(map, map_mark);
MarkCompactMarkingVisitor::IterateBody(map, object);
} break;
case MarkCompactMode::YOUNG_GENERATION:
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
break;
}
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
collector_->EmptyMarkingDeque();
collector_->EmptyMarkingDeque<mode>();
}
MarkCompactCollector* collector_;
......@@ -1946,8 +1989,8 @@ bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
return Marking::IsWhite(mark);
}
void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
void MarkCompactCollector::MarkStringTable(
RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
StringTable* string_table = heap()->string_table();
// Mark the string table itself.
MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table);
......@@ -1957,7 +2000,7 @@ void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
}
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
}
......@@ -1966,8 +2009,8 @@ void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
SetMark(site, mark_bit);
}
void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
void MarkCompactCollector::MarkRoots(
RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
// Mark the heap roots including global variables, stack variables,
// etc., and all objects reachable from them.
heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
......@@ -1977,8 +2020,8 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
// There may be overflowed objects in the heap. Visit them now.
while (marking_deque()->overflowed()) {
RefillMarkingDeque();
EmptyMarkingDeque();
RefillMarkingDeque<MarkCompactMode::FULL>();
EmptyMarkingDeque<MarkCompactMode::FULL>();
}
}
......@@ -2018,6 +2061,7 @@ void MarkCompactCollector::MarkImplicitRefGroups(
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
template <MarkCompactMode mode>
void MarkCompactCollector::EmptyMarkingDeque() {
while (!marking_deque()->IsEmpty()) {
HeapObject* object = marking_deque()->Pop();
......@@ -2028,10 +2072,17 @@ void MarkCompactCollector::EmptyMarkingDeque() {
DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object)));
Map* map = object->map();
switch (mode) {
case MarkCompactMode::FULL: {
MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
MarkObject(map, map_mark);
MarkCompactMarkingVisitor::IterateBody(map, object);
} break;
case MarkCompactMode::YOUNG_GENERATION: {
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
} break;
}
}
}
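EmptyMarkingDeque and RefillMarkingDeque work against a deque that can overflow: a push onto a full deque drops the object (which stays grey in the heap) and sets a flag, and a later refill rescans the heap for grey objects. A minimal illustration of that contract (not V8's MarkingDeque):

// overflow_deque.cc -- sketch of an overflow-aware marking worklist.
#include <cstdio>

class Worklist {
 public:
  bool Push(void* object) {
    if (top_ == kCapacity) {  // full: the object stays grey in the heap
      overflowed_ = true;
      return false;
    }
    buffer_[top_++] = object;
    return true;
  }
  void* Pop() { return top_ == 0 ? nullptr : buffer_[--top_]; }
  bool IsEmpty() const { return top_ == 0; }
  bool overflowed() const { return overflowed_; }
  void ClearOverflowed() { overflowed_ = false; }

 private:
  static const int kCapacity = 4;
  void* buffer_[kCapacity];
  int top_ = 0;
  bool overflowed_ = false;
};

int main() {
  Worklist deque;
  int objects[8];
  for (int& object : objects) {
    deque.Push(&object);  // pushes past capacity set the overflow flag
  }
  // ProcessMarkingDeque-style loop: drain, then refill while overflowed.
  while (!deque.IsEmpty()) deque.Pop();
  std::printf("overflowed: %d\n", deque.overflowed());
  deque.ClearOverflowed();  // a real refill rescans the heap for grey objects
}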
......@@ -2041,6 +2092,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
// before sweeping completes. If sweeping completes, there are no remaining
// overflowed objects in the heap, so the overflow flag on the marking stack
// is cleared.
template <MarkCompactMode mode>
void MarkCompactCollector::RefillMarkingDeque() {
isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow);
DCHECK(marking_deque()->overflowed());
......@@ -2048,18 +2100,17 @@ void MarkCompactCollector::RefillMarkingDeque() {
DiscoverGreyObjectsInNewSpace();
if (marking_deque()->IsFull()) return;
if (mode == MarkCompactMode::FULL) {
DiscoverGreyObjectsInSpace(heap()->old_space());
if (marking_deque()->IsFull()) return;
DiscoverGreyObjectsInSpace(heap()->code_space());
if (marking_deque()->IsFull()) return;
DiscoverGreyObjectsInSpace(heap()->map_space());
if (marking_deque()->IsFull()) return;
LargeObjectIterator lo_it(heap()->lo_space());
DiscoverGreyObjectsWithIterator(&lo_it);
if (marking_deque()->IsFull()) return;
}
marking_deque()->ClearOverflowed();
}
......@@ -2069,12 +2120,14 @@ void MarkCompactCollector::RefillMarkingDeque() {
// stack. Before: the marking stack contains zero or more heap object
// pointers. After: the marking stack is empty and there are no overflowed
// objects in the heap.
template <MarkCompactMode mode>
void MarkCompactCollector::ProcessMarkingDeque() {
EmptyMarkingDeque();
EmptyMarkingDeque<mode>();
while (marking_deque()->overflowed()) {
RefillMarkingDeque();
EmptyMarkingDeque();
RefillMarkingDeque<mode>();
EmptyMarkingDeque<mode>();
}
DCHECK(marking_deque()->IsEmpty());
}
// Mark all objects reachable (transitively) from objects on the marking
......@@ -2099,7 +2152,7 @@ void MarkCompactCollector::ProcessEphemeralMarking(
}
ProcessWeakCollections();
work_to_do = !marking_deque()->IsEmpty();
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
}
}
......@@ -2114,7 +2167,7 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
if (!code->CanDeoptAt(it.frame()->pc())) {
Code::BodyDescriptor::IterateBody(code, visitor);
}
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
return;
}
}
......@@ -2154,6 +2207,7 @@ void MarkingDeque::StartUsing() {
void MarkingDeque::StopUsing() {
base::LockGuard<base::Mutex> guard(&mutex_);
if (!in_use_) return;
DCHECK(IsEmpty());
DCHECK(!overflowed_);
top_ = bottom_ = mask_ = 0;
......@@ -2267,6 +2321,95 @@ void MarkCompactCollector::RecordObjectStats() {
}
}
SlotCallbackResult MarkCompactCollector::CheckAndMarkObject(
Heap* heap, Address slot_address) {
Object* object = *reinterpret_cast<Object**>(slot_address);
if (heap->InNewSpace(object)) {
// Marking happens before flipping the young generation, so the object
// has to be in ToSpace.
DCHECK(heap->InToSpace(object));
HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object);
if (Marking::IsBlackOrGrey(mark_bit)) {
return KEEP_SLOT;
}
heap->mark_compact_collector()->SetMark(heap_object, mark_bit);
StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(),
heap_object);
return KEEP_SLOT;
}
return REMOVE_SLOT;
}
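CheckAndMarkObject doubles as a filter: KEEP_SLOT retains a remembered-set entry for the later pointer-update phase, while REMOVE_SLOT drops entries whose targets are no longer in new space. A sketch of that callback contract with simplified stand-in types (the real RememberedSet API differs):

// slot_filter.cc -- illustrative KEEP_SLOT/REMOVE_SLOT filtering.
#include <cstdio>
#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

struct Slot {
  bool points_to_new_space;
};

// Iterate the recorded slots; the callback's result decides retention.
template <typename Callback>
void IterateSlots(std::vector<Slot>* slots, Callback callback) {
  std::vector<Slot> kept;
  for (Slot& slot : *slots) {
    if (callback(slot) == KEEP_SLOT) kept.push_back(slot);
  }
  slots->swap(kept);
}

int main() {
  std::vector<Slot> slots = {{true}, {false}, {true}};
  IterateSlots(&slots, [](Slot& slot) {
    // A real callback would also mark the target object, as above.
    return slot.points_to_new_space ? KEEP_SLOT : REMOVE_SLOT;
  });
  std::printf("slots kept: %zu\n", slots.size());
}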
static bool IsUnmarkedObject(Heap* heap, Object** p) {
DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
return heap->InNewSpace(*p) &&
!Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p)));
}
void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
PostponeInterruptsScope postpone(isolate());
StaticYoungGenerationMarkingVisitor::Initialize(heap());
RootMarkingVisitor<MarkCompactMode::YOUNG_GENERATION> root_visitor(heap());
marking_deque()->StartUsing();
isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
&Heap::IsUnmodifiedHeapObject);
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
heap()->IterateRoots(&root_visitor, VISIT_ALL_IN_SCAVENGE);
ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
}
{
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS);
RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address addr) {
return CheckAndMarkObject(heap(), addr);
});
RememberedSet<OLD_TO_NEW>::IterateTyped(
heap(), [this](SlotType type, Address host_addr, Address addr) {
return UpdateTypedSlotHelper::UpdateTypedSlot(
isolate(), type, addr, [this](Object** addr) {
return CheckAndMarkObject(heap(),
reinterpret_cast<Address>(addr));
});
});
ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK);
heap()->VisitEncounteredWeakCollections(&root_visitor);
ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
}
if (is_code_flushing_enabled()) {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_CODE_FLUSH_CANDIDATES);
code_flusher()->IteratePointersToFromSpace(&root_visitor);
ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
}
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_GLOBAL_HANDLES);
isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
&IsUnmarkedObject);
isolate()
->global_handles()
->IterateNewSpaceWeakUnmodifiedRoots<GlobalHandles::VISIT_OTHERS>(
&root_visitor);
ProcessMarkingDeque<MarkCompactMode::YOUNG_GENERATION>();
}
marking_deque()->StopUsing();
}
void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
// The recursive GC marker detects when it is nearing stack overflow,
......@@ -2296,7 +2439,7 @@ void MarkCompactCollector::MarkLiveObjects() {
PrepareForCodeFlushing();
}
RootMarkingVisitor root_visitor(heap());
RootMarkingVisitor<MarkCompactMode::FULL> root_visitor(heap());
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
......@@ -2328,7 +2471,7 @@ void MarkCompactCollector::MarkLiveObjects() {
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES);
heap()->isolate()->global_handles()->IdentifyWeakHandles(
&IsUnmarkedHeapObject);
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
}
// Then we mark the objects.
......@@ -2336,7 +2479,7 @@ void MarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS);
heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
ProcessMarkingDeque();
ProcessMarkingDeque<MarkCompactMode::FULL>();
}
// Repeat Harmony weak maps marking to mark unmarked objects reachable from
......
......@@ -17,6 +17,8 @@
namespace v8 {
namespace internal {
enum class MarkCompactMode { FULL, YOUNG_GENERATION };
// Callback function that returns whether an object is alive. The heap size
// of the object is returned in size. It optionally updates the offset
// to the first live object in the page (only used for old and map objects).
......@@ -29,6 +31,7 @@ typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object);
class CodeFlusher;
class MarkCompactCollector;
class MarkingVisitor;
template <MarkCompactMode mode>
class RootMarkingVisitor;
class ObjectMarking : public AllStatic {
......@@ -416,6 +419,9 @@ class MarkCompactCollector {
static void Initialize();
static SlotCallbackResult CheckAndMarkObject(Heap* heap,
Address slot_address);
void SetUp();
void TearDown();
......@@ -435,12 +441,6 @@ class MarkCompactCollector {
void AbortCompaction();
#ifdef DEBUG
// Checks whether a mark-compact collection is being performed.
bool in_use() { return state_ > PREPARE_GC; }
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif
// Determine type of object and emit deletion log event.
static void ReportDeleteIfNeeded(HeapObject* obj, Isolate* isolate);
......@@ -458,15 +458,6 @@ class MarkCompactCollector {
CodeFlusher* code_flusher() { return code_flusher_; }
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
#ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedObjectsInCode();
void VerifyOmittedMapChecks();
#endif
INLINE(static bool ShouldSkipEvacuationSlotRecording(Object* host)) {
return Page::FromAddress(reinterpret_cast<Address>(host))
->ShouldSkipEvacuationSlotRecording();
......@@ -525,6 +516,21 @@ class MarkCompactCollector {
Sweeper& sweeper() { return sweeper_; }
#ifdef DEBUG
// Checks whether a mark-compact collection is being performed.
bool in_use() { return state_ > PREPARE_GC; }
bool are_map_pointers_encoded() { return state_ == UPDATE_POINTERS; }
#endif
#ifdef VERIFY_HEAP
void VerifyValidStoreAndSlotsBufferEntries();
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedObjectsInCode();
void VerifyOmittedMapChecks();
#endif
private:
template <PageEvacuationMode mode>
class EvacuateNewSpacePageVisitor;
......@@ -564,8 +570,10 @@ class MarkCompactCollector {
friend class MarkCompactMarkingVisitor;
friend class MarkingVisitor;
friend class RecordMigratedSlotVisitor;
template <MarkCompactMode mode>
friend class RootMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
friend class StaticYoungGenerationMarkingVisitor;
// Mark code objects that are active on the stack to prevent them
// from being flushed.
......@@ -575,6 +583,8 @@ class MarkCompactCollector {
// Marking operations for objects reachable from roots.
void MarkLiveObjects();
// Mark the young generation.
void MarkLiveObjectsInYoungGeneration();
// Pushes a black object onto the marking stack and accounts for live bytes.
// Note that this assumes live bytes have not yet been counted.
......@@ -593,14 +603,15 @@ class MarkCompactCollector {
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
void MarkRoots(RootMarkingVisitor<MarkCompactMode::FULL>* visitor);
// Mark the string table specially. References to internalized strings from
// the string table are weak.
void MarkStringTable(RootMarkingVisitor* visitor);
void MarkStringTable(RootMarkingVisitor<MarkCompactMode::FULL>* visitor);
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap.
template <MarkCompactMode mode>
void ProcessMarkingDeque();
// Mark objects reachable (transitively) from objects in the marking stack
......@@ -624,11 +635,13 @@ class MarkCompactCollector {
// stack. This function empties the marking stack, but may leave
// overflowed objects in the heap, in which case the marking stack's
// overflow flag will be set.
template <MarkCompactMode mode>
void EmptyMarkingDeque();
// Refill the marking stack with overflowed objects from the heap. This
// function either leaves the marking stack full or clears the overflow
// flag on the marking stack.
template <MarkCompactMode mode>
void RefillMarkingDeque();
// Helper methods for refilling the marking stack by discovering grey objects
......
......@@ -142,6 +142,7 @@ void SimulateFullSpace(v8::internal::NewSpace* space,
}
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
CHECK(FLAG_incremental_marking);
i::IncrementalMarking* marking = heap->incremental_marking();
i::MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->sweeping_in_progress()) {
......
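The CHECK above, and the early-return guards added throughout the tests below, follow one pattern: any test that depends on incremental marking now bails out when FLAG_incremental_marking is off, so the suites keep passing in configurations that disable the flag.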
......@@ -188,6 +188,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
}
TEST(ArrayBuffer_NonLivePromotion) {
if (!FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
......@@ -223,6 +224,7 @@ TEST(ArrayBuffer_NonLivePromotion) {
}
TEST(ArrayBuffer_LivePromotion) {
if (!FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
CcTest::InitializeVM();
......@@ -257,6 +259,7 @@ TEST(ArrayBuffer_LivePromotion) {
}
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
if (!i::FLAG_incremental_marking) return;
// The test verifies that the marking state is preserved across semispace
// copy.
CcTest::InitializeVM();
......
......@@ -1362,6 +1362,7 @@ TEST(TestCodeFlushingPreAged) {
TEST(TestCodeFlushingIncremental) {
if (!i::FLAG_incremental_marking) return;
// If we do not flush code, this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1434,6 +1435,7 @@ TEST(TestCodeFlushingIncremental) {
TEST(TestCodeFlushingIncrementalScavenge) {
if (!FLAG_incremental_marking) return;
// If we do not flush code, this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1505,6 +1507,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
TEST(TestCodeFlushingIncrementalAbort) {
if (!i::FLAG_incremental_marking) return;
// If we do not flush code, this test is invalid.
if (!FLAG_flush_code) return;
i::FLAG_allow_natives_syntax = true;
......@@ -1571,6 +1574,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
}
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
if (!i::FLAG_incremental_marking) return;
// Turn off always_opt because it interferes with running the built-in for
// the last call to g().
i::FLAG_always_opt = false;
......@@ -2605,6 +2609,7 @@ TEST(LeakNativeContextViaMapProto) {
TEST(InstanceOfStubWriteBarrier) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
......@@ -2671,6 +2676,7 @@ int GetProfilerTicks(SharedFunctionInfo* shared) {
} // namespace
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
......@@ -2762,6 +2768,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
HEAP_TEST(GCFlags) {
if (!i::FLAG_incremental_marking) return;
CcTest::InitializeVM();
Heap* heap = CcTest::heap();
......@@ -2794,6 +2801,7 @@ HEAP_TEST(GCFlags) {
TEST(IdleNotificationFinishMarking) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
const int initial_gc_count = CcTest::heap()->gc_count();
......@@ -3293,6 +3301,7 @@ static int CountMapTransitions(Map* map) {
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
i::FLAG_trace_incremental_marking = true;
......@@ -3668,6 +3677,7 @@ TEST(PrintSharedFunctionInfo) {
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
if (!FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3743,6 +3753,7 @@ static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
}
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3766,6 +3777,7 @@ TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
}
TEST(IncrementalMarkingPreservesMonomorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3787,6 +3799,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
}
TEST(IncrementalMarkingPreservesPolymorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3824,6 +3837,7 @@ TEST(IncrementalMarkingPreservesPolymorphicIC) {
}
TEST(ContextDisposeDoesntClearPolymorphicIC) {
if (!i::FLAG_incremental_marking) return;
if (i::FLAG_always_opt) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -3979,6 +3993,7 @@ UNINITIALIZED_TEST(ReleaseStackTraceData) {
TEST(Regress159140) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4039,6 +4054,7 @@ TEST(Regress159140) {
TEST(Regress165495) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4085,6 +4101,7 @@ TEST(Regress165495) {
TEST(Regress169209) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_always_opt = false;
i::FLAG_stress_compaction = false;
i::FLAG_allow_natives_syntax = true;
......@@ -4301,6 +4318,7 @@ TEST(Regress513507) {
TEST(Regress514122) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_flush_optimized_code_cache = false;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
......@@ -4562,6 +4580,7 @@ TEST(Regress513496) {
TEST(LargeObjectSlotRecording) {
if (!i::FLAG_incremental_marking) return;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
......@@ -4634,6 +4653,7 @@ TEST(DeferredHandles) {
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CompileRun("function f(n) {"
......@@ -5445,6 +5465,7 @@ TEST(WeakCell) {
TEST(WeakCellsWithIncrementalMarking) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
......@@ -5716,6 +5737,7 @@ UNINITIALIZED_TEST(PromotionQueue) {
TEST(Regress388880) {
if (!FLAG_incremental_marking) return;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -5763,6 +5785,7 @@ TEST(Regress388880) {
TEST(Regress3631) {
if (!FLAG_incremental_marking) return;
i::FLAG_expose_gc = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -5896,6 +5919,7 @@ void CheckMapRetainingFor(int n) {
TEST(MapRetaining) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
......@@ -6395,6 +6419,7 @@ static UniqueId MakeUniqueId(const Persistent<T>& p) {
TEST(Regress519319) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
v8::HandleScope scope(isolate);
......@@ -6463,6 +6488,7 @@ HEAP_TEST(Regress587004) {
}
HEAP_TEST(Regress589413) {
if (!FLAG_incremental_marking) return;
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_parallel_compaction = false;
......@@ -6538,6 +6564,7 @@ HEAP_TEST(Regress589413) {
}
TEST(Regress598319) {
if (!FLAG_incremental_marking) return;
// This test ensures that no white objects can cross the progress bar of large
// objects during incremental marking. It checks this by using Shift() during
// incremental marking.
......@@ -6654,6 +6681,7 @@ TEST(Regress609761) {
}
TEST(Regress615489) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6708,6 +6736,7 @@ class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
};
TEST(Regress631969) {
if (!FLAG_incremental_marking) return;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_parallel_compaction = false;
FLAG_concurrent_sweeping = false;
......@@ -6753,6 +6782,7 @@ TEST(Regress631969) {
}
TEST(LeftTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6791,6 +6821,7 @@ TEST(LeftTrimFixedArrayInBlackArea) {
}
TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6856,6 +6887,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
}
TEST(ContinuousRightTrimFixedArrayInBlackArea) {
if (!i::FLAG_incremental_marking) return;
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......@@ -6914,6 +6946,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
}
TEST(Regress618958) {
if (!i::FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
......
......@@ -42,6 +42,7 @@ namespace v8 {
namespace internal {
UNINITIALIZED_TEST(PagePromotion_NewToOld) {
if (!i::FLAG_incremental_marking) return;
v8::Isolate* isolate = NewIsolateForPagePromotion();
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
{
......
......@@ -1821,6 +1821,7 @@ TEST(CodeSerializerEagerCompilationAndPreAge) {
}
TEST(Regress503552) {
if (!FLAG_incremental_marking) return;
// Test that the code serializer can deal with weak cells that form a linked
// list during incremental marking.
CcTest::InitializeVM();
......
......@@ -249,6 +249,7 @@ TEST(Regress2060b) {
TEST(Regress399527) {
if (!FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
......
......@@ -89,6 +89,7 @@ TEST_F(GCIdleTimeHandlerTest, DontDoFinalIncrementalMarkCompact) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.incremental_marking_stopped = true;
......@@ -99,6 +100,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -111,6 +113,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate = 1.0;
......@@ -122,6 +125,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -134,6 +138,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
......@@ -146,6 +151,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
double idle_time_ms = 10;
GCIdleTimeAction action = handler()->Compute(idle_time_ms, heap_state);
......@@ -154,6 +160,7 @@ TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
size_t speed = kMarkCompactSpeed;
......@@ -164,6 +171,7 @@ TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
......@@ -173,6 +181,7 @@ TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
......@@ -185,6 +194,7 @@ TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
for (int i = 0; i < kMaxNotifications; i++) {
GCIdleTimeAction action = handler()->Compute(0, heap_state);
......@@ -194,6 +204,7 @@ TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
for (int i = 0; i < kMaxNotifications; i++) {
......@@ -204,6 +215,8 @@ TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, DoneIfNotMakingProgressOnIncrementalMarking) {
if (!handler()->Enabled()) return;
// Regression test for crbug.com/489323.
GCIdleTimeHeapState heap_state = DefaultHeapState();
......