Commit 4b42656d authored by Michael Lippautz, committed by Commit Bot

[heap] Unify incremental and main marking visitor

With parallel marking enabled, both visitors have to behave identically
with respect to the actual visitation.

The differences are captured by template parameters (see the sketch after
this list):
- Retaining path tracing, which is only done for full GCs.
- Incremental marking of FixedArray.
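
A minimal sketch of the resulting shape (abbreviated; the full declarations
are in the mark-compact header change below):

  template <FixedArrayVisitationMode fixed_array_mode,
            TraceRetainingPathMode retaining_path_mode>
  class MarkingVisitor;

  using MarkCompactMarkingVisitor =
      MarkingVisitor<FixedArrayVisitationMode::kRegular,
                     TraceRetainingPathMode::kEnabled>;
  using IncrementalMarkingMarkingVisitor =
      MarkingVisitor<FixedArrayVisitationMode::kIncremental,
                     TraceRetainingPathMode::kDisabled>;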

CQ_INCLUDE_TRYBOTS=master.tryserver.v8:v8_linux64_tsan_rel;master.tryserver.v8:v8_linux64_tsan_concurrent_marking_rel_ng;master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: chromium:694255, chromium:750084
Change-Id: I177aeb0ee4f6a35e2f592ba257c9ddc14f88fd99
Reviewed-on: https://chromium-review.googlesource.com/704935
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48582}
parent c02f5e3a
@@ -399,6 +399,10 @@ enum ArrayStorageAllocationMode {
enum class ClearRecordedSlots { kYes, kNo };
enum class FixedArrayVisitationMode { kRegular, kIncremental };
enum class TraceRetainingPathMode { kEnabled, kDisabled };
enum class GarbageCollectionReason {
kUnknown = 0,
kAllocationFailure = 1,
@@ -2440,16 +2444,16 @@ class Heap {
friend class GCCallbacksScope;
friend class GCTracer;
friend class HeapIterator;
template <typename ConcreteVisitor>
friend class MarkingVisitor;
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
friend class LargeObjectSpace;
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode>
friend class MarkingVisitor;
friend class MarkCompactCollector;
friend class MarkCompactCollectorBase;
friend class MinorMarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class NewSpace;
friend class ObjectStatsCollector;
friend class Page;
......
@@ -189,96 +189,6 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
}
}
class IncrementalMarkingMarkingVisitor final
: public MarkingVisitor<IncrementalMarkingMarkingVisitor> {
public:
typedef MarkingVisitor<IncrementalMarkingMarkingVisitor> Parent;
static const int kProgressBarScanningChunk = 32 * 1024;
explicit IncrementalMarkingMarkingVisitor(MarkCompactCollector* collector)
: MarkingVisitor<IncrementalMarkingMarkingVisitor>(collector->heap(),
collector),
incremental_marking_(collector->heap()->incremental_marking()) {}
V8_INLINE int VisitFixedArray(Map* map, FixedArray* object) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
DCHECK(!FLAG_use_marking_progress_bar ||
chunk->owner()->identity() == LO_SPACE);
// When using a progress bar for large fixed arrays, scan only a chunk of
// the array and try to push it onto the marking deque again until it is
// fully scanned. Fall back to scanning it through to the end in case this
// fails because of a full deque.
int start_offset =
Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
if (start_offset < object_size) {
// Ensure that the object is either grey or black before pushing it
// into marking worklist.
incremental_marking_->marking_state()->WhiteToGrey(object);
if (FLAG_concurrent_marking) {
incremental_marking_->marking_worklist()->PushBailout(object);
} else {
incremental_marking_->marking_worklist()->Push(object);
}
DCHECK(incremental_marking_->marking_state()->IsGrey(object) ||
incremental_marking_->marking_state()->IsBlack(object));
int end_offset =
Min(object_size, start_offset + kProgressBarScanningChunk);
int already_scanned_offset = start_offset;
VisitPointers(object, HeapObject::RawField(object, start_offset),
HeapObject::RawField(object, end_offset));
start_offset = end_offset;
end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
incremental_marking_->NotifyIncompleteScanOfObject(
object_size - (start_offset - already_scanned_offset));
}
}
} else {
FixedArray::BodyDescriptor::IterateBody(object, object_size, this);
}
return object_size;
}
V8_INLINE void VisitPointer(HeapObject* host, Object** p) final {
Object* target = *p;
if (target->IsHeapObject()) {
collector_->RecordSlot(host, p, target);
MarkObject(host, target);
}
}
V8_INLINE void VisitPointers(HeapObject* host, Object** start,
Object** end) final {
for (Object** p = start; p < end; p++) {
Object* target = *p;
if (target->IsHeapObject()) {
collector_->RecordSlot(host, p, target);
MarkObject(host, target);
}
}
}
// Marks the object grey and pushes it on the marking stack.
V8_INLINE void MarkObject(HeapObject* host, Object* obj) {
incremental_marking_->WhiteToGreyAndPush(HeapObject::cast(obj));
}
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, Object* obj) {
HeapObject* heap_object = HeapObject::cast(obj);
return incremental_marking_->marking_state()->WhiteToBlack(heap_object);
}
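// Tri-color note on the two helpers above: MarkObject() makes a white object
// grey and queues it, so its fields get scanned later; MarkObjectWithoutPush()
// makes it black immediately, which is only safe when the caller visits (or
// deliberately skips) the object's fields itself, as with the weak-collection
// backing table.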
private:
IncrementalMarking* const incremental_marking_;
};
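// A hedged, self-contained sketch (illustrative only, not part of this CL) of
// the progress-bar arithmetic in VisitFixedArray above: each visit scans at
// most one fixed-size slice and the object is re-pushed until the recorded
// progress reaches the object size. |scan_slice| stands in for the
// VisitPointers call; in the real code the first step starts at
// FixedArray::BodyDescriptor::kStartOffset.
static int SimulateProgressBarStep(int progress_bar, int object_size,
                                   void (*scan_slice)(int start, int end)) {
  const int kChunk = 32 * 1024;  // mirrors kProgressBarScanningChunk
  if (progress_bar >= object_size) return progress_bar;  // fully scanned
  const int end = progress_bar + kChunk < object_size ? progress_bar + kChunk
                                                      : object_size;
  scan_slice(progress_bar, end);  // one bounded slice per visit
  return end;  // persisted via chunk->set_progress_bar(end) in the real code
}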
class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
public:
explicit IncrementalMarkingRootMarkingVisitor(
......
@@ -1075,49 +1075,6 @@ void MarkCompactCollector::Finish() {
}
}
class MarkCompactMarkingVisitor final
: public MarkingVisitor<MarkCompactMarkingVisitor> {
public:
explicit MarkCompactMarkingVisitor(MarkCompactCollector* collector)
: MarkingVisitor<MarkCompactMarkingVisitor>(collector->heap(),
collector) {}
V8_INLINE void VisitPointer(HeapObject* host, Object** p) final {
MarkObjectByPointer(host, p);
}
V8_INLINE void VisitPointers(HeapObject* host, Object** start,
Object** end) final {
for (Object** p = start; p < end; p++) {
MarkObjectByPointer(host, p);
}
}
// Marks the object black and pushes it on the marking stack.
V8_INLINE void MarkObject(HeapObject* host, HeapObject* object) {
collector_->MarkObject(host, object);
}
// Marks the object black without pushing it on the marking stack. Returns
// true if object needed marking and false otherwise.
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object) {
if (collector_->atomic_marking_state()->WhiteToBlack(object)) {
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
heap_->AddRetainer(host, object);
}
return true;
}
return false;
}
V8_INLINE void MarkObjectByPointer(HeapObject* host, Object** p) {
if (!(*p)->IsHeapObject()) return;
HeapObject* target_object = HeapObject::cast(*p);
collector_->RecordSlot(host, p, target_object);
collector_->MarkObject(host, target_object);
}
};
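// In the unified visitor, the retaining-path check above presumably becomes a
// compile-time test of |retaining_path_mode| (hedged sketch, not the literal
// new implementation):
//   if (marking_state()->WhiteToBlack(object)) {
//     if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
//         V8_UNLIKELY(FLAG_track_retaining_path)) {
//       heap_->AddRetainer(host, object);
//     }
//     return true;
//   }
//   return false;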
void MinorMarkCompactCollector::CleanupSweepToIteratePages() {
for (Page* p : sweep_to_iterate_pages_) {
if (p->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
@@ -2855,7 +2812,7 @@ void MarkCompactCollector::ProcessWeakCollections() {
RecordSlot(table, key_slot, *key_slot);
Object** value_slot =
table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
visitor.MarkObjectByPointer(table, value_slot);
visitor.VisitPointer(table, value_slot);
}
}
}
......
@@ -9,6 +9,7 @@
#include <vector>
#include "src/heap/marking.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/worklist.h"
@@ -934,11 +935,78 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
friend class FullEvacuator;
friend class Heap;
friend class IncrementalMarkingMarkingVisitor;
friend class MarkCompactMarkingVisitor;
friend class RecordMigratedSlotVisitor;
};
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode>
class MarkingVisitor final
: public HeapVisitor<
int, MarkingVisitor<fixed_array_mode, retaining_path_mode>> {
public:
typedef HeapVisitor<int,
MarkingVisitor<fixed_array_mode, retaining_path_mode>>
Parent;
V8_INLINE explicit MarkingVisitor(MarkCompactCollector* collector);
V8_INLINE bool ShouldVisitMapPointer() { return false; }
V8_INLINE int VisitAllocationSite(Map* map, AllocationSite* object);
V8_INLINE int VisitBytecodeArray(Map* map, BytecodeArray* object);
V8_INLINE int VisitFixedArray(Map* map, FixedArray* object);
V8_INLINE int VisitJSApiObject(Map* map, JSObject* object);
V8_INLINE int VisitJSFunction(Map* map, JSFunction* object);
V8_INLINE int VisitJSWeakCollection(Map* map, JSWeakCollection* object);
V8_INLINE int VisitMap(Map* map, Map* object);
V8_INLINE int VisitNativeContext(Map* map, Context* object);
V8_INLINE int VisitTransitionArray(Map* map, TransitionArray* object);
V8_INLINE int VisitWeakCell(Map* map, WeakCell* object);
// ObjectVisitor implementation.
V8_INLINE void VisitPointer(HeapObject* host, Object** p) final;
V8_INLINE void VisitPointers(HeapObject* host, Object** start,
Object** end) final;
V8_INLINE void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) final;
V8_INLINE void VisitCodeTarget(Code* host, RelocInfo* rinfo) final;
// Skip weak next code link.
V8_INLINE void VisitNextCodeLink(Code* host, Object** p) final {}
private:
// Granularity in which FixedArrays are scanned if |fixed_array_mode|
// is FixedArrayVisitationMode::kIncremental.
static const int kProgressBarScanningChunk = 32 * 1024;
V8_INLINE int VisitFixedArrayIncremental(Map* map, FixedArray* object);
V8_INLINE void MarkMapContents(Map* map);
// Marks the object black without pushing it on the marking work list. Returns
// true if the object needed marking and false otherwise.
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object);
// Marks the object grey and pushes it on the marking work list.
V8_INLINE void MarkObject(HeapObject* host, HeapObject* obj);
MajorAtomicMarkingState* marking_state() {
return this->collector_->atomic_marking_state();
}
MarkCompactCollector::MarkingWorklist* marking_worklist() {
return this->heap_->incremental_marking()->marking_worklist();
}
Heap* const heap_;
MarkCompactCollector* const collector_;
};
using MarkCompactMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kRegular,
TraceRetainingPathMode::kEnabled>;
using IncrementalMarkingMarkingVisitor =
MarkingVisitor<FixedArrayVisitationMode::kIncremental,
TraceRetainingPathMode::kDisabled>;
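// Hedged usage sketch (assuming a MarkCompactCollector* |collector|): both
// GC modes now construct the same template,
//   MarkCompactMarkingVisitor visitor(collector);         // full GC
//   IncrementalMarkingMarkingVisitor visitor(collector);  // incremental
// and dispatch through the HeapVisitor base. Both template parameters are
// compile-time constants, so unifying the visitors adds no runtime branches.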
class EvacuationScope {
public:
explicit EvacuationScope(MarkCompactCollector* collector)
......
@@ -186,197 +186,6 @@ int NewSpaceVisitor<ConcreteVisitor>::VisitJSApiObject(Map* map,
return visitor->VisitJSObject(map, object);
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitJSFunction(Map* map,
JSFunction* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
JSFunction::BodyDescriptorWeak::IterateBody(object, size, visitor);
return size;
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitTransitionArray(
Map* map, TransitionArray* array) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(array, size, visitor);
collector_->AddTransitionArray(array);
return size;
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitWeakCell(Map* map,
WeakCell* weak_cell) {
// Enqueue weak cell in linked list of encountered weak collections.
// We can ignore weak cells with cleared values because they will always
// contain smi zero.
if (!weak_cell->cleared()) {
HeapObject* value = HeapObject::cast(weak_cell->value());
if (heap_->incremental_marking()->marking_state()->IsBlackOrGrey(value)) {
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
collector_->RecordSlot(weak_cell, slot, *slot);
} else {
// If we do not know about liveness of values of weak cells, we have to
// process them when we know the liveness of the whole transitive
// closure.
collector_->AddWeakCell(weak_cell);
}
}
return WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
}
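// Decision summary for VisitWeakCell above: if the cell's value is already
// black or grey, the value slot is recorded immediately (cheap, and it keeps
// the cell out of the main GC pause); if the value's liveness is unknown, the
// cell is deferred via AddWeakCell() until the transitive closure is known.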
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitNativeContext(Map* map,
Context* context) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = Context::BodyDescriptorWeak::SizeOf(map, context);
Context::BodyDescriptorWeak::IterateBody(context, size, visitor);
return size;
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitJSWeakCollection(
Map* map, JSWeakCollection* weak_collection) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
// Enqueue weak collection in linked list of encountered weak collections.
if (weak_collection->next() == heap_->undefined_value()) {
weak_collection->set_next(heap_->encountered_weak_collections());
heap_->set_encountered_weak_collections(weak_collection);
}
// Skip visiting the backing hash table containing the mappings and the
// pointer to the other enqueued weak collections, both are post-processed.
int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
JSWeakCollection::BodyDescriptorWeak::IterateBody(weak_collection, size,
visitor);
// Partially initialized weak collection is enqueued, but table is ignored.
if (!weak_collection->table()->IsHashTable()) return size;
// Mark the backing hash table without pushing it on the marking stack.
Object** slot =
HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
HeapObject* obj = HeapObject::cast(*slot);
collector_->RecordSlot(weak_collection, slot, obj);
visitor->MarkObjectWithoutPush(weak_collection, obj);
return size;
}
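// Note on the flow above: the backing table is marked black without being
// pushed, so its entries are not traced here. The key/value pairs are
// post-processed in MarkCompactCollector::ProcessWeakCollections() (cf. the
// VisitPointer call updated in this CL), keeping ephemeron handling out of
// the fast marking path.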
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitBytecodeArray(Map* map,
BytecodeArray* array) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
BytecodeArray::BodyDescriptor::IterateBody(array, size, visitor);
array->MakeOlder();
return size;
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitCode(Map* map, Code* code) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = Code::BodyDescriptor::SizeOf(map, code);
Code::BodyDescriptor::IterateBody(code, size, visitor);
return size;
}
template <typename ConcreteVisitor>
void MarkingVisitor<ConcreteVisitor>::MarkMapContents(Map* map) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
// Since descriptor arrays are potentially shared, ensure that only the
// descriptors that belong to this map are marked. The first time a non-empty
// descriptor array is marked, its header is also visited. The slot holding
// the descriptor array will be implicitly recorded when the pointer fields of
// this map are visited. Prototype maps don't keep track of transitions, so
// just mark the entire descriptor array.
if (!map->is_prototype_map()) {
DescriptorArray* descriptors = map->instance_descriptors();
if (visitor->MarkObjectWithoutPush(map, descriptors) &&
descriptors->length() > 0) {
visitor->VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
descriptors->GetDescriptorEndSlot(0));
}
int start = 0;
int end = map->NumberOfOwnDescriptors();
if (start < end) {
visitor->VisitPointers(descriptors,
descriptors->GetDescriptorStartSlot(start),
descriptors->GetDescriptorEndSlot(end));
}
}
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
// pointer to it.
visitor->VisitPointers(
map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
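// Worked example for the ownership split above: if this map owns n
// descriptors of a shared DescriptorArray (NumberOfOwnDescriptors() == n),
// only the slots of descriptors [0, n) are visited through this map; the
// array's leading (header) slots are visited once, by whichever map marks the
// array first, and descriptors owned by other maps are marked via those maps,
// so no map scans past its own range.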
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitMap(Map* map, Map* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
// When map collection is enabled we have to mark through map's transitions
// and back pointers in a special way to make these links weak.
if (object->CanTransition()) {
MarkMapContents(object);
} else {
visitor->VisitPointers(
object, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
}
return Map::BodyDescriptor::SizeOf(map, object);
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitJSApiObject(Map* map,
JSObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (heap_->local_embedder_heap_tracer()->InUse()) {
DCHECK(object->IsJSObject());
heap_->TracePossibleWrapper(object);
}
int size = JSObject::BodyDescriptor::SizeOf(map, object);
JSObject::BodyDescriptor::IterateBody(object, size, visitor);
return size;
}
template <typename ConcreteVisitor>
int MarkingVisitor<ConcreteVisitor>::VisitAllocationSite(
Map* map, AllocationSite* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
AllocationSite::BodyDescriptorWeak::IterateBody(object, size, visitor);
return size;
}
template <typename ConcreteVisitor>
void MarkingVisitor<ConcreteVisitor>::VisitEmbeddedPointer(Code* host,
RelocInfo* rinfo) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* object = HeapObject::cast(rinfo->target_object());
collector_->RecordRelocSlot(host, rinfo, object);
if (!host->IsWeakObject(object)) {
visitor->MarkObject(host, object);
}
}
template <typename ConcreteVisitor>
void MarkingVisitor<ConcreteVisitor>::VisitCodeTarget(Code* host,
RelocInfo* rinfo) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
collector_->RecordRelocSlot(host, rinfo, target);
visitor->MarkObject(host, target);
}
} // namespace internal
} // namespace v8
......
@@ -116,38 +116,6 @@ class NewSpaceVisitor : public HeapVisitor<int, ConcreteVisitor> {
}
};
template <typename ConcreteVisitor>
class MarkingVisitor : public HeapVisitor<int, ConcreteVisitor> {
public:
explicit MarkingVisitor(Heap* heap, MarkCompactCollector* collector)
: heap_(heap), collector_(collector) {}
V8_INLINE bool ShouldVisitMapPointer() { return false; }
V8_INLINE int VisitJSFunction(Map* map, JSFunction* object);
V8_INLINE int VisitWeakCell(Map* map, WeakCell* object);
V8_INLINE int VisitTransitionArray(Map* map, TransitionArray* object);
V8_INLINE int VisitNativeContext(Map* map, Context* object);
V8_INLINE int VisitJSWeakCollection(Map* map, JSWeakCollection* object);
V8_INLINE int VisitBytecodeArray(Map* map, BytecodeArray* object);
V8_INLINE int VisitCode(Map* map, Code* object);
V8_INLINE int VisitMap(Map* map, Map* object);
V8_INLINE int VisitJSApiObject(Map* map, JSObject* object);
V8_INLINE int VisitAllocationSite(Map* map, AllocationSite* object);
// ObjectVisitor implementation.
V8_INLINE void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) final;
V8_INLINE void VisitCodeTarget(Code* host, RelocInfo* rinfo) final;
// Skip weak next code link.
V8_INLINE void VisitNextCodeLink(Code* host, Object** p) final {}
protected:
V8_INLINE void MarkMapContents(Map* map);
Heap* heap_;
MarkCompactCollector* collector_;
};
class WeakObjectRetainer;
// A weak list is single linked list where each element has a weak pointer to
......