Commit 0c590f45 authored by Michael Achenbach, committed by Commit Bot

Revert https://codereview.chromium.org/2857713002 and dependencies

Revert "[heap] Make non-atomic markbit operations consistent with atomic ones."

This reverts commit dd37366f

Revert "[heap] Use atomic marking operations in incremental marking if"

This reverts commit 1f2c3596

Revert "[heap] Prepare IncrementalMarking::VisitObject for concurrent marking."

This reverts commit 00d1e2cf

Revert "[heap] Use shared markbits in the concurrent marker."

This reverts commit b0db0541

https://codereview.chromium.org/2857713002 blocks the current roll:
https://codereview.chromium.org/2857423002/

Doesn't revert cleanly.

NOTRY=true
TBR=ulan@chromium.org

Bug: chromium:694255
Change-Id: Iada35af5c2529cd9e604802700604b16cc30aa2d
Reviewed-on: https://chromium-review.googlesource.com/497387
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#45118}
parent d21326d5
...@@ -666,7 +666,7 @@ DEFINE_BOOL(age_code, true, ...@@ -666,7 +666,7 @@ DEFINE_BOOL(age_code, true,
DEFINE_BOOL(incremental_marking, true, "use incremental marking") DEFINE_BOOL(incremental_marking, true, "use incremental marking")
DEFINE_BOOL(incremental_marking_wrappers, true, DEFINE_BOOL(incremental_marking_wrappers, true,
"use incremental marking for marking wrappers") "use incremental marking for marking wrappers")
DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING, "use concurrent marking") DEFINE_BOOL(concurrent_marking, false, "use concurrent marking")
DEFINE_BOOL(trace_concurrent_marking, false, "trace concurrent marking") DEFINE_BOOL(trace_concurrent_marking, false, "trace concurrent marking")
DEFINE_INT(min_progress_during_incremental_marking_finalization, 32, DEFINE_INT(min_progress_during_incremental_marking_finalization, 32,
"keep finalizing incremental marking as long as we discover at " "keep finalizing incremental marking as long as we discover at "
......
...@@ -30,11 +30,6 @@ class ConcurrentMarkingVisitor final ...@@ -30,11 +30,6 @@ class ConcurrentMarkingVisitor final
explicit ConcurrentMarkingVisitor(ConcurrentMarkingDeque* deque) explicit ConcurrentMarkingVisitor(ConcurrentMarkingDeque* deque)
: deque_(deque) {} : deque_(deque) {}
bool ShouldVisit(HeapObject* object) override {
return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>(
object, marking_state(object));
}
void VisitPointers(HeapObject* host, Object** start, Object** end) override { void VisitPointers(HeapObject* host, Object** start, Object** end) override {
for (Object** p = start; p < end; p++) { for (Object** p = start; p < end; p++) {
if (!(*p)->IsHeapObject()) continue; if (!(*p)->IsHeapObject()) continue;
...@@ -73,7 +68,7 @@ class ConcurrentMarkingVisitor final ...@@ -73,7 +68,7 @@ class ConcurrentMarkingVisitor final
// =========================================================================== // ===========================================================================
int VisitCode(Map* map, Code* object) override { int VisitCode(Map* map, Code* object) override {
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // TODO(ulan): push the object to the bail-out deque.
return 0; return 0;
} }
...@@ -82,65 +77,58 @@ class ConcurrentMarkingVisitor final ...@@ -82,65 +77,58 @@ class ConcurrentMarkingVisitor final
// =========================================================================== // ===========================================================================
int VisitBytecodeArray(Map* map, BytecodeArray* object) override { int VisitBytecodeArray(Map* map, BytecodeArray* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitJSFunction(Map* map, JSFunction* object) override { int VisitJSFunction(Map* map, JSFunction* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitMap(Map* map, Map* object) override { int VisitMap(Map* map, Map* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitNativeContext(Map* map, Context* object) override { int VisitNativeContext(Map* map, Context* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object) override { int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitTransitionArray(Map* map, TransitionArray* object) override { int VisitTransitionArray(Map* map, TransitionArray* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitWeakCell(Map* map, WeakCell* object) override { int VisitWeakCell(Map* map, WeakCell* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
int VisitJSWeakCollection(Map* map, JSWeakCollection* object) override { int VisitJSWeakCollection(Map* map, JSWeakCollection* object) override {
// TODO(ulan): implement iteration of strong fields. // TODO(ulan): implement iteration of strong fields and push the object to
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout); // the bailout deque.
return 0; return 0;
} }
void MarkObject(HeapObject* object) { void MarkObject(HeapObject* obj) {
if (ObjectMarking::WhiteToGrey<MarkBit::AccessMode::ATOMIC>( deque_->Push(obj, MarkingThread::kConcurrent, TargetDeque::kShared);
object, marking_state(object))) {
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kShared);
}
} }
private: private:
MarkingState marking_state(HeapObject* object) const {
return MarkingState::Internal(object);
}
ConcurrentMarkingDeque* deque_; ConcurrentMarkingDeque* deque_;
}; };
...@@ -187,7 +175,7 @@ void ConcurrentMarking::Run() { ...@@ -187,7 +175,7 @@ void ConcurrentMarking::Run() {
TimedScope scope(&time_ms); TimedScope scope(&time_ms);
HeapObject* object; HeapObject* object;
while ((object = deque_->Pop(MarkingThread::kConcurrent)) != nullptr) { while ((object = deque_->Pop(MarkingThread::kConcurrent)) != nullptr) {
bytes_marked += visitor_->Visit(object); bytes_marked += visitor_->IterateBody(object);
} }
} }
if (FLAG_trace_concurrent_marking) { if (FLAG_trace_concurrent_marking) {
......
...@@ -4270,7 +4270,7 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) { ...@@ -4270,7 +4270,7 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
void Heap::NotifyObjectLayoutChange(HeapObject* object, void Heap::NotifyObjectLayoutChange(HeapObject* object,
const DisallowHeapAllocation&) { const DisallowHeapAllocation&) {
if (FLAG_incremental_marking && incremental_marking()->IsMarking()) { if (FLAG_incremental_marking && incremental_marking()->IsMarking()) {
incremental_marking()->WhiteToGreyAndPush(object); incremental_marking()->MarkGrey(object);
} }
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
DCHECK(pending_layout_change_object_ == nullptr); DCHECK(pending_layout_change_object_ == nullptr);
...@@ -4834,7 +4834,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { ...@@ -4834,7 +4834,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
// promoted objects. // promoted objects.
if (heap_->incremental_marking()->black_allocation()) { if (heap_->incremental_marking()->black_allocation()) {
Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
heap_->incremental_marking()->WhiteToGreyAndPush(code); heap_->incremental_marking()->MarkGrey(code);
} }
} }
...@@ -5628,7 +5628,7 @@ void Heap::RegisterExternallyReferencedObject(Object** object) { ...@@ -5628,7 +5628,7 @@ void Heap::RegisterExternallyReferencedObject(Object** object) {
HeapObject* heap_object = HeapObject::cast(*object); HeapObject* heap_object = HeapObject::cast(*object);
DCHECK(Contains(heap_object)); DCHECK(Contains(heap_object));
if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) { if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
incremental_marking()->WhiteToGreyAndPush(heap_object); incremental_marking()->MarkGrey(heap_object);
} else { } else {
DCHECK(mark_compact_collector()->in_use()); DCHECK(mark_compact_collector()->in_use());
mark_compact_collector()->MarkObject(heap_object); mark_compact_collector()->MarkObject(heap_object);
......
This diff is collapsed.
...@@ -65,6 +65,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -65,6 +65,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
return MarkingState::Internal(chunk); return MarkingState::Internal(chunk);
} }
void MarkBlack(HeapObject* object, int size);
void MarkGrey(HeapObject* object);
// Transfers mark bits without requiring proper object headers. // Transfers mark bits without requiring proper object headers.
void TransferMark(Heap* heap, HeapObject* from, HeapObject* to); void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);
...@@ -79,15 +82,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -79,15 +82,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
DCHECK(ObjectMarking::IsWhite<access_mode>(to, marking_state(to))); DCHECK(ObjectMarking::IsWhite<access_mode>(to, marking_state(to)));
if (ObjectMarking::IsGrey<access_mode>(from, marking_state(from))) { if (ObjectMarking::IsGrey<access_mode>(from, marking_state(from))) {
bool success = ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
DCHECK(success);
USE(success);
} else if (ObjectMarking::IsBlack<access_mode>(from, marking_state(from))) { } else if (ObjectMarking::IsBlack<access_mode>(from, marking_state(from))) {
bool success = ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
DCHECK(success);
USE(success);
} }
} }
...@@ -182,12 +179,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -182,12 +179,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static const intptr_t kActivationThreshold = 0; static const intptr_t kActivationThreshold = 0;
#endif #endif
#ifdef V8_CONCURRENT_MARKING
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::ATOMIC;
#else
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::NON_ATOMIC;
#endif
void FinalizeSweeping(); void FinalizeSweeping();
size_t Step(size_t bytes_to_process, CompletionAction action, size_t Step(size_t bytes_to_process, CompletionAction action,
...@@ -219,9 +210,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -219,9 +210,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value); void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
void RecordCodeTargetPatch(Address pc, HeapObject* value); void RecordCodeTargetPatch(Address pc, HeapObject* value);
// Returns true if the function succeeds in transitioning the object void WhiteToGreyAndPush(HeapObject* obj);
// from white to grey.
bool WhiteToGreyAndPush(HeapObject* obj);
inline void SetOldSpacePageFlags(MemoryChunk* chunk) { inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting()); SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
...@@ -312,7 +301,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -312,7 +301,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process, intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION)); ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION));
INLINE(bool IsFixedArrayWithProgressBar(HeapObject* object));
INLINE(void VisitObject(Map* map, HeapObject* obj, int size)); INLINE(void VisitObject(Map* map, HeapObject* obj, int size));
void IncrementIdleMarkingDelayCounter(); void IncrementIdleMarkingDelayCounter();
......
...@@ -38,15 +38,19 @@ void MarkCompactCollector::UnshiftBlack(HeapObject* obj) { ...@@ -38,15 +38,19 @@ void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
} }
void MarkCompactCollector::MarkObject(HeapObject* obj) { void MarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))) { obj, MarkingState::Internal(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
PushBlack(obj); PushBlack(obj);
} }
} }
void MinorMarkCompactCollector::MarkObject(HeapObject* obj) { void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))) { obj, MarkingState::External(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
PushBlack(obj); PushBlack(obj);
} }
} }
......
...@@ -1196,10 +1196,12 @@ class StaticYoungGenerationMarkingVisitor ...@@ -1196,10 +1196,12 @@ class StaticYoungGenerationMarkingVisitor
StackLimitCheck check(heap->isolate()); StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false; if (check.HasOverflowed()) return false;
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object))) { object, MarkingState::External(object)))
IterateBody(object->map(), object); return true;
} ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
IterateBody(object->map(), object);
return true; return true;
} }
}; };
...@@ -1235,7 +1237,11 @@ class MarkCompactMarkingVisitor ...@@ -1235,7 +1237,11 @@ class MarkCompactMarkingVisitor
// Marks the object black without pushing it on the marking stack. // Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise. // Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
return ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object)); if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
return true;
}
return false;
} }
// Mark object pointed to by p. // Mark object pointed to by p.
...@@ -1253,15 +1259,14 @@ class MarkCompactMarkingVisitor ...@@ -1253,15 +1259,14 @@ class MarkCompactMarkingVisitor
HeapObject* obj)) { HeapObject* obj)) {
#ifdef DEBUG #ifdef DEBUG
DCHECK(collector->heap()->Contains(obj)); DCHECK(collector->heap()->Contains(obj));
DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
#endif #endif
if (ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj))) { Map* map = obj->map();
Map* map = obj->map(); Heap* heap = obj->GetHeap();
Heap* heap = obj->GetHeap(); ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj)); // Mark the map pointer and the body.
// Mark the map pointer and the body. heap->mark_compact_collector()->MarkObject(map);
heap->mark_compact_collector()->MarkObject(map); IterateBody(map, obj);
IterateBody(map, obj);
}
} }
// Visit all unmarked objects pointed to by [start, end). // Visit all unmarked objects pointed to by [start, end).
...@@ -1279,6 +1284,8 @@ class MarkCompactMarkingVisitor ...@@ -1279,6 +1284,8 @@ class MarkCompactMarkingVisitor
if (!o->IsHeapObject()) continue; if (!o->IsHeapObject()) continue;
collector->RecordSlot(object, p, o); collector->RecordSlot(object, p, o);
HeapObject* obj = HeapObject::cast(o); HeapObject* obj = HeapObject::cast(o);
if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)))
continue;
VisitUnmarkedObject(collector, obj); VisitUnmarkedObject(collector, obj);
} }
return true; return true;
...@@ -1475,12 +1482,16 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor { ...@@ -1475,12 +1482,16 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
if (!collector_->heap()->InNewSpace(object)) return; if (!collector_->heap()->InNewSpace(object)) return;
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object))) { object, MarkingState::External(object)))
Map* map = object->map(); return;
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
collector_->EmptyMarkingDeque(); Map* map = object->map();
} ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
collector_->EmptyMarkingDeque();
} }
MinorMarkCompactCollector* collector_; MinorMarkCompactCollector* collector_;
...@@ -1521,16 +1532,22 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor, ...@@ -1521,16 +1532,22 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
HeapObject* object = HeapObject::cast(*p); HeapObject* object = HeapObject::cast(*p);
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object))) { object, MarkingState::Internal(object)))
Map* map = object->map(); return;
// Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map); Map* map = object->map();
MarkCompactMarkingVisitor::IterateBody(map, object); // Mark the object.
// Mark all the objects reachable from the map and body. May leave ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
// overflowed objects in the heap. object, MarkingState::Internal(object));
collector_->EmptyMarkingDeque();
} // Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map);
MarkCompactMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
collector_->EmptyMarkingDeque();
} }
MarkCompactCollector* collector_; MarkCompactCollector* collector_;
...@@ -1701,7 +1718,8 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { ...@@ -1701,7 +1718,8 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
Map* filler_map = heap()->one_pointer_filler_map(); Map* filler_map = heap()->one_pointer_filler_map();
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
if ((object->map() != filler_map) && if ((object->map() != filler_map) &&
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object))) { ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object); PushBlack(object);
if (marking_deque()->IsFull()) return; if (marking_deque()->IsFull()) return;
} }
...@@ -1713,10 +1731,8 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { ...@@ -1713,10 +1731,8 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p)); LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL; HeapObject* object = NULL;
while ((object = it.Next()) != NULL) { while ((object = it.Next()) != NULL) {
bool success = DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object)); ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
DCHECK(success);
USE(success);
PushBlack(object); PushBlack(object);
if (marking_deque()->IsFull()) return; if (marking_deque()->IsFull()) return;
} }
...@@ -2279,12 +2295,15 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { ...@@ -2279,12 +2295,15 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
StringTable* string_table = heap()->string_table(); StringTable* string_table = heap()->string_table();
// Mark the string table itself. // Mark the string table itself.
if (ObjectMarking::WhiteToBlack(string_table, if (ObjectMarking::IsWhite(string_table,
MarkingState::Internal(string_table))) { MarkingState::Internal(string_table))) {
// Explicitly mark the prefix. // String table could have already been marked by visiting the handles list.
string_table->IteratePrefix(visitor); ObjectMarking::WhiteToBlack(string_table,
ProcessMarkingDeque(); MarkingState::Internal(string_table));
} }
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
ProcessMarkingDeque();
} }
void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
......
...@@ -82,6 +82,8 @@ class ObjectMarking : public AllStatic { ...@@ -82,6 +82,8 @@ class ObjectMarking : public AllStatic {
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj, V8_INLINE static bool BlackToGrey(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
DCHECK(
(access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj, state)));
MarkBit markbit = MarkBitFrom(obj, state); MarkBit markbit = MarkBitFrom(obj, state);
if (!Marking::BlackToGrey<access_mode>(markbit)) return false; if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
state.IncrementLiveBytes<access_mode>(-obj->Size()); state.IncrementLiveBytes<access_mode>(-obj->Size());
...@@ -91,19 +93,24 @@ class ObjectMarking : public AllStatic { ...@@ -91,19 +93,24 @@ class ObjectMarking : public AllStatic {
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj, V8_INLINE static bool WhiteToGrey(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
DCHECK(
(access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state)); return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj, V8_INLINE static bool WhiteToBlack(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
return ObjectMarking::WhiteToGrey<access_mode>(obj, state) && DCHECK(
ObjectMarking::GreyToBlack<access_mode>(obj, state); (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
if (!ObjectMarking::WhiteToGrey<access_mode>(obj, state)) return false;
return ObjectMarking::GreyToBlack<access_mode>(obj, state);
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj, V8_INLINE static bool GreyToBlack(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj, state)));
MarkBit markbit = MarkBitFrom(obj, state); MarkBit markbit = MarkBitFrom(obj, state);
if (!Marking::GreyToBlack<access_mode>(markbit)) return false; if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
state.IncrementLiveBytes<access_mode>(obj->Size()); state.IncrementLiveBytes<access_mode>(obj->Size());
......
...@@ -38,16 +38,12 @@ class MarkBit { ...@@ -38,16 +38,12 @@ class MarkBit {
} }
} }
// The function returns true if it succeeded to
// transition the bit from 0 to 1.
template <AccessMode mode = NON_ATOMIC> template <AccessMode mode = NON_ATOMIC>
inline bool Set(); inline bool Set();
template <AccessMode mode = NON_ATOMIC> template <AccessMode mode = NON_ATOMIC>
inline bool Get(); inline bool Get();
// The function returns true if it succeeded to
// transition the bit from 1 to 0.
template <AccessMode mode = NON_ATOMIC> template <AccessMode mode = NON_ATOMIC>
inline bool Clear(); inline bool Clear();
...@@ -61,9 +57,8 @@ class MarkBit { ...@@ -61,9 +57,8 @@ class MarkBit {
template <> template <>
inline bool MarkBit::Set<MarkBit::NON_ATOMIC>() { inline bool MarkBit::Set<MarkBit::NON_ATOMIC>() {
base::Atomic32 old_value = *cell_; *cell_ |= mask_;
*cell_ = old_value | mask_; return true;
return (old_value & mask_) == 0;
} }
template <> template <>
...@@ -91,9 +86,8 @@ inline bool MarkBit::Get<MarkBit::ATOMIC>() { ...@@ -91,9 +86,8 @@ inline bool MarkBit::Get<MarkBit::ATOMIC>() {
template <> template <>
inline bool MarkBit::Clear<MarkBit::NON_ATOMIC>() { inline bool MarkBit::Clear<MarkBit::NON_ATOMIC>() {
base::Atomic32 old_value = *cell_; *cell_ &= ~mask_;
*cell_ = old_value & ~mask_; return true;
return (old_value & mask_) == mask_;
} }
template <> template <>
...@@ -418,17 +412,24 @@ class Marking : public AllStatic { ...@@ -418,17 +412,24 @@ class Marking : public AllStatic {
template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool WhiteToGrey(MarkBit markbit)) { INLINE(static bool WhiteToGrey(MarkBit markbit)) {
DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
return markbit.Set<mode>(); return markbit.Set<mode>();
} }
// Warning: this method is not safe in general in concurrent scenarios.
// If you know that nobody else will change the bits on the given location
// then you may use it.
template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool WhiteToBlack(MarkBit markbit)) { INLINE(static void WhiteToBlack(MarkBit markbit)) {
return markbit.Set<mode>() && markbit.Next().Set<mode>(); DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
markbit.Set<mode>();
markbit.Next().Set<mode>();
} }
template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool GreyToBlack(MarkBit markbit)) { INLINE(static bool GreyToBlack(MarkBit markbit)) {
return markbit.Get<mode>() && markbit.Next().Set<mode>(); DCHECK(mode == MarkBit::ATOMIC || IsGrey(markbit));
return markbit.Next().Set<mode>();
} }
enum ObjectColor { enum ObjectColor {
......
...@@ -637,7 +637,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode( ...@@ -637,7 +637,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
} }
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) { ResultType HeapVisitor<ResultType, ConcreteVisitor>::IterateBody(
HeapObject* object) {
Map* map = object->map(); Map* map = object->map();
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this); ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
switch (static_cast<VisitorId>(map->visitor_id())) { switch (static_cast<VisitorId>(map->visitor_id())) {
...@@ -668,28 +669,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) { ...@@ -668,28 +669,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
return ResultType(); return ResultType();
} }
template <typename ResultType, typename ConcreteVisitor> #define VISIT(type) \
void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer( template <typename ResultType, typename ConcreteVisitor> \
HeapObject* host, HeapObject** map) { ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
static_cast<ConcreteVisitor*>(this)->VisitPointer( Map* map, type* object) { \
host, reinterpret_cast<Object**>(map)); int size = type::BodyDescriptor::SizeOf(map, object); \
} type::BodyDescriptor::IterateBody(object, size, \
static_cast<ConcreteVisitor*>(this)); \
template <typename ResultType, typename ConcreteVisitor> return static_cast<ResultType>(size); \
bool HeapVisitor<ResultType, ConcreteVisitor>::ShouldVisit(HeapObject* object) {
return true;
}
#define VISIT(type) \
template <typename ResultType, typename ConcreteVisitor> \
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
Map* map, type* object) { \
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this); \
if (!visitor->ShouldVisit(object)) return ResultType(); \
int size = type::BodyDescriptor::SizeOf(map, object); \
visitor->VisitMapPointer(object, object->map_slot()); \
type::BodyDescriptor::IterateBody(object, size, visitor); \
return static_cast<ResultType>(size); \
} }
TYPED_VISITOR_ID_LIST(VISIT) TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT #undef VISIT
...@@ -697,10 +684,7 @@ TYPED_VISITOR_ID_LIST(VISIT) ...@@ -697,10 +684,7 @@ TYPED_VISITOR_ID_LIST(VISIT)
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
Map* map, ConsString* object) { Map* map, ConsString* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = ConsString::BodyDescriptor::SizeOf(map, object); int size = ConsString::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
ConsString::BodyDescriptor::IterateBody(object, size, ConsString::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this)); static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
...@@ -709,10 +693,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate( ...@@ -709,10 +693,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
Map* map, Context* object) { Map* map, Context* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = Context::BodyDescriptor::SizeOf(map, object); int size = Context::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
Context::BodyDescriptor::IterateBody(object, size, Context::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this)); static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
...@@ -721,20 +702,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext( ...@@ -721,20 +702,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject(
Map* map, HeapObject* object) { Map* map, HeapObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = map->instance_size(); int size = map->instance_size();
visitor->VisitMapPointer(object, object->map_slot());
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
} }
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
Map* map, JSObject* object) { Map* map, JSObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::FastBodyDescriptor::SizeOf(map, object); int size = JSObject::FastBodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
JSObject::FastBodyDescriptor::IterateBody( JSObject::FastBodyDescriptor::IterateBody(
object, size, static_cast<ConcreteVisitor*>(this)); object, size, static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
...@@ -742,10 +717,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast( ...@@ -742,10 +717,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
Map* map, JSObject* object) { Map* map, JSObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::BodyDescriptor::SizeOf(map, object); int size = JSObject::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
JSObject::BodyDescriptor::IterateBody(object, size, JSObject::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this)); static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
...@@ -753,10 +725,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject( ...@@ -753,10 +725,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
Map* map, HeapObject* object) { Map* map, HeapObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = map->instance_size(); int size = map->instance_size();
visitor->VisitMapPointer(object, object->map_slot());
StructBodyDescriptor::IterateBody(object, size, StructBodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this)); static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size); return static_cast<ResultType>(size);
...@@ -764,9 +733,6 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct( ...@@ -764,9 +733,6 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace( ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace(
Map* map, FreeSpace* object) { Map* map, FreeSpace* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
visitor->VisitMapPointer(object, object->map_slot());
return static_cast<ResultType>(FreeSpace::cast(object)->size()); return static_cast<ResultType>(FreeSpace::cast(object)->size());
} }
......
...@@ -395,16 +395,9 @@ VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback> ...@@ -395,16 +395,9 @@ VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
template <typename ResultType, typename ConcreteVisitor> template <typename ResultType, typename ConcreteVisitor>
class HeapVisitor : public ObjectVisitor { class HeapVisitor : public ObjectVisitor {
public: public:
ResultType Visit(HeapObject* object); ResultType IterateBody(HeapObject* object);
protected: protected:
// A guard predicate for visiting the object.
// If it returns false then the default implementations of the Visit*
// functions bailout from iterating the object pointers.
virtual bool ShouldVisit(HeapObject* object);
// A callback for visiting the map pointer in the object header.
virtual void VisitMapPointer(HeapObject* host, HeapObject** map);
#define VISIT(type) virtual ResultType Visit##type(Map* map, type* object); #define VISIT(type) virtual ResultType Visit##type(Map* map, type* object);
TYPED_VISITOR_ID_LIST(VISIT) TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT #undef VISIT
......
...@@ -1525,9 +1525,6 @@ void HeapObject::set_map_no_write_barrier(Map* value) { ...@@ -1525,9 +1525,6 @@ void HeapObject::set_map_no_write_barrier(Map* value) {
set_map_word(MapWord::FromMap(value)); set_map_word(MapWord::FromMap(value));
} }
HeapObject** HeapObject::map_slot() {
return reinterpret_cast<HeapObject**>(FIELD_ADDR(this, kMapOffset));
}
MapWord HeapObject::map_word() const { MapWord HeapObject::map_word() const {
return MapWord( return MapWord(
......
...@@ -1669,9 +1669,6 @@ class HeapObject: public Object { ...@@ -1669,9 +1669,6 @@ class HeapObject: public Object {
// information. // information.
inline Map* map() const; inline Map* map() const;
inline void set_map(Map* value); inline void set_map(Map* value);
inline HeapObject** map_slot();
// The no-write-barrier version. This is OK if the object is white and in // The no-write-barrier version. This is OK if the object is white and in
// new space, or if the value is an immortal immutable object, like the maps // new space, or if the value is an immortal immutable object, like the maps
// of primitive (non-JS) objects like strings, heap numbers etc. // of primitive (non-JS) objects like strings, heap numbers etc.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment