Commit b1d96fa1 authored by ulan, committed by Commit bot

[heap] Reland: Use atomic marking operations in incremental marking if concurrent marking is enabled.

This patch adds kAtomicity flag to IncrementalMarking that is set
depending on the concurrent marking compile time flag.

BUG=chromium:694255
CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:linux_chromium_rel_ng

Review-Url: https://codereview.chromium.org/2858343003
Cr-Commit-Position: refs/heads/master@{#45138}
parent b9cebf8c
...@@ -52,10 +52,11 @@ IncrementalMarking::IncrementalMarking(Heap* heap) ...@@ -52,10 +52,11 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value); HeapObject* value_heap_obj = HeapObject::cast(value);
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj, DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
marking_state(value_heap_obj))); value_heap_obj, marking_state(value_heap_obj)));
DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj)); const bool is_black =
ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));
if (is_black && WhiteToGreyAndPush(value_heap_obj)) { if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
RestartIfNotMarking(); RestartIfNotMarking();
...@@ -129,7 +130,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, ...@@ -129,7 +130,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
} }
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) { if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
marking_deque()->Push(obj); marking_deque()->Push(obj);
return true; return true;
} }
...@@ -153,18 +154,18 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, ...@@ -153,18 +154,18 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to)); MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from)); MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));
if (Marking::IsBlack(old_mark_bit)) { if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
if (from->address() + kPointerSize == to->address()) { if (from->address() + kPointerSize == to->address()) {
// The old and the new markbits overlap. The |to| object has the // The old and the new markbits overlap. The |to| object has the
// grey color. To make it black, we need to set the second bit. // grey color. To make it black, we need to set the second bit.
DCHECK(new_mark_bit.Get()); DCHECK(new_mark_bit.Get<kAtomicity>());
new_mark_bit.Next().Set(); new_mark_bit.Next().Set<kAtomicity>();
} else { } else {
bool success = Marking::WhiteToBlack(new_mark_bit); bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
DCHECK(success); DCHECK(success);
USE(success); USE(success);
} }
} else if (Marking::IsGrey(old_mark_bit)) { } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
if (from->address() + kPointerSize == to->address()) { if (from->address() + kPointerSize == to->address()) {
// The old and the new markbits overlap. The |to| object has the // The old and the new markbits overlap. The |to| object has the
// white color. To make it grey, we need to set the first bit. // white color. To make it grey, we need to set the first bit.
...@@ -173,7 +174,7 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, ...@@ -173,7 +174,7 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
new_mark_bit.Set(); new_mark_bit.Set();
DCHECK(!new_mark_bit.Next().Get()); DCHECK(!new_mark_bit.Next().Get());
} else { } else {
bool success = Marking::WhiteToGrey(new_mark_bit); bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
DCHECK(success); DCHECK(success);
USE(success); USE(success);
} }
...@@ -219,11 +220,11 @@ class IncrementalMarkingMarkingVisitor ...@@ -219,11 +220,11 @@ class IncrementalMarkingMarkingVisitor
} while (scan_until_end && start_offset < object_size); } while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset); chunk->set_progress_bar(start_offset);
if (start_offset < object_size) { if (start_offset < object_size) {
if (ObjectMarking::IsGrey( if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object))) { object, heap->incremental_marking()->marking_state(object))) {
heap->incremental_marking()->marking_deque()->Unshift(object); heap->incremental_marking()->marking_deque()->Unshift(object);
} else { } else {
DCHECK(ObjectMarking::IsBlack( DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
object, heap->incremental_marking()->marking_state(object))); object, heap->incremental_marking()->marking_state(object)));
heap->mark_compact_collector()->UnshiftBlack(object); heap->mark_compact_collector()->UnshiftBlack(object);
} }
...@@ -248,8 +249,9 @@ class IncrementalMarkingMarkingVisitor ...@@ -248,8 +249,9 @@ class IncrementalMarkingMarkingVisitor
// Mark the object grey if it is white, do not enqueue it into the marking // Mark the object grey if it is white, do not enqueue it into the marking
// deque. // deque.
Heap* heap = map->GetHeap(); Heap* heap = map->GetHeap();
bool ignored = ObjectMarking::WhiteToGrey( bool ignored =
heap_obj, heap->incremental_marking()->marking_state(heap_obj)); ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
heap_obj, heap->incremental_marking()->marking_state(heap_obj));
USE(ignored); USE(ignored);
} }
} }
...@@ -284,13 +286,14 @@ class IncrementalMarkingMarkingVisitor ...@@ -284,13 +286,14 @@ class IncrementalMarkingMarkingVisitor
// Returns true if object needed marking and false otherwise. // Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj); HeapObject* heap_object = HeapObject::cast(obj);
return ObjectMarking::WhiteToBlack( return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
heap_object, heap->incremental_marking()->marking_state(heap_object)); heap_object, heap->incremental_marking()->marking_state(heap_object));
} }
}; };
void IncrementalMarking::IterateBlackObject(HeapObject* object) { void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) { if (IsMarking() &&
ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
Page* page = Page::FromAddress(object->address()); Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object. // IterateBlackObject requires us to visit the whole object.
...@@ -647,7 +650,7 @@ void IncrementalMarking::ProcessWeakCells() { ...@@ -647,7 +650,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value()); HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list, they do not need // Remove weak cells with live objects from the list, they do not need
// clearing. // clearing.
if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) { if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
// Record slot, if value is pointing to an evacuation candidate. // Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
...@@ -678,9 +681,10 @@ bool ShouldRetainMap(Map* map, int age) { ...@@ -678,9 +681,10 @@ bool ShouldRetainMap(Map* map, int age) {
Object* constructor = map->GetConstructor(); Object* constructor = map->GetConstructor();
Heap* heap = map->GetHeap(); Heap* heap = map->GetHeap();
if (!constructor->IsHeapObject() || if (!constructor->IsHeapObject() ||
ObjectMarking::IsWhite(HeapObject::cast(constructor), ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
heap->incremental_marking()->marking_state( HeapObject::cast(constructor),
HeapObject::cast(constructor)))) { heap->incremental_marking()->marking_state(
HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can // The constructor is dead, no new objects with this map can
// be created. Do not retain this map. // be created. Do not retain this map.
return false; return false;
...@@ -710,14 +714,15 @@ void IncrementalMarking::RetainMaps() { ...@@ -710,14 +714,15 @@ void IncrementalMarking::RetainMaps() {
int new_age; int new_age;
Map* map = Map::cast(cell->value()); Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled && if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
ObjectMarking::IsWhite(map, marking_state(map))) { ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
if (ShouldRetainMap(map, age)) { if (ShouldRetainMap(map, age)) {
WhiteToGreyAndPush(map); WhiteToGreyAndPush(map);
} }
Object* prototype = map->prototype(); Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() && if (age > 0 && prototype->IsHeapObject() &&
ObjectMarking::IsWhite(HeapObject::cast(prototype), ObjectMarking::IsWhite<kAtomicity>(
marking_state(HeapObject::cast(prototype)))) { HeapObject::cast(prototype),
marking_state(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map. // The prototype is not marked, age the map.
new_age = age - 1; new_age = age - 1;
} else { } else {
...@@ -808,21 +813,21 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { ...@@ -808,21 +813,21 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
return nullptr; return nullptr;
} }
HeapObject* dest = map_word.ToForwardingAddress(); HeapObject* dest = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(dest, marking_state(dest))) { if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) {
// The object is already processed by the marker. // The object is already processed by the marker.
return nullptr; return nullptr;
} }
DCHECK( DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
ObjectMarking::IsGrey(obj, marking_state(obj)) || (obj->IsFiller() &&
(obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj)))); ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))));
return dest; return dest;
} else { } else {
DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) || DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
(obj->IsFiller() && (obj->IsFiller() &&
ObjectMarking::IsWhite(obj, marking_state(obj))) || ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) ||
(MemoryChunk::FromAddress(obj->address()) (MemoryChunk::FromAddress(obj->address())
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
ObjectMarking::IsBlack(obj, marking_state(obj)))); ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj))));
// Skip one word filler objects that appear on the // Skip one word filler objects that appear on the
// stack when we perform in place array shift. // stack when we perform in place array shift.
return (obj->map() == filler_map) ? nullptr : obj; return (obj->map() == filler_map) ? nullptr : obj;
...@@ -839,11 +844,11 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { ...@@ -839,11 +844,11 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
#if ENABLE_SLOW_DCHECKS #if ENABLE_SLOW_DCHECKS
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj)); MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
SLOW_DCHECK(Marking::IsGrey(mark_bit) || SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
Marking::IsBlack(mark_bit))); Marking::IsBlack<kAtomicity>(mark_bit)));
#endif #endif
ObjectMarking::GreyToBlack(obj, marking_state(obj)); ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj));
} }
intptr_t IncrementalMarking::ProcessMarkingDeque( intptr_t IncrementalMarking::ProcessMarkingDeque(
...@@ -856,7 +861,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque( ...@@ -856,7 +861,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
// Left trimming may result in white, grey, or black filler objects on the // Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects. // marking deque. Ignore these objects.
if (obj->IsFiller()) { if (obj->IsFiller()) {
DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj))); DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
continue; continue;
} }
...@@ -912,7 +917,8 @@ void IncrementalMarking::Hurry() { ...@@ -912,7 +917,8 @@ void IncrementalMarking::Hurry() {
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) { if (!cache->IsUndefined(heap_->isolate())) {
// Mark the cache black if it is grey. // Mark the cache black if it is grey.
bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache)); bool ignored =
ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
USE(ignored); USE(ignored);
} }
context = Context::cast(context)->next_context_link(); context = Context::cast(context)->next_context_link();
......
...@@ -182,6 +182,12 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -182,6 +182,12 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static const intptr_t kActivationThreshold = 0; static const intptr_t kActivationThreshold = 0;
#endif #endif
#if V8_CONCURRENT_MARKING
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::ATOMIC;
#else
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::NON_ATOMIC;
#endif
void FinalizeSweeping(); void FinalizeSweeping();
size_t Step(size_t bytes_to_process, CompletionAction action, size_t Step(size_t bytes_to_process, CompletionAction action,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment