Commit 79ac83e1 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Enforce explicit MarkingState

Require the use of MarkingState when going through ObjectMarking
and friends.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2770253002
Cr-Commit-Position: refs/heads/master@{#44123}
parent c7ec5bf4
...@@ -19,7 +19,10 @@ void LocalArrayBufferTracker::Free() { ...@@ -19,7 +19,10 @@ void LocalArrayBufferTracker::Free() {
for (TrackingData::iterator it = array_buffers_.begin(); for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) { it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first); JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
if ((free_mode == kFreeAll) || ObjectMarking::IsWhite(buffer)) { // TODO(mlippautz): Create a dependency on the collector to avoid getting
// the marking state out of thin air.
if ((free_mode == kFreeAll) ||
ObjectMarking::IsWhite(buffer, MarkingState::Internal(buffer))) {
const size_t len = it->second; const size_t len = it->second;
heap_->isolate()->array_buffer_allocator()->Free(buffer->backing_store(), heap_->isolate()->array_buffer_allocator()->Free(buffer->backing_store(),
len); len);
......
...@@ -3164,9 +3164,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) { ...@@ -3164,9 +3164,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
lo_space()->AdjustLiveBytes(by); lo_space()->AdjustLiveBytes(by);
} else if (!in_heap_iterator() && } else if (!in_heap_iterator() &&
!mark_compact_collector()->sweeping_in_progress() && !mark_compact_collector()->sweeping_in_progress() &&
ObjectMarking::IsBlack(object)) { ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone()); DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
MemoryChunk::IncrementLiveBytes(object, by); MarkingState::Internal(object).IncrementLiveBytes(by);
} }
} }
...@@ -3201,8 +3201,9 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, ...@@ -3201,8 +3201,9 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
// Transfer the mark bits to their new location if the object is not within // Transfer the mark bits to their new location if the object is not within
// a black area. // a black area.
if (!incremental_marking()->black_allocation() || if (!incremental_marking()->black_allocation() ||
!Marking::IsBlack( !Marking::IsBlack(ObjectMarking::MarkBitFrom(
ObjectMarking::MarkBitFrom(HeapObject::FromAddress(new_start)))) { HeapObject::FromAddress(new_start),
MarkingState::Internal(HeapObject::FromAddress(new_start))))) {
IncrementalMarking::TransferMark(this, object, IncrementalMarking::TransferMark(this, object,
HeapObject::FromAddress(new_start)); HeapObject::FromAddress(new_start));
} }
...@@ -3285,9 +3286,9 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) { ...@@ -3285,9 +3286,9 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
// Clear the mark bits of the black area that belongs now to the filler. // Clear the mark bits of the black area that belongs now to the filler.
// This is an optimization. The sweeper will release black fillers anyway. // This is an optimization. The sweeper will release black fillers anyway.
if (incremental_marking()->black_allocation() && if (incremental_marking()->black_allocation() &&
ObjectMarking::IsBlackOrGrey(filler)) { ObjectMarking::IsBlackOrGrey(filler, MarkingState::Internal(filler))) {
Page* page = Page::FromAddress(new_end); Page* page = Page::FromAddress(new_end);
page->markbits()->ClearRange( MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(new_end), page->AddressToMarkbitIndex(new_end),
page->AddressToMarkbitIndex(new_end + bytes_to_trim)); page->AddressToMarkbitIndex(new_end + bytes_to_trim));
} }
...@@ -4274,8 +4275,9 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) { ...@@ -4274,8 +4275,9 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
HeapObject* obj = HeapObject::FromAddress(addr); HeapObject* obj = HeapObject::FromAddress(addr);
// There might be grey objects due to black to grey transitions in // There might be grey objects due to black to grey transitions in
// incremental marking. E.g. see VisitNativeContextIncremental. // incremental marking. E.g. see VisitNativeContextIncremental.
DCHECK(ObjectMarking::IsBlackOrGrey(obj)); DCHECK(
if (ObjectMarking::IsBlack(obj)) { ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
incremental_marking()->IterateBlackObject(obj); incremental_marking()->IterateBlackObject(obj);
} }
addr += obj->Size(); addr += obj->Size();
...@@ -4873,7 +4875,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, ...@@ -4873,7 +4875,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
// it would be a violation of the invariant to record it's slots. // it would be a violation of the invariant to record it's slots.
bool record_slots = false; bool record_slots = false;
if (incremental_marking()->IsCompacting()) { if (incremental_marking()->IsCompacting()) {
record_slots = ObjectMarking::IsBlack(target); record_slots =
ObjectMarking::IsBlack(target, MarkingState::Internal(target));
} }
IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
...@@ -6107,7 +6110,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter { ...@@ -6107,7 +6110,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
bool SkipObject(HeapObject* object) { bool SkipObject(HeapObject* object) {
if (object->IsFiller()) return true; if (object->IsFiller()) return true;
return ObjectMarking::IsWhite(object); return ObjectMarking::IsWhite(object, MarkingState::Internal(object));
} }
private: private:
...@@ -6121,7 +6124,8 @@ class UnreachableObjectsFilter : public HeapObjectsFilter { ...@@ -6121,7 +6124,8 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
HeapObject* obj = HeapObject::cast(*p); HeapObject* obj = HeapObject::cast(*p);
// Use Marking instead of ObjectMarking to avoid adjusting live bytes // Use Marking instead of ObjectMarking to avoid adjusting live bytes
// counter. // counter.
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
if (Marking::IsWhite(mark_bit)) { if (Marking::IsWhite(mark_bit)) {
Marking::WhiteToBlack(mark_bit); Marking::WhiteToBlack(mark_bit);
marking_stack_.Add(obj); marking_stack_.Add(obj);
......
...@@ -40,11 +40,14 @@ IncrementalMarking::IncrementalMarking(Heap* heap) ...@@ -40,11 +40,14 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) { bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value); HeapObject* value_heap_obj = HeapObject::cast(value);
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj)); DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
DCHECK(!ObjectMarking::IsImpossible(obj)); MarkingState::Internal(value_heap_obj)));
const bool is_black = ObjectMarking::IsBlack(obj); DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
const bool is_black =
if (is_black && ObjectMarking::IsWhite(value_heap_obj)) { ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
if (is_black && ObjectMarking::IsWhite(
value_heap_obj, MarkingState::Internal(value_heap_obj))) {
WhiteToGreyAndPush(value_heap_obj); WhiteToGreyAndPush(value_heap_obj);
RestartIfNotMarking(); RestartIfNotMarking();
} }
...@@ -117,7 +120,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, ...@@ -117,7 +120,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
} }
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) { void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey(obj); ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
heap_->mark_compact_collector()->marking_deque()->Push(obj); heap_->mark_compact_collector()->marking_deque()->Push(obj);
} }
...@@ -135,8 +138,10 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from, ...@@ -135,8 +138,10 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
// size, so the adjustment to the live data count will be zero anyway. // size, so the adjustment to the live data count will be zero anyway.
if (from == to) return; if (from == to) return;
MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to); MarkBit new_mark_bit =
MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from); ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
MarkBit old_mark_bit =
ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
if (Marking::IsBlack(old_mark_bit)) { if (Marking::IsBlack(old_mark_bit)) {
Marking::MarkBlack(new_mark_bit); Marking::MarkBlack(new_mark_bit);
...@@ -185,10 +190,11 @@ class IncrementalMarkingMarkingVisitor ...@@ -185,10 +190,11 @@ class IncrementalMarkingMarkingVisitor
} while (scan_until_end && start_offset < object_size); } while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset); chunk->set_progress_bar(start_offset);
if (start_offset < object_size) { if (start_offset < object_size) {
if (ObjectMarking::IsGrey(object)) { if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
heap->mark_compact_collector()->marking_deque()->Unshift(object); heap->mark_compact_collector()->marking_deque()->Unshift(object);
} else { } else {
DCHECK(ObjectMarking::IsBlack(object)); DCHECK(
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
heap->mark_compact_collector()->UnshiftBlack(object); heap->mark_compact_collector()->UnshiftBlack(object);
} }
heap->incremental_marking()->NotifyIncompleteScanOfObject( heap->incremental_marking()->NotifyIncompleteScanOfObject(
...@@ -211,8 +217,10 @@ class IncrementalMarkingMarkingVisitor ...@@ -211,8 +217,10 @@ class IncrementalMarkingMarkingVisitor
HeapObject* heap_obj = HeapObject::cast(cache); HeapObject* heap_obj = HeapObject::cast(cache);
// Mark the object grey if it is white, do not enque it into the marking // Mark the object grey if it is white, do not enque it into the marking
// deque. // deque.
if (ObjectMarking::IsWhite(heap_obj)) { if (ObjectMarking::IsWhite(heap_obj,
ObjectMarking::WhiteToGrey(heap_obj); MarkingState::Internal(heap_obj))) {
ObjectMarking::WhiteToGrey(heap_obj,
MarkingState::Internal(heap_obj));
} }
} }
} }
...@@ -247,8 +255,10 @@ class IncrementalMarkingMarkingVisitor ...@@ -247,8 +255,10 @@ class IncrementalMarkingMarkingVisitor
// Returns true if object needed marking and false otherwise. // Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) { INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj); HeapObject* heap_object = HeapObject::cast(obj);
if (ObjectMarking::IsWhite(heap_object)) { if (ObjectMarking::IsWhite(heap_object,
ObjectMarking::WhiteToBlack(heap_object); MarkingState::Internal(heap_object))) {
ObjectMarking::WhiteToBlack(heap_object,
MarkingState::Internal(heap_object));
return true; return true;
} }
return false; return false;
...@@ -256,7 +266,8 @@ class IncrementalMarkingMarkingVisitor ...@@ -256,7 +266,8 @@ class IncrementalMarkingMarkingVisitor
}; };
void IncrementalMarking::IterateBlackObject(HeapObject* object) { void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() && ObjectMarking::IsBlack(object)) { if (IsMarking() &&
ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Page* page = Page::FromAddress(object->address()); Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) { if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object. // IterateBlackObject requires us to visit the whole object.
...@@ -593,7 +604,7 @@ void IncrementalMarking::ProcessWeakCells() { ...@@ -593,7 +604,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value()); HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list, they do not need // Remove weak cells with live objects from the list, they do not need
// clearing. // clearing.
if (ObjectMarking::IsBlackOrGrey(value)) { if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Record slot, if value is pointing to an evacuation candidate. // Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot); heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
...@@ -623,7 +634,9 @@ bool ShouldRetainMap(Map* map, int age) { ...@@ -623,7 +634,9 @@ bool ShouldRetainMap(Map* map, int age) {
} }
Object* constructor = map->GetConstructor(); Object* constructor = map->GetConstructor();
if (!constructor->IsHeapObject() || if (!constructor->IsHeapObject() ||
ObjectMarking::IsWhite(HeapObject::cast(constructor))) { ObjectMarking::IsWhite(
HeapObject::cast(constructor),
MarkingState::Internal(HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can // The constructor is dead, no new objects with this map can
// be created. Do not retain this map. // be created. Do not retain this map.
return false; return false;
...@@ -653,13 +666,15 @@ void IncrementalMarking::RetainMaps() { ...@@ -653,13 +666,15 @@ void IncrementalMarking::RetainMaps() {
int new_age; int new_age;
Map* map = Map::cast(cell->value()); Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled && if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
ObjectMarking::IsWhite(map)) { ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
if (ShouldRetainMap(map, age)) { if (ShouldRetainMap(map, age)) {
MarkGrey(heap(), map); MarkGrey(heap(), map);
} }
Object* prototype = map->prototype(); Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() && if (age > 0 && prototype->IsHeapObject() &&
ObjectMarking::IsWhite(HeapObject::cast(prototype))) { ObjectMarking::IsWhite(
HeapObject::cast(prototype),
MarkingState::Internal(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map. // The prototype is not marked, age the map.
new_age = age - 1; new_age = age - 1;
} else { } else {
...@@ -763,12 +778,14 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { ...@@ -763,12 +778,14 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
// them. // them.
if (map_word.IsForwardingAddress()) { if (map_word.IsForwardingAddress()) {
HeapObject* dest = map_word.ToForwardingAddress(); HeapObject* dest = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(dest)) continue; if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
continue;
array[new_top] = dest; array[new_top] = dest;
new_top = ((new_top + 1) & mask); new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom()); DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) || DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj))); (obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
} }
} else if (obj->map() != filler_map) { } else if (obj->map() != filler_map) {
// Skip one word filler objects that appear on the // Skip one word filler objects that appear on the
...@@ -776,11 +793,12 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() { ...@@ -776,11 +793,12 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
array[new_top] = obj; array[new_top] = obj;
new_top = ((new_top + 1) & mask); new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom()); DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) || DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)) || (obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
(MemoryChunk::FromAddress(obj->address()) (MemoryChunk::FromAddress(obj->address())
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
ObjectMarking::IsBlack(obj))); ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
} }
} }
marking_deque->set_top(new_top); marking_deque->set_top(new_top);
...@@ -793,7 +811,8 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { ...@@ -793,7 +811,8 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj); IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
#if ENABLE_SLOW_DCHECKS #if ENABLE_SLOW_DCHECKS
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
SLOW_DCHECK(Marking::IsGrey(mark_bit) || SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) && (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
...@@ -803,14 +822,14 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) { ...@@ -803,14 +822,14 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
} }
void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) { void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
if (ObjectMarking::IsWhite(object)) { if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
heap->incremental_marking()->WhiteToGreyAndPush(object); heap->incremental_marking()->WhiteToGreyAndPush(object);
} }
} }
void IncrementalMarking::MarkBlack(HeapObject* obj, int size) { void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
if (ObjectMarking::IsBlack(obj)) return; if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
ObjectMarking::GreyToBlack(obj); ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
} }
intptr_t IncrementalMarking::ProcessMarkingDeque( intptr_t IncrementalMarking::ProcessMarkingDeque(
...@@ -825,7 +844,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque( ...@@ -825,7 +844,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
// Left trimming may result in white, grey, or black filler objects on the // Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects. // marking deque. Ignore these objects.
if (obj->IsFiller()) { if (obj->IsFiller()) {
DCHECK(!ObjectMarking::IsImpossible(obj)); DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
continue; continue;
} }
...@@ -880,8 +899,8 @@ void IncrementalMarking::Hurry() { ...@@ -880,8 +899,8 @@ void IncrementalMarking::Hurry() {
HeapObject* cache = HeapObject::cast( HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) { if (!cache->IsUndefined(heap_->isolate())) {
if (ObjectMarking::IsGrey(cache)) { if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
ObjectMarking::GreyToBlack(cache); ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
} }
} }
context = Context::cast(context)->next_context_link(); context = Context::cast(context)->next_context_link();
......
...@@ -186,16 +186,16 @@ class V8_EXPORT_PRIVATE IncrementalMarking { ...@@ -186,16 +186,16 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to); static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);
V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) { V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) {
if (ObjectMarking::IsBlack(to)) { if (ObjectMarking::IsBlack(to, MarkingState::Internal(to))) {
DCHECK(to->GetHeap()->incremental_marking()->black_allocation()); DCHECK(to->GetHeap()->incremental_marking()->black_allocation());
return; return;
} }
DCHECK(ObjectMarking::IsWhite(to)); DCHECK(ObjectMarking::IsWhite(to, MarkingState::Internal(to)));
if (ObjectMarking::IsGrey(from)) { if (ObjectMarking::IsGrey(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToGrey(to); ObjectMarking::WhiteToGrey(to, MarkingState::Internal(to));
} else if (ObjectMarking::IsBlack(from)) { } else if (ObjectMarking::IsBlack(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToBlack(to); ObjectMarking::WhiteToBlack(to, MarkingState::Internal(to));
} }
} }
......
...@@ -13,37 +13,44 @@ namespace v8 { ...@@ -13,37 +13,44 @@ namespace v8 {
namespace internal { namespace internal {
void MarkCompactCollector::PushBlack(HeapObject* obj) { void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj))); DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))));
if (!marking_deque()->Push(obj)) { if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj); ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
} }
} }
void MinorMarkCompactCollector::PushBlack(HeapObject* obj) { void MinorMarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK( DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
(ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj)))); obj, MarkingState::External(obj))));
if (!marking_deque()->Push(obj)) { if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj, StateForObject(obj)); ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
} }
} }
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) { void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
DCHECK(ObjectMarking::IsBlack(obj)); DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
if (!marking_deque()->Unshift(obj)) { if (!marking_deque()->Unshift(obj)) {
ObjectMarking::BlackToGrey(obj); ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
} }
} }
void MarkCompactCollector::MarkObject(HeapObject* obj) { void MarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj)) { if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj); obj, MarkingState::Internal(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
PushBlack(obj); PushBlack(obj);
} }
} }
void MinorMarkCompactCollector::MarkObject(HeapObject* obj) { void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj, StateForObject(obj))) { if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj)); obj, MarkingState::External(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
PushBlack(obj); PushBlack(obj);
} }
} }
...@@ -54,7 +61,8 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot, ...@@ -54,7 +61,8 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object)); Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
if (target_page->IsEvacuationCandidate() && if (target_page->IsEvacuationCandidate() &&
!ShouldSkipEvacuationSlotRecording(object)) { !ShouldSkipEvacuationSlotRecording(object)) {
DCHECK(ObjectMarking::IsBlackOrGrey(object)); DCHECK(
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
RememberedSet<OLD_TO_OLD>::Insert(source_page, RememberedSet<OLD_TO_OLD>::Insert(source_page,
reinterpret_cast<Address>(slot)); reinterpret_cast<Address>(slot));
} }
......
...@@ -104,10 +104,10 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page& page, ...@@ -104,10 +104,10 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page& page,
// The object is either part of a black area of black allocation or a // The object is either part of a black area of black allocation or a
// regular black object // regular black object
CHECK( CHECK(
state.bitmap->AllBitsSetInRange( state.bitmap()->AllBitsSetInRange(
page.AddressToMarkbitIndex(current), page.AddressToMarkbitIndex(current),
page.AddressToMarkbitIndex(next_object_must_be_here_or_later)) || page.AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
state.bitmap->AllBitsClearInRange( state.bitmap()->AllBitsClearInRange(
page.AddressToMarkbitIndex(current + kPointerSize * 2), page.AddressToMarkbitIndex(current + kPointerSize * 2),
page.AddressToMarkbitIndex(next_object_must_be_here_or_later))); page.AddressToMarkbitIndex(next_object_must_be_here_or_later)));
current = next_object_must_be_here_or_later; current = next_object_must_be_here_or_later;
...@@ -159,11 +159,11 @@ class FullMarkingVerifier : public MarkingVerifier { ...@@ -159,11 +159,11 @@ class FullMarkingVerifier : public MarkingVerifier {
protected: protected:
MarkingState marking_state(MemoryChunk* chunk) override { MarkingState marking_state(MemoryChunk* chunk) override {
return MarkingState::FromPageInternal(chunk); return MarkingState::Internal(chunk);
} }
MarkingState marking_state(HeapObject* object) { MarkingState marking_state(HeapObject* object) {
return marking_state(Page::FromAddress(object->address())); return MarkingState::Internal(object);
} }
void VisitPointers(Object** start, Object** end) override { void VisitPointers(Object** start, Object** end) override {
...@@ -197,11 +197,11 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier { ...@@ -197,11 +197,11 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap) {} explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap) {}
MarkingState marking_state(MemoryChunk* chunk) override { MarkingState marking_state(MemoryChunk* chunk) override {
return MarkingState::FromPageExternal(chunk); return MarkingState::External(chunk);
} }
MarkingState marking_state(HeapObject* object) { MarkingState marking_state(HeapObject* object) {
return marking_state(Page::FromAddress(object->address())); return MarkingState::External(object);
} }
void Run() override { void Run() override {
...@@ -379,16 +379,18 @@ void MarkCompactCollector::CollectGarbage() { ...@@ -379,16 +379,18 @@ void MarkCompactCollector::CollectGarbage() {
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) { void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
for (Page* p : *space) { for (Page* p : *space) {
CHECK(p->markbits()->IsClean()); const MarkingState state = MarkingState::Internal(p);
CHECK_EQ(0, p->LiveBytes()); CHECK(state.bitmap()->IsClean());
CHECK_EQ(0, state.live_bytes());
} }
} }
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) { void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
for (Page* p : PageRange(space->bottom(), space->top())) { for (Page* p : PageRange(space->bottom(), space->top())) {
CHECK(p->markbits()->IsClean()); const MarkingState state = MarkingState::Internal(p);
CHECK_EQ(0, p->LiveBytes()); CHECK(state.bitmap()->IsClean());
CHECK_EQ(0, state.live_bytes());
} }
} }
...@@ -401,8 +403,8 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() { ...@@ -401,8 +403,8 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
LargeObjectIterator it(heap_->lo_space()); LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
CHECK(ObjectMarking::IsWhite(obj)); CHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes()); CHECK_EQ(0, MarkingState::Internal(obj).live_bytes());
} }
} }
...@@ -430,14 +432,14 @@ void MarkCompactCollector::VerifyOmittedMapChecks() { ...@@ -430,14 +432,14 @@ void MarkCompactCollector::VerifyOmittedMapChecks() {
static void ClearMarkbitsInPagedSpace(PagedSpace* space) { static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
for (Page* p : *space) { for (Page* p : *space) {
p->ClearLiveness(); MarkingState::Internal(p).ClearLiveness();
} }
} }
static void ClearMarkbitsInNewSpace(NewSpace* space) { static void ClearMarkbitsInNewSpace(NewSpace* space) {
for (Page* page : *space) { for (Page* p : *space) {
page->ClearLiveness(); MarkingState::Internal(p).ClearLiveness();
} }
} }
...@@ -491,7 +493,10 @@ void MarkCompactCollector::Sweeper::StartSweeping() { ...@@ -491,7 +493,10 @@ void MarkCompactCollector::Sweeper::StartSweeping() {
sweeping_in_progress_ = true; sweeping_in_progress_ = true;
ForAllSweepingSpaces([this](AllocationSpace space) { ForAllSweepingSpaces([this](AllocationSpace space) {
std::sort(sweeping_list_[space].begin(), sweeping_list_[space].end(), std::sort(sweeping_list_[space].begin(), sweeping_list_[space].end(),
[](Page* a, Page* b) { return a->LiveBytes() < b->LiveBytes(); }); [](Page* a, Page* b) {
return MarkingState::Internal(a).live_bytes() <
MarkingState::Internal(b).live_bytes();
});
}); });
} }
...@@ -945,7 +950,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() { ...@@ -945,7 +950,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
SharedFunctionInfo* shared = candidate->shared(); SharedFunctionInfo* shared = candidate->shared();
Code* code = shared->code(); Code* code = shared->code();
if (ObjectMarking::IsWhite(code)) { if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) {
if (FLAG_trace_code_flushing && shared->is_compiled()) { if (FLAG_trace_code_flushing && shared->is_compiled()) {
PrintF("[code-flushing clears: "); PrintF("[code-flushing clears: ");
shared->ShortPrint(); shared->ShortPrint();
...@@ -963,7 +968,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() { ...@@ -963,7 +968,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
candidate->set_code(lazy_compile); candidate->set_code(lazy_compile);
} }
} else { } else {
DCHECK(ObjectMarking::IsBlack(code)); DCHECK(ObjectMarking::IsBlack(code, MarkingState::Internal(code)));
candidate->set_code(code); candidate->set_code(code);
} }
...@@ -997,7 +1002,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() { ...@@ -997,7 +1002,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
ClearNextCandidate(candidate); ClearNextCandidate(candidate);
Code* code = candidate->code(); Code* code = candidate->code();
if (ObjectMarking::IsWhite(code)) { if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) {
if (FLAG_trace_code_flushing && candidate->is_compiled()) { if (FLAG_trace_code_flushing && candidate->is_compiled()) {
PrintF("[code-flushing clears: "); PrintF("[code-flushing clears: ");
candidate->ShortPrint(); candidate->ShortPrint();
...@@ -1132,11 +1137,11 @@ class StaticYoungGenerationMarkingVisitor ...@@ -1132,11 +1137,11 @@ class StaticYoungGenerationMarkingVisitor
StackLimitCheck check(heap->isolate()); StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false; if (check.HasOverflowed()) return false;
const MarkingState state = if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
MinorMarkCompactCollector::StateForObject(object); object, MarkingState::External(object)))
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object, state))
return true; return true;
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object, state); ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
IterateBody(object->map(), object); IterateBody(object->map(), object);
return true; return true;
} }
...@@ -1173,8 +1178,8 @@ class MarkCompactMarkingVisitor ...@@ -1173,8 +1178,8 @@ class MarkCompactMarkingVisitor
// Marks the object black without pushing it on the marking stack. // Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise. // Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) { INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
if (ObjectMarking::IsWhite(object)) { if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
ObjectMarking::WhiteToBlack(object); ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
return true; return true;
} }
return false; return false;
...@@ -1195,11 +1200,11 @@ class MarkCompactMarkingVisitor ...@@ -1195,11 +1200,11 @@ class MarkCompactMarkingVisitor
HeapObject* obj)) { HeapObject* obj)) {
#ifdef DEBUG #ifdef DEBUG
DCHECK(collector->heap()->Contains(obj)); DCHECK(collector->heap()->Contains(obj));
DCHECK(ObjectMarking::IsWhite(obj)); DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
#endif #endif
Map* map = obj->map(); Map* map = obj->map();
Heap* heap = obj->GetHeap(); Heap* heap = obj->GetHeap();
ObjectMarking::WhiteToBlack(obj); ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
// Mark the map pointer and the body. // Mark the map pointer and the body.
heap->mark_compact_collector()->MarkObject(map); heap->mark_compact_collector()->MarkObject(map);
IterateBody(map, obj); IterateBody(map, obj);
...@@ -1220,7 +1225,8 @@ class MarkCompactMarkingVisitor ...@@ -1220,7 +1225,8 @@ class MarkCompactMarkingVisitor
if (!o->IsHeapObject()) continue; if (!o->IsHeapObject()) continue;
collector->RecordSlot(object, p, o); collector->RecordSlot(object, p, o);
HeapObject* obj = HeapObject::cast(o); HeapObject* obj = HeapObject::cast(o);
if (ObjectMarking::IsBlackOrGrey(obj)) continue; if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)))
continue;
VisitUnmarkedObject(collector, obj); VisitUnmarkedObject(collector, obj);
} }
return true; return true;
...@@ -1253,7 +1259,7 @@ class MarkCompactMarkingVisitor ...@@ -1253,7 +1259,7 @@ class MarkCompactMarkingVisitor
// was marked through the compilation cache before marker reached JSRegExp // was marked through the compilation cache before marker reached JSRegExp
// object. // object.
FixedArray* data = FixedArray::cast(re->data()); FixedArray* data = FixedArray::cast(re->data());
if (ObjectMarking::IsBlackOrGrey(data)) { if (ObjectMarking::IsBlackOrGrey(data, MarkingState::Internal(data))) {
Object** slot = Object** slot =
data->data_start() + JSRegExp::saved_code_index(is_one_byte); data->data_start() + JSRegExp::saved_code_index(is_one_byte);
heap->mark_compact_collector()->RecordSlot(data, slot, code); heap->mark_compact_collector()->RecordSlot(data, slot, code);
...@@ -1411,12 +1417,12 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public ObjectVisitor { ...@@ -1411,12 +1417,12 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public ObjectVisitor {
if (!collector_->heap()->InNewSpace(object)) return; if (!collector_->heap()->InNewSpace(object)) return;
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>( if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, StateForObject(object))) object, MarkingState::External(object)))
return; return;
Map* map = object->map(); Map* map = object->map();
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object, ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
StateForObject(object)); object, MarkingState::External(object));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object); StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
collector_->EmptyMarkingDeque(); collector_->EmptyMarkingDeque();
...@@ -1447,11 +1453,14 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor { ...@@ -1447,11 +1453,14 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor {
HeapObject* object = HeapObject::cast(*p); HeapObject* object = HeapObject::cast(*p);
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object)) return; if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object)))
return;
Map* map = object->map(); Map* map = object->map();
// Mark the object. // Mark the object.
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object); ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object));
// Mark the map pointer and body, and push them on the marking stack. // Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map); collector_->MarkObject(map);
...@@ -1481,7 +1490,9 @@ class StringTableCleaner : public ObjectVisitor { ...@@ -1481,7 +1490,9 @@ class StringTableCleaner : public ObjectVisitor {
for (Object** p = start; p < end; p++) { for (Object** p = start; p < end; p++) {
Object* o = *p; Object* o = *p;
if (o->IsHeapObject()) { if (o->IsHeapObject()) {
if (ObjectMarking::IsWhite(HeapObject::cast(o))) { HeapObject* heap_object = HeapObject::cast(o);
if (ObjectMarking::IsWhite(heap_object,
MarkingState::Internal(heap_object))) {
if (finalize_external_strings) { if (finalize_external_strings) {
if (o->IsExternalString()) { if (o->IsExternalString()) {
heap_->FinalizeExternalString(String::cast(*p)); heap_->FinalizeExternalString(String::cast(*p));
...@@ -1522,8 +1533,11 @@ typedef StringTableCleaner<true, false> ExternalStringTableCleaner; ...@@ -1522,8 +1533,11 @@ typedef StringTableCleaner<true, false> ExternalStringTableCleaner;
class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
public: public:
virtual Object* RetainAs(Object* object) { virtual Object* RetainAs(Object* object) {
DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object))); HeapObject* heap_object = HeapObject::cast(object);
if (ObjectMarking::IsBlack(HeapObject::cast(object))) { DCHECK(!ObjectMarking::IsGrey(heap_object,
MarkingState::Internal(heap_object)));
if (ObjectMarking::IsBlack(heap_object,
MarkingState::Internal(heap_object))) {
return object; return object;
} else if (object->IsAllocationSite() && } else if (object->IsAllocationSite() &&
!(AllocationSite::cast(object)->IsZombie())) { !(AllocationSite::cast(object)->IsZombie())) {
...@@ -1531,7 +1545,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { ...@@ -1531,7 +1545,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
// space. These sites get a one-time reprieve. // space. These sites get a one-time reprieve.
AllocationSite* site = AllocationSite::cast(object); AllocationSite* site = AllocationSite::cast(object);
site->MarkZombie(); site->MarkZombie();
ObjectMarking::WhiteToBlack(site); ObjectMarking::WhiteToBlack(site, MarkingState::Internal(site));
return object; return object;
} else { } else {
return NULL; return NULL;
...@@ -1551,8 +1565,9 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { ...@@ -1551,8 +1565,9 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
Map* filler_map = heap()->one_pointer_filler_map(); Map* filler_map = heap()->one_pointer_filler_map();
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) { for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) { if ((object->map() != filler_map) &&
ObjectMarking::GreyToBlack(object); ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object); PushBlack(object);
if (marking_deque()->IsFull()) return; if (marking_deque()->IsFull()) return;
} }
...@@ -1561,11 +1576,11 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) { ...@@ -1561,11 +1576,11 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) { void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
DCHECK(!marking_deque()->IsFull()); DCHECK(!marking_deque()->IsFull());
LiveObjectIterator<kGreyObjects> it(p, MarkingState::FromPageInternal(p)); LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL; HeapObject* object = NULL;
while ((object = it.Next()) != NULL) { while ((object = it.Next()) != NULL) {
DCHECK(ObjectMarking::IsGrey(object)); DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
ObjectMarking::GreyToBlack(object); ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object); PushBlack(object);
if (marking_deque()->IsFull()) return; if (marking_deque()->IsFull()) return;
} }
...@@ -2018,15 +2033,18 @@ void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() { ...@@ -2018,15 +2033,18 @@ void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
Object* o = *p; Object* o = *p;
if (!o->IsHeapObject()) return false; if (!o->IsHeapObject()) return false;
return ObjectMarking::IsWhite(HeapObject::cast(o)); return ObjectMarking::IsWhite(HeapObject::cast(o),
MarkingState::Internal(HeapObject::cast(o)));
} }
void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) { void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
StringTable* string_table = heap()->string_table(); StringTable* string_table = heap()->string_table();
// Mark the string table itself. // Mark the string table itself.
if (ObjectMarking::IsWhite(string_table)) { if (ObjectMarking::IsWhite(string_table,
MarkingState::Internal(string_table))) {
// String table could have already been marked by visiting the handles list. // String table could have already been marked by visiting the handles list.
ObjectMarking::WhiteToBlack(string_table); ObjectMarking::WhiteToBlack(string_table,
MarkingState::Internal(string_table));
} }
// Explicitly mark the prefix. // Explicitly mark the prefix.
string_table->IteratePrefix(visitor); string_table->IteratePrefix(visitor);
...@@ -2059,7 +2077,8 @@ void MarkCompactCollector::EmptyMarkingDeque() { ...@@ -2059,7 +2077,8 @@ void MarkCompactCollector::EmptyMarkingDeque() {
DCHECK(!object->IsFiller()); DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject()); DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object)); DCHECK(heap()->Contains(object));
DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(object))); DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object))));
Map* map = object->map(); Map* map = object->map();
MarkObject(map); MarkObject(map);
...@@ -2250,10 +2269,10 @@ class ObjectStatsVisitor : public HeapObjectVisitor { ...@@ -2250,10 +2269,10 @@ class ObjectStatsVisitor : public HeapObjectVisitor {
} }
bool Visit(HeapObject* obj) override { bool Visit(HeapObject* obj) override {
if (ObjectMarking::IsBlack(obj)) { if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
live_collector_.CollectStatistics(obj); live_collector_.CollectStatistics(obj);
} else { } else {
DCHECK(!ObjectMarking::IsGrey(obj)); DCHECK(!ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)));
dead_collector_.CollectStatistics(obj); dead_collector_.CollectStatistics(obj);
} }
return true; return true;
...@@ -2309,8 +2328,7 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject( ...@@ -2309,8 +2328,7 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject(
// has to be in ToSpace. // has to be in ToSpace.
DCHECK(heap->InToSpace(object)); DCHECK(heap->InToSpace(object));
HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
const MarkingState state = const MarkingState state = MarkingState::External(heap_object);
MinorMarkCompactCollector::StateForObject(heap_object);
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(heap_object, state)) { if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(heap_object, state)) {
return KEEP_SLOT; return KEEP_SLOT;
} }
...@@ -2324,7 +2342,9 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject( ...@@ -2324,7 +2342,9 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject(
static bool IsUnmarkedObject(Heap* heap, Object** p) { static bool IsUnmarkedObject(Heap* heap, Object** p) {
DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p)); DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p)); return heap->InNewSpace(*p) &&
!ObjectMarking::IsBlack(HeapObject::cast(*p),
MarkingState::Internal(HeapObject::cast(*p)));
} }
void MinorMarkCompactCollector::MarkLiveObjects() { void MinorMarkCompactCollector::MarkLiveObjects() {
...@@ -2398,11 +2418,11 @@ void MinorMarkCompactCollector::EmptyMarkingDeque() { ...@@ -2398,11 +2418,11 @@ void MinorMarkCompactCollector::EmptyMarkingDeque() {
DCHECK(heap()->Contains(object)); DCHECK(heap()->Contains(object));
DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>( DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
object, StateForObject(object)))); object, MarkingState::External(object))));
Map* map = object->map(); Map* map = object->map();
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>( DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
object, StateForObject(object)))); object, MarkingState::External(object))));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object); StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
} }
} }
...@@ -2626,11 +2646,12 @@ void MarkCompactCollector::ClearSimpleMapTransitions( ...@@ -2626,11 +2646,12 @@ void MarkCompactCollector::ClearSimpleMapTransitions(
while (weak_cell_obj != Smi::kZero) { while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = WeakCell::cast(weak_cell_obj); WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
Map* map = Map::cast(weak_cell->value()); Map* map = Map::cast(weak_cell->value());
DCHECK(ObjectMarking::IsWhite(map)); DCHECK(ObjectMarking::IsWhite(map, MarkingState::Internal(map)));
Object* potential_parent = map->constructor_or_backpointer(); Object* potential_parent = map->constructor_or_backpointer();
if (potential_parent->IsMap()) { if (potential_parent->IsMap()) {
Map* parent = Map::cast(potential_parent); Map* parent = Map::cast(potential_parent);
if (ObjectMarking::IsBlackOrGrey(parent) && if (ObjectMarking::IsBlackOrGrey(parent,
MarkingState::Internal(parent)) &&
parent->raw_transitions() == weak_cell) { parent->raw_transitions() == weak_cell) {
ClearSimpleMapTransition(parent, map); ClearSimpleMapTransition(parent, map);
} }
...@@ -2669,7 +2690,8 @@ void MarkCompactCollector::ClearFullMapTransitions() { ...@@ -2669,7 +2690,8 @@ void MarkCompactCollector::ClearFullMapTransitions() {
if (num_transitions > 0) { if (num_transitions > 0) {
Map* map = array->GetTarget(0); Map* map = array->GetTarget(0);
Map* parent = Map::cast(map->constructor_or_backpointer()); Map* parent = Map::cast(map->constructor_or_backpointer());
bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent); bool parent_is_alive =
ObjectMarking::IsBlackOrGrey(parent, MarkingState::Internal(parent));
DescriptorArray* descriptors = DescriptorArray* descriptors =
parent_is_alive ? parent->instance_descriptors() : nullptr; parent_is_alive ? parent->instance_descriptors() : nullptr;
bool descriptors_owner_died = bool descriptors_owner_died =
...@@ -2694,7 +2716,7 @@ bool MarkCompactCollector::CompactTransitionArray( ...@@ -2694,7 +2716,7 @@ bool MarkCompactCollector::CompactTransitionArray(
for (int i = 0; i < num_transitions; ++i) { for (int i = 0; i < num_transitions; ++i) {
Map* target = transitions->GetTarget(i); Map* target = transitions->GetTarget(i);
DCHECK_EQ(target->constructor_or_backpointer(), map); DCHECK_EQ(target->constructor_or_backpointer(), map);
if (ObjectMarking::IsWhite(target)) { if (ObjectMarking::IsWhite(target, MarkingState::Internal(target))) {
if (descriptors != nullptr && if (descriptors != nullptr &&
target->instance_descriptors() == descriptors) { target->instance_descriptors() == descriptors) {
descriptors_owner_died = true; descriptors_owner_died = true;
...@@ -2786,11 +2808,14 @@ void MarkCompactCollector::ProcessWeakCollections() { ...@@ -2786,11 +2808,14 @@ void MarkCompactCollector::ProcessWeakCollections() {
while (weak_collection_obj != Smi::kZero) { while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection = JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj); reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection)); DCHECK(ObjectMarking::IsBlackOrGrey(
weak_collection, MarkingState::Internal(weak_collection)));
if (weak_collection->table()->IsHashTable()) { if (weak_collection->table()->IsHashTable()) {
ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
for (int i = 0; i < table->Capacity(); i++) { for (int i = 0; i < table->Capacity(); i++) {
if (ObjectMarking::IsBlackOrGrey(HeapObject::cast(table->KeyAt(i)))) { HeapObject* heap_object = HeapObject::cast(table->KeyAt(i));
if (ObjectMarking::IsBlackOrGrey(heap_object,
MarkingState::Internal(heap_object))) {
Object** key_slot = Object** key_slot =
table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i)); table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
RecordSlot(table, key_slot, *key_slot); RecordSlot(table, key_slot, *key_slot);
...@@ -2812,12 +2837,13 @@ void MarkCompactCollector::ClearWeakCollections() { ...@@ -2812,12 +2837,13 @@ void MarkCompactCollector::ClearWeakCollections() {
while (weak_collection_obj != Smi::kZero) { while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection = JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj); reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection)); DCHECK(ObjectMarking::IsBlackOrGrey(
weak_collection, MarkingState::Internal(weak_collection)));
if (weak_collection->table()->IsHashTable()) { if (weak_collection->table()->IsHashTable()) {
ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table()); ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
for (int i = 0; i < table->Capacity(); i++) { for (int i = 0; i < table->Capacity(); i++) {
HeapObject* key = HeapObject::cast(table->KeyAt(i)); HeapObject* key = HeapObject::cast(table->KeyAt(i));
if (!ObjectMarking::IsBlackOrGrey(key)) { if (!ObjectMarking::IsBlackOrGrey(key, MarkingState::Internal(key))) {
table->RemoveEntry(i); table->RemoveEntry(i);
} }
} }
...@@ -2858,7 +2884,7 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, ...@@ -2858,7 +2884,7 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
// We do not insert cleared weak cells into the list, so the value // We do not insert cleared weak cells into the list, so the value
// cannot be a Smi here. // cannot be a Smi here.
HeapObject* value = HeapObject::cast(weak_cell->value()); HeapObject* value = HeapObject::cast(weak_cell->value());
if (!ObjectMarking::IsBlackOrGrey(value)) { if (!ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Cells for new-space objects embedded in optimized code are wrapped in // Cells for new-space objects embedded in optimized code are wrapped in
// WeakCell and put into Heap::weak_object_to_code_table. // WeakCell and put into Heap::weak_object_to_code_table.
// Such cells do not have any strong references but we want to keep them // Such cells do not have any strong references but we want to keep them
...@@ -2867,9 +2893,11 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list, ...@@ -2867,9 +2893,11 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
if (value->IsCell()) { if (value->IsCell()) {
Object* cell_value = Cell::cast(value)->value(); Object* cell_value = Cell::cast(value)->value();
if (cell_value->IsHeapObject() && if (cell_value->IsHeapObject() &&
ObjectMarking::IsBlackOrGrey(HeapObject::cast(cell_value))) { ObjectMarking::IsBlackOrGrey(
HeapObject::cast(cell_value),
MarkingState::Internal(HeapObject::cast(cell_value)))) {
// Resurrect the cell. // Resurrect the cell.
ObjectMarking::WhiteToBlack(value); ObjectMarking::WhiteToBlack(value, MarkingState::Internal(value));
Object** slot = HeapObject::RawField(value, Cell::kValueOffset); Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
RecordSlot(value, slot, *slot); RecordSlot(value, slot, *slot);
slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
...@@ -3163,7 +3191,7 @@ class FullEvacuator : public Evacuator { ...@@ -3163,7 +3191,7 @@ class FullEvacuator : public Evacuator {
bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) { bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
bool success = false; bool success = false;
DCHECK(page->SweepingDone()); DCHECK(page->SweepingDone());
intptr_t saved_live_bytes = *state.live_bytes; intptr_t saved_live_bytes = state.live_bytes();
double evacuation_time = 0.0; double evacuation_time = 0.0;
{ {
AlwaysAllocateScope always_allocate(heap()->isolate()); AlwaysAllocateScope always_allocate(heap()->isolate());
...@@ -3183,7 +3211,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) { ...@@ -3183,7 +3211,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
page, state, &new_to_old_page_visitor_, page, state, &new_to_old_page_visitor_,
LiveObjectVisitor::kKeepMarking); LiveObjectVisitor::kKeepMarking);
DCHECK(success); DCHECK(success);
new_to_old_page_visitor_.account_moved_bytes(page->LiveBytes()); new_to_old_page_visitor_.account_moved_bytes(
MarkingState::Internal(page).live_bytes());
// ArrayBufferTracker will be updated during sweeping. // ArrayBufferTracker will be updated during sweeping.
break; break;
case kPageNewToNew: case kPageNewToNew:
...@@ -3191,7 +3220,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) { ...@@ -3191,7 +3220,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
page, state, &new_to_new_page_visitor_, page, state, &new_to_new_page_visitor_,
LiveObjectVisitor::kKeepMarking); LiveObjectVisitor::kKeepMarking);
DCHECK(success); DCHECK(success);
new_to_new_page_visitor_.account_moved_bytes(page->LiveBytes()); new_to_new_page_visitor_.account_moved_bytes(
MarkingState::Internal(page).live_bytes());
// ArrayBufferTracker will be updated during sweeping. // ArrayBufferTracker will be updated during sweeping.
break; break;
case kObjectsOldToOld: case kObjectsOldToOld:
...@@ -3275,7 +3305,7 @@ class EvacuationJobTraits { ...@@ -3275,7 +3305,7 @@ class EvacuationJobTraits {
static bool ProcessPageInParallel(Heap* heap, PerTaskData evacuator, static bool ProcessPageInParallel(Heap* heap, PerTaskData evacuator,
MemoryChunk* chunk, PerPageData) { MemoryChunk* chunk, PerPageData) {
return evacuator->EvacuatePage(reinterpret_cast<Page*>(chunk), return evacuator->EvacuatePage(reinterpret_cast<Page*>(chunk),
MarkingState::FromPageInternal(chunk)); MarkingState::Internal(chunk));
} }
static void FinalizePageSequentially(Heap* heap, MemoryChunk* chunk, static void FinalizePageSequentially(Heap* heap, MemoryChunk* chunk,
...@@ -3318,18 +3348,19 @@ void MarkCompactCollector::EvacuatePagesInParallel() { ...@@ -3318,18 +3348,19 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
int abandoned_pages = 0; int abandoned_pages = 0;
intptr_t live_bytes = 0; intptr_t live_bytes = 0;
for (Page* page : old_space_evacuation_pages_) { for (Page* page : old_space_evacuation_pages_) {
live_bytes += page->LiveBytes(); live_bytes += MarkingState::Internal(page).live_bytes();
job.AddPage(page, &abandoned_pages); job.AddPage(page, &abandoned_pages);
} }
const bool reduce_memory = heap()->ShouldReduceMemory(); const bool reduce_memory = heap()->ShouldReduceMemory();
const Address age_mark = heap()->new_space()->age_mark(); const Address age_mark = heap()->new_space()->age_mark();
for (Page* page : new_space_evacuation_pages_) { for (Page* page : new_space_evacuation_pages_) {
live_bytes += page->LiveBytes(); intptr_t live_bytes_on_page = MarkingState::Internal(page).live_bytes();
live_bytes += live_bytes_on_page;
if (!reduce_memory && !page->NeverEvacuate() && if (!reduce_memory && !page->NeverEvacuate() &&
(page->LiveBytes() > Evacuator::PageEvacuationThreshold()) && (live_bytes_on_page > Evacuator::PageEvacuationThreshold()) &&
!page->Contains(age_mark) && !page->Contains(age_mark) &&
heap()->CanExpandOldGeneration(page->LiveBytes())) { heap()->CanExpandOldGeneration(live_bytes_on_page)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
} else { } else {
...@@ -3445,11 +3476,11 @@ int MarkCompactCollector::Sweeper::RawSweep( ...@@ -3445,11 +3476,11 @@ int MarkCompactCollector::Sweeper::RawSweep(
intptr_t max_freed_bytes = 0; intptr_t max_freed_bytes = 0;
int curr_region = -1; int curr_region = -1;
LiveObjectIterator<kBlackObjects> it(p, MarkingState::FromPageInternal(p)); LiveObjectIterator<kBlackObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL; HeapObject* object = NULL;
while ((object = it.Next()) != NULL) { while ((object = it.Next()) != NULL) {
DCHECK(ObjectMarking::IsBlack(object)); DCHECK(ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
Address free_end = object->address(); Address free_end = object->address();
if (free_end != free_start) { if (free_end != free_start) {
CHECK_GT(free_end, free_start); CHECK_GT(free_end, free_start);
...@@ -3520,7 +3551,7 @@ int MarkCompactCollector::Sweeper::RawSweep( ...@@ -3520,7 +3551,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
} }
// Clear the mark bits of that page and reset live bytes count. // Clear the mark bits of that page and reset live bytes count.
p->ClearLiveness(); MarkingState::Internal(p).ClearLiveness();
p->concurrent_sweeping_state().SetValue(Page::kSweepingDone); p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
if (free_list_mode == IGNORE_FREE_LIST) return 0; if (free_list_mode == IGNORE_FREE_LIST) return 0;
...@@ -3539,7 +3570,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) { ...@@ -3539,7 +3570,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
DCHECK(compacting_); DCHECK(compacting_);
// If the object is white than no slots were recorded on it yet. // If the object is white than no slots were recorded on it yet.
if (ObjectMarking::IsWhite(code)) return; if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) return;
// Ignore all slots that might have been recorded in the body of the // Ignore all slots that might have been recorded in the body of the
// deoptimized code object. Assumption: no slots will be recorded for // deoptimized code object. Assumption: no slots will be recorded for
...@@ -3557,8 +3588,8 @@ bool MarkCompactCollector::WillBeDeoptimized(Code* code) { ...@@ -3557,8 +3588,8 @@ bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) { void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) {
EvacuateRecordOnlyVisitor visitor(heap()); EvacuateRecordOnlyVisitor visitor(heap());
LiveObjectVisitor object_visitor; LiveObjectVisitor object_visitor;
object_visitor.VisitBlackObjects(page, MarkingState::FromPageInternal(page), object_visitor.VisitBlackObjects(page, MarkingState::Internal(page), &visitor,
&visitor, LiveObjectVisitor::kKeepMarking); LiveObjectVisitor::kKeepMarking);
} }
template <class Visitor> template <class Visitor>
...@@ -3572,7 +3603,7 @@ bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk, ...@@ -3572,7 +3603,7 @@ bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
DCHECK(ObjectMarking::IsBlack(object, state)); DCHECK(ObjectMarking::IsBlack(object, state));
if (!visitor->Visit(object)) { if (!visitor->Visit(object)) {
if (iteration_mode == kClearMarkbits) { if (iteration_mode == kClearMarkbits) {
state.bitmap->ClearRange( state.bitmap()->ClearRange(
chunk->AddressToMarkbitIndex(chunk->area_start()), chunk->AddressToMarkbitIndex(chunk->area_start()),
chunk->AddressToMarkbitIndex(object->address())); chunk->AddressToMarkbitIndex(object->address()));
if (chunk->old_to_new_slots() != nullptr) { if (chunk->old_to_new_slots() != nullptr) {
...@@ -3773,7 +3804,9 @@ class PointerUpdateJobTraits { ...@@ -3773,7 +3804,9 @@ class PointerUpdateJobTraits {
// slot has been recorded multiple times in the remembered set. Since // slot has been recorded multiple times in the remembered set. Since
// there is no forwarding information present we need to check the // there is no forwarding information present we need to check the
// markbits to determine liveness. // markbits to determine liveness.
if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference))) HeapObject* heap_object = reinterpret_cast<HeapObject*>(slot_reference);
if (ObjectMarking::IsBlack(heap_object,
MarkingState::Internal(heap_object)))
return KEEP_SLOT; return KEEP_SLOT;
} else { } else {
DCHECK(!heap->InNewSpace(slot_reference)); DCHECK(!heap->InNewSpace(slot_reference));
...@@ -3839,8 +3872,7 @@ class ToSpacePointerUpdateJobTraits { ...@@ -3839,8 +3872,7 @@ class ToSpacePointerUpdateJobTraits {
static void ProcessPageInParallelVisitLive(Heap* heap, PerTaskData visitor, static void ProcessPageInParallelVisitLive(Heap* heap, PerTaskData visitor,
MemoryChunk* chunk, MemoryChunk* chunk,
PerPageData limits) { PerPageData limits) {
LiveObjectIterator<kBlackObjects> it(chunk, LiveObjectIterator<kBlackObjects> it(chunk, MarkingState::Internal(chunk));
MarkingState::FromPageInternal(chunk));
HeapObject* object = NULL; HeapObject* object = NULL;
while ((object = it.Next()) != NULL) { while ((object = it.Next()) != NULL) {
Map* map = object->map(); Map* map = object->map();
...@@ -3904,7 +3936,7 @@ void MarkCompactCollector::ReleaseEvacuationCandidates() { ...@@ -3904,7 +3936,7 @@ void MarkCompactCollector::ReleaseEvacuationCandidates() {
for (Page* p : old_space_evacuation_pages_) { for (Page* p : old_space_evacuation_pages_) {
if (!p->IsEvacuationCandidate()) continue; if (!p->IsEvacuationCandidate()) continue;
PagedSpace* space = static_cast<PagedSpace*>(p->owner()); PagedSpace* space = static_cast<PagedSpace*>(p->owner());
p->ResetLiveBytes(); MarkingState::Internal(p).SetLiveBytes(0);
CHECK(p->SweepingDone()); CHECK(p->SweepingDone());
space->ReleasePage(p); space->ReleasePage(p);
} }
...@@ -3975,8 +4007,10 @@ void MarkCompactCollector::Sweeper::AddPage(AllocationSpace space, Page* page) { ...@@ -3975,8 +4007,10 @@ void MarkCompactCollector::Sweeper::AddPage(AllocationSpace space, Page* page) {
void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space, void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space,
Page* page) { Page* page) {
page->concurrent_sweeping_state().SetValue(Page::kSweepingPending); page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
DCHECK_GE(page->area_size(), static_cast<size_t>(page->LiveBytes())); DCHECK_GE(page->area_size(),
size_t to_sweep = page->area_size() - page->LiveBytes(); static_cast<size_t>(MarkingState::Internal(page).live_bytes()));
size_t to_sweep =
page->area_size() - MarkingState::Internal(page).live_bytes();
if (space != NEW_SPACE) if (space != NEW_SPACE)
heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep); heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep);
} }
...@@ -4028,7 +4062,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) { ...@@ -4028,7 +4062,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
} }
// One unused page is kept, all further are released before sweeping them. // One unused page is kept, all further are released before sweeping them.
if (p->LiveBytes() == 0) { if (MarkingState::Internal(p).live_bytes() == 0) {
if (unused_page_present) { if (unused_page_present) {
if (FLAG_gc_verbose) { if (FLAG_gc_verbose) {
PrintIsolate(isolate(), "sweeping: released page: %p", PrintIsolate(isolate(), "sweeping: released page: %p",
...@@ -4108,7 +4142,7 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) { ...@@ -4108,7 +4142,7 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
Code* host = Code* host =
isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer( isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
pc); pc);
if (ObjectMarking::IsBlack(host)) { if (ObjectMarking::IsBlack(host, MarkingState::Internal(host))) {
RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
// The target is always in old space, we don't have to record the slot in // The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set. // the old-to-new remembered set.
......
...@@ -32,57 +32,13 @@ class MarkCompactCollector; ...@@ -32,57 +32,13 @@ class MarkCompactCollector;
class MinorMarkCompactCollector; class MinorMarkCompactCollector;
class MarkingVisitor; class MarkingVisitor;
class MarkingState {
public:
static MarkingState FromPageInternal(MemoryChunk* chunk) {
return MarkingState(chunk->markbits<MarkingMode::FULL>(),
chunk->live_bytes_address<MarkingMode::FULL>());
}
static MarkingState FromPageExternal(MemoryChunk* chunk) {
return MarkingState(
chunk->markbits<MarkingMode::YOUNG_GENERATION>(),
chunk->live_bytes_address<MarkingMode::YOUNG_GENERATION>());
}
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap(bitmap), live_bytes(live_bytes) {}
void IncrementLiveBytes(intptr_t by) const {
*live_bytes += static_cast<int>(by);
}
void SetLiveBytes(intptr_t value) const {
*live_bytes = static_cast<int>(value);
}
void ClearLiveness() const {
bitmap->Clear();
*live_bytes = 0;
}
Bitmap* bitmap;
intptr_t* live_bytes;
};
// TODO(mlippautz): Remove duplicate accessors once the architecture for
// different markers is fixed.
class ObjectMarking : public AllStatic { class ObjectMarking : public AllStatic {
public: public:
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj) {
const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address);
return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj, V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
const Address address = obj->address(); const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address); const MemoryChunk* p = MemoryChunk::FromAddress(address);
return state.bitmap->MarkBitFromIndex(p->AddressToMarkbitIndex(address)); return state.bitmap()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
static Marking::ObjectColor Color(HeapObject* obj) {
return Marking::Color(ObjectMarking::MarkBitFrom(obj));
} }
static Marking::ObjectColor Color(HeapObject* obj, static Marking::ObjectColor Color(HeapObject* obj,
...@@ -90,67 +46,33 @@ class ObjectMarking : public AllStatic { ...@@ -90,67 +46,33 @@ class ObjectMarking : public AllStatic {
return Marking::Color(ObjectMarking::MarkBitFrom(obj, state)); return Marking::Color(ObjectMarking::MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj, V8_INLINE static bool IsImpossible(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj, state)); return Marking::IsImpossible<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj, const MarkingState& state) { V8_INLINE static bool IsBlack(HeapObject* obj, const MarkingState& state) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj, state)); return Marking::IsBlack<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj, const MarkingState& state) { V8_INLINE static bool IsWhite(HeapObject* obj, const MarkingState& state) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj, state)); return Marking::IsWhite<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj, const MarkingState& state) { V8_INLINE static bool IsGrey(HeapObject* obj, const MarkingState& state) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj, state)); return Marking::IsGrey<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj, V8_INLINE static bool IsBlackOrGrey(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state)); return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj, V8_INLINE static bool BlackToGrey(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
...@@ -162,12 +84,6 @@ class ObjectMarking : public AllStatic { ...@@ -162,12 +84,6 @@ class ObjectMarking : public AllStatic {
return true; return true;
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj, V8_INLINE static bool WhiteToGrey(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
...@@ -176,13 +92,6 @@ class ObjectMarking : public AllStatic { ...@@ -176,13 +92,6 @@ class ObjectMarking : public AllStatic {
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state)); return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
if (!ObjectMarking::WhiteToGrey<access_mode>(obj)) return false;
return ObjectMarking::GreyToBlack<access_mode>(obj);
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj, V8_INLINE static bool WhiteToBlack(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
...@@ -192,15 +101,6 @@ class ObjectMarking : public AllStatic { ...@@ -192,15 +101,6 @@ class ObjectMarking : public AllStatic {
return ObjectMarking::GreyToBlack<access_mode>(obj, state); return ObjectMarking::GreyToBlack<access_mode>(obj, state);
} }
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC> template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj, V8_INLINE static bool GreyToBlack(HeapObject* obj,
const MarkingState& state) { const MarkingState& state) {
...@@ -413,7 +313,7 @@ class MarkBitCellIterator BASE_EMBEDDED { ...@@ -413,7 +313,7 @@ class MarkBitCellIterator BASE_EMBEDDED {
cell_base_ = chunk_->area_start(); cell_base_ = chunk_->area_start();
cell_index_ = Bitmap::IndexToCell( cell_index_ = Bitmap::IndexToCell(
Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_))); Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_)));
cells_ = state.bitmap->cells(); cells_ = state.bitmap()->cells();
} }
inline bool Done() { return cell_index_ == last_cell_index_; } inline bool Done() { return cell_index_ == last_cell_index_; }
...@@ -526,10 +426,6 @@ class MinorMarkCompactCollector { ...@@ -526,10 +426,6 @@ class MinorMarkCompactCollector {
private: private:
class RootMarkingVisitor; class RootMarkingVisitor;
static MarkingState StateForObject(HeapObject* object) {
return MarkingState::FromPageExternal(Page::FromAddress(object->address()));
}
inline Heap* heap() { return heap_; } inline Heap* heap() { return heap_; }
inline Isolate* isolate() { return heap()->isolate(); } inline Isolate* isolate() { return heap()->isolate(); }
inline MarkingDeque* marking_deque() { return &marking_deque_; } inline MarkingDeque* marking_deque() { return &marking_deque_; }
......
...@@ -343,7 +343,8 @@ static bool IsCowArray(Heap* heap, FixedArrayBase* array) { ...@@ -343,7 +343,8 @@ static bool IsCowArray(Heap* heap, FixedArrayBase* array) {
static bool SameLiveness(HeapObject* obj1, HeapObject* obj2) { static bool SameLiveness(HeapObject* obj1, HeapObject* obj2) {
return obj1 == nullptr || obj2 == nullptr || return obj1 == nullptr || obj2 == nullptr ||
ObjectMarking::Color(obj1) == ObjectMarking::Color(obj2); ObjectMarking::Color(obj1, MarkingState::Internal(obj1)) ==
ObjectMarking::Color(obj2, MarkingState::Internal(obj2));
} }
bool ObjectStatsCollector::RecordFixedArrayHelper(HeapObject* parent, bool ObjectStatsCollector::RecordFixedArrayHelper(HeapObject* parent,
......
...@@ -333,7 +333,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map, ...@@ -333,7 +333,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
// contain smi zero. // contain smi zero.
if (weak_cell->next_cleared() && !weak_cell->cleared()) { if (weak_cell->next_cleared() && !weak_cell->cleared()) {
HeapObject* value = HeapObject::cast(weak_cell->value()); HeapObject* value = HeapObject::cast(weak_cell->value());
if (ObjectMarking::IsBlackOrGrey(value)) { if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Weak cells with live values are directly processed here to reduce // Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause. // the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset); Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
...@@ -522,7 +522,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap, ...@@ -522,7 +522,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
// Code is either on stack, in compilation cache or referenced // Code is either on stack, in compilation cache or referenced
// by optimized version of function. // by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(function->code())) { if (ObjectMarking::IsBlackOrGrey(function->code(),
MarkingState::Internal(function->code()))) {
return false; return false;
} }
...@@ -545,7 +546,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable( ...@@ -545,7 +546,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
Heap* heap, SharedFunctionInfo* shared_info) { Heap* heap, SharedFunctionInfo* shared_info) {
// Code is either on stack, in compilation cache or referenced // Code is either on stack, in compilation cache or referenced
// by optimized version of function. // by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(shared_info->code())) { if (ObjectMarking::IsBlackOrGrey(
shared_info->code(), MarkingState::Internal(shared_info->code()))) {
return false; return false;
} }
......
...@@ -200,8 +200,10 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -200,8 +200,10 @@ class ScavengingVisitor : public StaticVisitorBase {
reinterpret_cast<base::AtomicWord>(target)); reinterpret_cast<base::AtomicWord>(target));
if (object_contents == POINTER_OBJECT) { if (object_contents == POINTER_OBJECT) {
heap->promotion_queue()->insert(target, object_size, // TODO(mlippautz): Query collector for marking state.
ObjectMarking::IsBlack(object)); heap->promotion_queue()->insert(
target, object_size,
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
} }
heap->IncrementPromotedObjectsSize(object_size); heap->IncrementPromotedObjectsSize(object_size);
return true; return true;
...@@ -245,7 +247,9 @@ class ScavengingVisitor : public StaticVisitorBase { ...@@ -245,7 +247,9 @@ class ScavengingVisitor : public StaticVisitorBase {
DCHECK(map_word.IsForwardingAddress()); DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress(); HeapObject* target = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(target)) { // TODO(mlippautz): Notify collector of this object so we don't have to
// retrieve the state our of thin air.
if (ObjectMarking::IsBlack(target, MarkingState::Internal(target))) {
// This object is black and it might not be rescanned by marker. // This object is black and it might not be rescanned by marker.
// We should explicitly record code entry slot for compaction because // We should explicitly record code entry slot for compaction because
// promotion queue processing (IteratePromotedObjectPointers) will // promotion queue processing (IteratePromotedObjectPointers) will
......
...@@ -182,7 +182,7 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, ...@@ -182,7 +182,7 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
page->AllocateLocalTracker(); page->AllocateLocalTracker();
if (FLAG_minor_mc) { if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap(); page->AllocateYoungGenerationBitmap();
page->ClearLiveness<MarkingMode::YOUNG_GENERATION>(); MarkingState::External(page).ClearLiveness();
} }
return page; return page;
} }
...@@ -230,54 +230,6 @@ void Page::InitializeFreeListCategories() { ...@@ -230,54 +230,6 @@ void Page::InitializeFreeListCategories() {
} }
} }
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(HeapObject* object, int by) {
MemoryChunk::FromAddress(object->address())->IncrementLiveBytes<mode>(by);
}
template <MarkingMode mode>
void MemoryChunk::TraceLiveBytes(intptr_t old_value, intptr_t new_value) {
if (!FLAG_trace_live_bytes) return;
PrintIsolate(heap()->isolate(),
"live-bytes[%p:%s]: %" V8PRIdPTR "-> %" V8PRIdPTR "\n",
static_cast<void*>(this),
mode == MarkingMode::FULL ? "internal" : "external", old_value,
new_value);
}
template <MarkingMode mode>
void MemoryChunk::ResetLiveBytes() {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, 0);
live_byte_count_ = 0;
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_, 0);
young_generation_live_byte_count_ = 0;
break;
}
}
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(int by) {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, live_byte_count_ + by);
live_byte_count_ += by;
DCHECK_GE(live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(live_byte_count_), size_);
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_,
young_generation_live_byte_count_ + by);
young_generation_live_byte_count_ += by;
DCHECK_GE(young_generation_live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(young_generation_live_byte_count_), size_);
break;
}
}
bool PagedSpace::Contains(Address addr) { bool PagedSpace::Contains(Address addr) {
return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this; return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
} }
......
...@@ -538,12 +538,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size, ...@@ -538,12 +538,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->mutex_ = new base::Mutex(); chunk->mutex_ = new base::Mutex();
chunk->available_in_free_list_ = 0; chunk->available_in_free_list_ = 0;
chunk->wasted_memory_ = 0; chunk->wasted_memory_ = 0;
chunk->ClearLiveness();
chunk->young_generation_bitmap_ = nullptr; chunk->young_generation_bitmap_ = nullptr;
chunk->set_next_chunk(nullptr); chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr); chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr; chunk->local_tracker_ = nullptr;
MarkingState::Internal(chunk).ClearLiveness();
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset); DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
if (executable == EXECUTABLE) { if (executable == EXECUTABLE) {
...@@ -854,9 +855,10 @@ void Page::CreateBlackArea(Address start, Address end) { ...@@ -854,9 +855,10 @@ void Page::CreateBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this); DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_NE(start, end); DCHECK_NE(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this); DCHECK_EQ(Page::FromAddress(end - 1), this);
markbits()->SetRange(AddressToMarkbitIndex(start), MarkingState::Internal(this).bitmap()->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end)); AddressToMarkbitIndex(end));
IncrementLiveBytes(static_cast<int>(end - start)); MarkingState::Internal(this).IncrementLiveBytes(
static_cast<int>(end - start));
} }
void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk, void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
...@@ -1196,15 +1198,6 @@ void MemoryChunk::ReleaseYoungGenerationBitmap() { ...@@ -1196,15 +1198,6 @@ void MemoryChunk::ReleaseYoungGenerationBitmap() {
young_generation_bitmap_ = nullptr; young_generation_bitmap_ = nullptr;
} }
template <MarkingMode mode>
void MemoryChunk::ClearLiveness() {
markbits<mode>()->Clear();
ResetLiveBytes<mode>();
}
template void MemoryChunk::ClearLiveness<MarkingMode::FULL>();
template void MemoryChunk::ClearLiveness<MarkingMode::YOUNG_GENERATION>();
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// PagedSpace implementation // PagedSpace implementation
...@@ -1420,9 +1413,11 @@ void PagedSpace::EmptyAllocationInfo() { ...@@ -1420,9 +1413,11 @@ void PagedSpace::EmptyAllocationInfo() {
// Clear the bits in the unused black area. // Clear the bits in the unused black area.
if (current_top != current_limit) { if (current_top != current_limit) {
page->markbits()->ClearRange(page->AddressToMarkbitIndex(current_top), MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(current_limit)); page->AddressToMarkbitIndex(current_top),
page->IncrementLiveBytes(-static_cast<int>(current_limit - current_top)); page->AddressToMarkbitIndex(current_limit));
MarkingState::Internal(page).IncrementLiveBytes(
-static_cast<int>(current_limit - current_top));
} }
} }
...@@ -1436,7 +1431,7 @@ void PagedSpace::IncreaseCapacity(size_t bytes) { ...@@ -1436,7 +1431,7 @@ void PagedSpace::IncreaseCapacity(size_t bytes) {
} }
void PagedSpace::ReleasePage(Page* page) { void PagedSpace::ReleasePage(Page* page) {
DCHECK_EQ(page->LiveBytes(), 0); DCHECK_EQ(0, MarkingState::Internal(page).live_bytes());
DCHECK_EQ(page->owner(), this); DCHECK_EQ(page->owner(), this);
free_list_.EvictFreeListItems(page); free_list_.EvictFreeListItems(page);
...@@ -1497,14 +1492,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) { ...@@ -1497,14 +1492,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// All the interior pointers should be contained in the heap. // All the interior pointers should be contained in the heap.
int size = object->Size(); int size = object->Size();
object->IterateBody(map->instance_type(), size, visitor); object->IterateBody(map->instance_type(), size, visitor);
if (ObjectMarking::IsBlack(object)) { if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
black_size += size; black_size += size;
} }
CHECK(object->address() + size <= top); CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size; end_of_previous_object = object->address() + size;
} }
CHECK_LE(black_size, page->LiveBytes()); CHECK_LE(black_size, MarkingState::Internal(page).live_bytes());
} }
CHECK(allocation_pointer_found_in_space); CHECK(allocation_pointer_found_in_space);
} }
...@@ -1637,7 +1632,7 @@ bool SemiSpace::EnsureCurrentCapacity() { ...@@ -1637,7 +1632,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
if (current_page == nullptr) return false; if (current_page == nullptr) return false;
DCHECK_NOT_NULL(current_page); DCHECK_NOT_NULL(current_page);
current_page->InsertAfter(anchor()); current_page->InsertAfter(anchor());
current_page->ClearLiveness(); MarkingState::Internal(current_page).ClearLiveness();
current_page->SetFlags(anchor()->prev_page()->GetFlags(), current_page->SetFlags(anchor()->prev_page()->GetFlags(),
static_cast<uintptr_t>(Page::kCopyAllFlags)); static_cast<uintptr_t>(Page::kCopyAllFlags));
heap()->CreateFillerObjectAt(current_page->area_start(), heap()->CreateFillerObjectAt(current_page->area_start(),
...@@ -1709,7 +1704,7 @@ void NewSpace::ResetAllocationInfo() { ...@@ -1709,7 +1704,7 @@ void NewSpace::ResetAllocationInfo() {
UpdateAllocationInfo(); UpdateAllocationInfo();
// Clear all mark-bits in the to-space. // Clear all mark-bits in the to-space.
for (Page* p : to_space_) { for (Page* p : to_space_) {
p->ClearLiveness(); MarkingState::Internal(p).ClearLiveness();
} }
InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0); InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0);
} }
...@@ -2010,7 +2005,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) { ...@@ -2010,7 +2005,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
return false; return false;
} }
new_page->InsertAfter(last_page); new_page->InsertAfter(last_page);
new_page->ClearLiveness(); MarkingState::Internal(new_page).ClearLiveness();
// Duplicate the flags that was set on the old page. // Duplicate the flags that was set on the old page.
new_page->SetFlags(last_page->GetFlags(), Page::kCopyOnFlipFlagsMask); new_page->SetFlags(last_page->GetFlags(), Page::kCopyOnFlipFlagsMask);
last_page = new_page; last_page = new_page;
...@@ -2071,7 +2066,7 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) { ...@@ -2071,7 +2066,7 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
page->ClearFlag(MemoryChunk::IN_FROM_SPACE); page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->SetFlag(MemoryChunk::IN_TO_SPACE); page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK); page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
page->ResetLiveBytes(); MarkingState::Internal(page).SetLiveBytes(0);
} else { } else {
page->SetFlag(MemoryChunk::IN_FROM_SPACE); page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::IN_TO_SPACE); page->ClearFlag(MemoryChunk::IN_TO_SPACE);
...@@ -3044,7 +3039,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size, ...@@ -3044,7 +3039,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
ClearRecordedSlots::kNo); ClearRecordedSlots::kNo);
if (heap()->incremental_marking()->black_allocation()) { if (heap()->incremental_marking()->black_allocation()) {
ObjectMarking::WhiteToBlack(object); ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
} }
return object; return object;
} }
...@@ -3091,13 +3086,14 @@ LargePage* LargeObjectSpace::FindPage(Address a) { ...@@ -3091,13 +3086,14 @@ LargePage* LargeObjectSpace::FindPage(Address a) {
void LargeObjectSpace::ClearMarkingStateOfLiveObjects() { void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
LargeObjectIterator it(this); LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
if (ObjectMarking::IsBlackOrGrey(obj)) { if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj))) {
Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj)); Marking::MarkWhite(
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
chunk->ResetProgressBar(); chunk->ResetProgressBar();
chunk->ResetLiveBytes(); MarkingState::Internal(chunk).SetLiveBytes(0);
} }
DCHECK(ObjectMarking::IsWhite(obj)); DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
} }
} }
...@@ -3139,8 +3135,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() { ...@@ -3139,8 +3135,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
LargePage* current = first_page_; LargePage* current = first_page_;
while (current != NULL) { while (current != NULL) {
HeapObject* object = current->GetObject(); HeapObject* object = current->GetObject();
DCHECK(!ObjectMarking::IsGrey(object)); DCHECK(!ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
if (ObjectMarking::IsBlack(object)) { if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Address free_start; Address free_start;
if ((free_start = current->GetAddressToShrink()) != 0) { if ((free_start = current->GetAddressToShrink()) != 0) {
// TODO(hpayer): Perform partial free concurrently. // TODO(hpayer): Perform partial free concurrently.
...@@ -3276,7 +3272,8 @@ void Page::Print() { ...@@ -3276,7 +3272,8 @@ void Page::Print() {
unsigned mark_size = 0; unsigned mark_size = 0;
for (HeapObject* object = objects.Next(); object != NULL; for (HeapObject* object = objects.Next(); object != NULL;
object = objects.Next()) { object = objects.Next()) {
bool is_marked = ObjectMarking::IsBlackOrGrey(object); bool is_marked =
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object));
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little. PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) { if (is_marked) {
mark_size += object->Size(); mark_size += object->Size();
...@@ -3285,7 +3282,8 @@ void Page::Print() { ...@@ -3285,7 +3282,8 @@ void Page::Print() {
PrintF("\n"); PrintF("\n");
} }
printf(" --------------------------------------\n"); printf(" --------------------------------------\n");
printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); printf(" Marked: %x, LiveCount: %" V8PRIdPTR "\n", mark_size,
MarkingState::Internal(this).live_bytes());
} }
#endif // DEBUG #endif // DEBUG
......
...@@ -224,10 +224,6 @@ class FreeListCategory { ...@@ -224,10 +224,6 @@ class FreeListCategory {
friend class PagedSpace; friend class PagedSpace;
}; };
// MarkingMode determines which bitmaps and counters should be used when
// accessing marking information on MemoryChunk.
enum class MarkingMode { FULL, YOUNG_GENERATION };
// MemoryChunk represents a memory region owned by a specific space. // MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always // It is divided into the header and the body. Chunk start is always
// 1MB aligned. Start of the body is aligned so it can accommodate // 1MB aligned. Start of the body is aligned so it can accommodate
...@@ -377,9 +373,6 @@ class MemoryChunk { ...@@ -377,9 +373,6 @@ class MemoryChunk {
static const int kAllocatableMemory = kPageSize - kObjectStartOffset; static const int kAllocatableMemory = kPageSize - kObjectStartOffset;
template <MarkingMode mode = MarkingMode::FULL>
static inline void IncrementLiveBytes(HeapObject* object, int by);
// Only works if the pointer is in the first kPageSize of the MemoryChunk. // Only works if the pointer is in the first kPageSize of the MemoryChunk.
static MemoryChunk* FromAddress(Address a) { static MemoryChunk* FromAddress(Address a) {
return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask); return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask);
...@@ -427,33 +420,6 @@ class MemoryChunk { ...@@ -427,33 +420,6 @@ class MemoryChunk {
return concurrent_sweeping_state().Value() == kSweepingDone; return concurrent_sweeping_state().Value() == kSweepingDone;
} }
// Manage live byte count, i.e., count of bytes in black objects.
template <MarkingMode mode = MarkingMode::FULL>
inline void ResetLiveBytes();
template <MarkingMode mode = MarkingMode::FULL>
inline void IncrementLiveBytes(int by);
template <MarkingMode mode = MarkingMode::FULL>
int LiveBytes() {
switch (mode) {
case MarkingMode::FULL:
DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
return static_cast<int>(live_byte_count_);
case MarkingMode::YOUNG_GENERATION:
DCHECK_LE(static_cast<unsigned>(young_generation_live_byte_count_),
size_);
return static_cast<int>(young_generation_live_byte_count_);
}
UNREACHABLE();
return 0;
}
void SetLiveBytes(int live_bytes) {
DCHECK_GE(live_bytes, 0);
DCHECK_LE(static_cast<size_t>(live_bytes), size_);
live_byte_count_ = live_bytes;
}
size_t size() const { return size_; } size_t size() const { return size_; }
void set_size(size_t size) { size_ = size; } void set_size(size_t size) { size_ = size; }
...@@ -513,19 +479,6 @@ class MemoryChunk { ...@@ -513,19 +479,6 @@ class MemoryChunk {
} }
} }
template <MarkingMode mode = MarkingMode::FULL>
inline Bitmap* markbits() const {
return mode == MarkingMode::FULL
? Bitmap::FromAddress(address() + kHeaderSize)
: young_generation_bitmap_;
}
template <MarkingMode mode = MarkingMode::FULL>
inline intptr_t* live_bytes_address() {
return mode == MarkingMode::FULL ? &live_byte_count_
: &young_generation_live_byte_count_;
}
inline uint32_t AddressToMarkbitIndex(Address addr) const { inline uint32_t AddressToMarkbitIndex(Address addr) const {
return static_cast<uint32_t>(addr - this->address()) >> kPointerSizeLog2; return static_cast<uint32_t>(addr - this->address()) >> kPointerSizeLog2;
} }
...@@ -534,11 +487,6 @@ class MemoryChunk { ...@@ -534,11 +487,6 @@ class MemoryChunk {
return this->address() + (index << kPointerSizeLog2); return this->address() + (index << kPointerSizeLog2);
} }
template <MarkingMode mode = MarkingMode::FULL>
void ClearLiveness();
void PrintMarkbits() { markbits()->Print(); }
void SetFlag(Flag flag) { flags_ |= flag; } void SetFlag(Flag flag) { flags_ |= flag; }
void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); } void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); }
bool IsFlagSet(Flag flag) { return (flags_ & flag) != 0; } bool IsFlagSet(Flag flag) { return (flags_ & flag) != 0; }
...@@ -622,9 +570,6 @@ class MemoryChunk { ...@@ -622,9 +570,6 @@ class MemoryChunk {
base::VirtualMemory* reserved_memory() { return &reservation_; } base::VirtualMemory* reserved_memory() { return &reservation_; }
template <MarkingMode mode = MarkingMode::FULL>
inline void TraceLiveBytes(intptr_t old_value, intptr_t new_value);
size_t size_; size_t size_;
Flags flags_; Flags flags_;
...@@ -686,6 +631,7 @@ class MemoryChunk { ...@@ -686,6 +631,7 @@ class MemoryChunk {
private: private:
void InitializeReservedMemory() { reservation_.Reset(); } void InitializeReservedMemory() { reservation_.Reset(); }
friend class MarkingState;
friend class MemoryAllocator; friend class MemoryAllocator;
friend class MemoryChunkValidator; friend class MemoryChunkValidator;
}; };
...@@ -695,6 +641,50 @@ DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags) ...@@ -695,6 +641,50 @@ DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags)
static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory, static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
"kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory"); "kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory");
// Bundles a marking bitmap with its live-byte counter so that callers of
// ObjectMarking and friends must name explicitly which marking state they
// operate on, instead of implicitly assuming the full-GC state.
// Construct instances via the Internal()/External() factories below.
// Accesses MemoryChunk private members via `friend class MarkingState`.
class MarkingState {
public:
// Marking state backed by the chunk's young-generation bitmap and
// young-generation live-byte counter.
static MarkingState External(HeapObject* object) {
return External(MemoryChunk::FromAddress(object->address()));
}
static MarkingState External(MemoryChunk* chunk) {
return MarkingState(chunk->young_generation_bitmap_,
&chunk->young_generation_live_byte_count_);
}
// Marking state backed by the chunk's regular markbits (the Bitmap placed
// directly after the MemoryChunk header) and its live_byte_count_.
static MarkingState Internal(HeapObject* object) {
return Internal(MemoryChunk::FromAddress(object->address()));
}
static MarkingState Internal(MemoryChunk* chunk) {
return MarkingState(
Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize),
&chunk->live_byte_count_);
}
// Non-owning: |bitmap| and |live_bytes| must outlive this MarkingState
// (they point into the owning MemoryChunk).
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap_(bitmap), live_bytes_(live_bytes) {}
// Adjusts the live-byte count by |by| (may be negative).
// NOTE(review): |by| is narrowed through static_cast<int> before the add —
// assumes the delta fits in int; confirm for very large objects.
void IncrementLiveBytes(intptr_t by) const {
*live_bytes_ += static_cast<int>(by);
}
// Overwrites the live-byte count. Same static_cast<int> narrowing caveat
// as IncrementLiveBytes().
void SetLiveBytes(intptr_t value) const {
*live_bytes_ = static_cast<int>(value);
}
// Resets this state: clears all mark bits and zeroes the live-byte count.
void ClearLiveness() const {
bitmap_->Clear();
*live_bytes_ = 0;
}
Bitmap* bitmap() const { return bitmap_; }
intptr_t live_bytes() const { return *live_bytes_; }
private:
Bitmap* bitmap_;       // mark bits for the chunk (not owned)
intptr_t* live_bytes_; // live-byte counter for the chunk (not owned)
};
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// A page is a memory chunk of a size 1MB. Large object pages may be larger. // A page is a memory chunk of a size 1MB. Large object pages may be larger.
// //
......
...@@ -2005,9 +2005,10 @@ void WeakCell::initialize(HeapObject* val) { ...@@ -2005,9 +2005,10 @@ void WeakCell::initialize(HeapObject* val) {
// We just have to execute the generational barrier here because we never // We just have to execute the generational barrier here because we never
// mark through a weak cell and collect evacuation candidates when we process // mark through a weak cell and collect evacuation candidates when we process
// all weak cells. // all weak cells.
WriteBarrierMode mode = ObjectMarking::IsBlack(this) WriteBarrierMode mode =
? UPDATE_WRITE_BARRIER ObjectMarking::IsBlack(this, MarkingState::Internal(this))
: UPDATE_WEAK_WRITE_BARRIER; ? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode); CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
} }
......
...@@ -26,7 +26,7 @@ void CheckInvariantsOfAbortedPage(Page* page) { ...@@ -26,7 +26,7 @@ void CheckInvariantsOfAbortedPage(Page* page) {
// 1) Markbits are cleared // 1) Markbits are cleared
// 2) The page is not marked as evacuation candidate anymore // 2) The page is not marked as evacuation candidate anymore
// 3) The page is not marked as aborted compaction anymore. // 3) The page is not marked as aborted compaction anymore.
CHECK(page->markbits()->IsClean()); CHECK(MarkingState::Internal(page).bitmap()->IsClean());
CHECK(!page->IsEvacuationCandidate()); CHECK(!page->IsEvacuationCandidate());
CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
} }
......
...@@ -2416,8 +2416,9 @@ TEST(InstanceOfStubWriteBarrier) { ...@@ -2416,8 +2416,9 @@ TEST(InstanceOfStubWriteBarrier) {
CHECK(f->IsOptimized()); CHECK(f->IsOptimized());
while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) && while (
!marking->IsStopped()) { !ObjectMarking::IsBlack(f->code(), MarkingState::Internal(f->code())) &&
!marking->IsStopped()) {
// Discard any pending GC requests otherwise we will get GC when we enter // Discard any pending GC requests otherwise we will get GC when we enter
// code below. // code below.
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
...@@ -5363,8 +5364,9 @@ TEST(Regress3631) { ...@@ -5363,8 +5364,9 @@ TEST(Regress3631) {
Handle<JSReceiver> obj = Handle<JSReceiver> obj =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj)); Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
while (!Marking::IsBlack( HeapObject* weak_map_table = HeapObject::cast(weak_map->table());
ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) && while (!ObjectMarking::IsBlack(weak_map_table,
MarkingState::Internal(weak_map_table)) &&
!marking->IsStopped()) { !marking->IsStopped()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8); IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
...@@ -6114,10 +6116,10 @@ TEST(Regress598319) { ...@@ -6114,10 +6116,10 @@ TEST(Regress598319) {
} }
CHECK(heap->lo_space()->Contains(arr.get())); CHECK(heap->lo_space()->Contains(arr.get()));
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get()))); CHECK(ObjectMarking::IsWhite(arr.get(), MarkingState::Internal(arr.get())));
for (int i = 0; i < arr.get()->length(); i++) { for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite( HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
} }
// Start incremental marking. // Start incremental marking.
...@@ -6131,8 +6133,8 @@ TEST(Regress598319) { ...@@ -6131,8 +6133,8 @@ TEST(Regress598319) {
// Check that we have not marked the interesting array during root scanning. // Check that we have not marked the interesting array during root scanning.
for (int i = 0; i < arr.get()->length(); i++) { for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite( HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
} }
// Now we search for a state where we are in incremental marking and have // Now we search for a state where we are in incremental marking and have
...@@ -6167,8 +6169,8 @@ TEST(Regress598319) { ...@@ -6167,8 +6169,8 @@ TEST(Regress598319) {
// All objects need to be black after marking. If a white object crossed the // All objects need to be black after marking. If a white object crossed the
// progress bar, we would fail here. // progress bar, we would fail here.
for (int i = 0; i < arr.get()->length(); i++) { for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsBlack( HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); CHECK(ObjectMarking::IsBlack(arr_value, MarkingState::Internal(arr_value)));
} }
} }
...@@ -6314,13 +6316,13 @@ TEST(LeftTrimFixedArrayInBlackArea) { ...@@ -6314,13 +6316,13 @@ TEST(LeftTrimFixedArrayInBlackArea) {
isolate->factory()->NewFixedArray(4, TENURED); isolate->factory()->NewFixedArray(4, TENURED);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED); Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
CHECK(heap->old_space()->Contains(*array)); CHECK(heap->old_space()->Contains(*array));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array))); CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
// Now left trim the allocated black area. A filler has to be installed // Now left trim the allocated black area. A filler has to be installed
// for the trimmed area and all mark bits of the trimmed area have to be // for the trimmed area and all mark bits of the trimmed area have to be
// cleared. // cleared.
FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10); FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed))); CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
heap::GcAndSweep(heap, OLD_SPACE); heap::GcAndSweep(heap, OLD_SPACE);
} }
...@@ -6357,8 +6359,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { ...@@ -6357,8 +6359,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address(); Address start_address = array->address();
Address end_address = start_address + array->Size(); Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address); Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array))); CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(page->markbits()->AllBitsSetInRange( CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address), page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address))); page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array)); CHECK(heap->old_space()->Contains(*array));
...@@ -6371,8 +6373,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { ...@@ -6371,8 +6373,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, 1); trimmed = heap->LeftTrimFixedArray(previous, 1);
HeapObject* filler = HeapObject::FromAddress(previous->address()); HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller()); CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed))); CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous))); CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed; previous = trimmed;
} }
...@@ -6382,8 +6384,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { ...@@ -6382,8 +6384,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, i); trimmed = heap->LeftTrimFixedArray(previous, i);
HeapObject* filler = HeapObject::FromAddress(previous->address()); HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller()); CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed))); CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous))); CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed; previous = trimmed;
} }
} }
...@@ -6423,8 +6425,9 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { ...@@ -6423,8 +6425,9 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address(); Address start_address = array->address();
Address end_address = start_address + array->Size(); Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address); Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array))); CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(page->markbits()->AllBitsSetInRange(
CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address), page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address))); page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array)); CHECK(heap->old_space()->Contains(*array));
...@@ -6434,7 +6437,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { ...@@ -6434,7 +6437,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, 1); heap->RightTrimFixedArray(*array, 1);
HeapObject* filler = HeapObject::FromAddress(previous); HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller()); CHECK(filler->IsFiller());
CHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(filler))); CHECK(ObjectMarking::IsImpossible(filler, MarkingState::Internal(filler)));
// Trim 10 times by one, two, and three word. // Trim 10 times by one, two, and three word.
for (int i = 1; i <= 3; i++) { for (int i = 1; i <= 3; i++) {
...@@ -6443,7 +6446,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { ...@@ -6443,7 +6446,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, i); heap->RightTrimFixedArray(*array, i);
HeapObject* filler = HeapObject::FromAddress(previous); HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller()); CHECK(filler->IsFiller());
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(filler))); CHECK(ObjectMarking::IsWhite(filler, MarkingState::Internal(filler)));
} }
} }
......
...@@ -355,8 +355,7 @@ TEST(Regress5829) { ...@@ -355,8 +355,7 @@ TEST(Regress5829) {
ClearRecordedSlots::kNo); ClearRecordedSlots::kNo);
heap->old_space()->EmptyAllocationInfo(); heap->old_space()->EmptyAllocationInfo();
Page* page = Page::FromAddress(array->address()); Page* page = Page::FromAddress(array->address());
LiveObjectIterator<kGreyObjects> it(page, LiveObjectIterator<kGreyObjects> it(page, MarkingState::Internal(page));
MarkingState::FromPageInternal(page));
HeapObject* object = nullptr; HeapObject* object = nullptr;
while ((object = it.Next()) != nullptr) { while ((object = it.Next()) != nullptr) {
CHECK(!object->IsFiller()); CHECK(!object->IsFiller());
......
...@@ -65,7 +65,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) { ...@@ -65,7 +65,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
// Sanity check that the page meets the requirements for promotion. // Sanity check that the page meets the requirements for promotion.
const int threshold_bytes = const int threshold_bytes =
FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100; FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
CHECK_GE(first_page->LiveBytes(), threshold_bytes); CHECK_GE(MarkingState::Internal(first_page).live_bytes(), threshold_bytes);
// Actual checks: The page is in new space first, but is moved to old space // Actual checks: The page is in new space first, but is moved to old space
// during a full GC. // during a full GC.
......
...@@ -1172,7 +1172,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) { ...@@ -1172,7 +1172,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
// in compacting mode and |obj_value|'s page is an evacuation candidate). // in compacting mode and |obj_value|'s page is an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking(); IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsCompacting()); CHECK(marking->IsCompacting());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj))); CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value)); CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger GCs so that |obj| moves to old gen. // Trigger GCs so that |obj| moves to old gen.
...@@ -1492,8 +1492,8 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map, ...@@ -1492,8 +1492,8 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
// still active and |obj_value|'s page is indeed an evacuation candidate). // still active and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking(); IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking()); CHECK(marking->IsMarking());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj))); CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj_value))); CHECK(ObjectMarking::IsBlack(*obj_value, MarkingState::Internal(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value)); CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger incremental write barrier, which should add a slot to remembered // Trigger incremental write barrier, which should add a slot to remembered
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment