Commit 79ac83e1 authored by mlippautz, committed by Commit bot

[heap] Enforce explicit MarkingState

Require the use of MarkingState when going through ObjectMarking
and friends.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2770253002
Cr-Commit-Position: refs/heads/master@{#44123}
parent c7ec5bf4
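The mechanical pattern repeated in every hunk below: ObjectMarking queries and transitions gain an explicit MarkingState argument instead of deriving the state inside the accessor. A minimal before/after sketch in the patch's own identifiers (the surrounding statement is illustrative only):

// Before this commit: the bitmap was derived from the object alone.
if (ObjectMarking::IsBlack(object)) { /* ... */ }

// After: the caller names the state. MarkingState::Internal(object) selects
// the full mark-compact bitmap and live-bytes counter of the object's
// MemoryChunk; MarkingState::External(object) selects the young-generation
// pair used by the minor mark-compact collector.
if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) { /* ... */ }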
@@ -19,7 +19,10 @@ void LocalArrayBufferTracker::Free() {
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
if ((free_mode == kFreeAll) || ObjectMarking::IsWhite(buffer)) {
// TODO(mlippautz): Create a dependency on the collector to avoid getting
// the marking state out of thin air.
if ((free_mode == kFreeAll) ||
ObjectMarking::IsWhite(buffer, MarkingState::Internal(buffer))) {
const size_t len = it->second;
heap_->isolate()->array_buffer_allocator()->Free(buffer->backing_store(),
len);
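The TODO above concerns where the state comes from: MarkingState::Internal(buffer) recomputes it from the object's address rather than receiving it from the owning collector. A sketch of what the factory expands to (see the MarkingState definition in spaces.h further down):

// "Out of thin air": the state is recomputed from the address alone.
MarkingState state =
    MarkingState::Internal(MemoryChunk::FromAddress(buffer->address()));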
@@ -3164,9 +3164,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
lo_space()->AdjustLiveBytes(by);
} else if (!in_heap_iterator() &&
!mark_compact_collector()->sweeping_in_progress() &&
ObjectMarking::IsBlack(object)) {
ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
MemoryChunk::IncrementLiveBytes(object, by);
MarkingState::Internal(object).IncrementLiveBytes(by);
}
}
@@ -3201,8 +3201,9 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
// Transfer the mark bits to their new location if the object is not within
// a black area.
if (!incremental_marking()->black_allocation() ||
!Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::FromAddress(new_start)))) {
!Marking::IsBlack(ObjectMarking::MarkBitFrom(
HeapObject::FromAddress(new_start),
MarkingState::Internal(HeapObject::FromAddress(new_start))))) {
IncrementalMarking::TransferMark(this, object,
HeapObject::FromAddress(new_start));
}
@@ -3285,9 +3286,9 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
// Clear the mark bits of the black area that belongs now to the filler.
// This is an optimization. The sweeper will release black fillers anyway.
if (incremental_marking()->black_allocation() &&
ObjectMarking::IsBlackOrGrey(filler)) {
ObjectMarking::IsBlackOrGrey(filler, MarkingState::Internal(filler))) {
Page* page = Page::FromAddress(new_end);
page->markbits()->ClearRange(
MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(new_end),
page->AddressToMarkbitIndex(new_end + bytes_to_trim));
}
@@ -4274,8 +4275,9 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
HeapObject* obj = HeapObject::FromAddress(addr);
// There might be grey objects due to black to grey transitions in
// incremental marking. E.g. see VisitNativeContextIncremental.
DCHECK(ObjectMarking::IsBlackOrGrey(obj));
if (ObjectMarking::IsBlack(obj)) {
DCHECK(
ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
incremental_marking()->IterateBlackObject(obj);
}
addr += obj->Size();
@@ -4873,7 +4875,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
// it would be a violation of the invariant to record its slots.
bool record_slots = false;
if (incremental_marking()->IsCompacting()) {
record_slots = ObjectMarking::IsBlack(target);
record_slots =
ObjectMarking::IsBlack(target, MarkingState::Internal(target));
}
IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
@@ -6107,7 +6110,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
bool SkipObject(HeapObject* object) {
if (object->IsFiller()) return true;
return ObjectMarking::IsWhite(object);
return ObjectMarking::IsWhite(object, MarkingState::Internal(object));
}
private:
@@ -6121,7 +6124,8 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
HeapObject* obj = HeapObject::cast(*p);
// Use Marking instead of ObjectMarking to avoid adjusting live bytes
// counter.
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
if (Marking::IsWhite(mark_bit)) {
Marking::WhiteToBlack(mark_bit);
marking_stack_.Add(obj);
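The comment in this hunk marks the boundary between the two layers: ObjectMarking transitions maintain the live-bytes counter, raw Marking flips the bit only. A sketch of the contrast, using identifiers from this patch:

// Raw layer: mark bit only; live bytes untouched. This is what the filter
// above wants, since it repurposes the bitmap for a private reachability walk.
MarkBit bit = ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
Marking::WhiteToBlack(bit);

// ObjectMarking layer: the same transition also adjusts live bytes
// (GreyToBlack adds obj->Size(); see mark-compact.h below).
ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));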
@@ -40,11 +40,14 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value);
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj));
DCHECK(!ObjectMarking::IsImpossible(obj));
const bool is_black = ObjectMarking::IsBlack(obj);
if (is_black && ObjectMarking::IsWhite(value_heap_obj)) {
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
MarkingState::Internal(value_heap_obj)));
DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
const bool is_black =
ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
if (is_black && ObjectMarking::IsWhite(
value_heap_obj, MarkingState::Internal(value_heap_obj))) {
WhiteToGreyAndPush(value_heap_obj);
RestartIfNotMarking();
}
@@ -117,7 +120,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
}
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey(obj);
ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
heap_->mark_compact_collector()->marking_deque()->Push(obj);
}
@@ -135,8 +138,10 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
// size, so the adjustment to the live data count will be zero anyway.
if (from == to) return;
MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to);
MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from);
MarkBit new_mark_bit =
ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
MarkBit old_mark_bit =
ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
if (Marking::IsBlack(old_mark_bit)) {
Marking::MarkBlack(new_mark_bit);
@@ -185,10 +190,11 @@ class IncrementalMarkingMarkingVisitor
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
if (ObjectMarking::IsGrey(object)) {
if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
heap->mark_compact_collector()->marking_deque()->Unshift(object);
} else {
DCHECK(ObjectMarking::IsBlack(object));
DCHECK(
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
heap->mark_compact_collector()->UnshiftBlack(object);
}
heap->incremental_marking()->NotifyIncompleteScanOfObject(
@@ -211,8 +217,10 @@ class IncrementalMarkingMarkingVisitor
HeapObject* heap_obj = HeapObject::cast(cache);
// Mark the object grey if it is white; do not enqueue it into the marking
// deque.
if (ObjectMarking::IsWhite(heap_obj)) {
ObjectMarking::WhiteToGrey(heap_obj);
if (ObjectMarking::IsWhite(heap_obj,
MarkingState::Internal(heap_obj))) {
ObjectMarking::WhiteToGrey(heap_obj,
MarkingState::Internal(heap_obj));
}
}
}
@@ -247,8 +255,10 @@ class IncrementalMarkingMarkingVisitor
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
if (ObjectMarking::IsWhite(heap_object)) {
ObjectMarking::WhiteToBlack(heap_object);
if (ObjectMarking::IsWhite(heap_object,
MarkingState::Internal(heap_object))) {
ObjectMarking::WhiteToBlack(heap_object,
MarkingState::Internal(heap_object));
return true;
}
return false;
@@ -256,7 +266,8 @@ class IncrementalMarkingMarkingVisitor
};
void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() && ObjectMarking::IsBlack(object)) {
if (IsMarking() &&
ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object.
@@ -593,7 +604,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list; they do not need
// clearing.
if (ObjectMarking::IsBlackOrGrey(value)) {
if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
@@ -623,7 +634,9 @@ bool ShouldRetainMap(Map* map, int age) {
}
Object* constructor = map->GetConstructor();
if (!constructor->IsHeapObject() ||
ObjectMarking::IsWhite(HeapObject::cast(constructor))) {
ObjectMarking::IsWhite(
HeapObject::cast(constructor),
MarkingState::Internal(HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
@@ -653,13 +666,15 @@ void IncrementalMarking::RetainMaps() {
int new_age;
Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
ObjectMarking::IsWhite(map)) {
ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
if (ShouldRetainMap(map, age)) {
MarkGrey(heap(), map);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
ObjectMarking::IsWhite(HeapObject::cast(prototype))) {
ObjectMarking::IsWhite(
HeapObject::cast(prototype),
MarkingState::Internal(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
@@ -763,12 +778,14 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
// them.
if (map_word.IsForwardingAddress()) {
HeapObject* dest = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(dest)) continue;
if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
continue;
array[new_top] = dest;
new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)));
DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
}
} else if (obj->map() != filler_map) {
// Skip one word filler objects that appear on the
@@ -776,11 +793,12 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
array[new_top] = obj;
new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)) ||
DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
(MemoryChunk::FromAddress(obj->address())
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
ObjectMarking::IsBlack(obj)));
ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
}
}
marking_deque->set_top(new_top);
@@ -793,7 +811,8 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
#if ENABLE_SLOW_DCHECKS
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
@@ -803,14 +822,14 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
}
void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
if (ObjectMarking::IsWhite(object)) {
if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
heap->incremental_marking()->WhiteToGreyAndPush(object);
}
}
void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
if (ObjectMarking::IsBlack(obj)) return;
ObjectMarking::GreyToBlack(obj);
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
}
intptr_t IncrementalMarking::ProcessMarkingDeque(
@@ -825,7 +844,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
if (obj->IsFiller()) {
DCHECK(!ObjectMarking::IsImpossible(obj));
DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
continue;
}
@@ -880,8 +899,8 @@ void IncrementalMarking::Hurry() {
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
if (ObjectMarking::IsGrey(cache)) {
ObjectMarking::GreyToBlack(cache);
if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
}
}
context = Context::cast(context)->next_context_link();
@@ -186,16 +186,16 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);
V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) {
if (ObjectMarking::IsBlack(to)) {
if (ObjectMarking::IsBlack(to, MarkingState::Internal(to))) {
DCHECK(to->GetHeap()->incremental_marking()->black_allocation());
return;
}
DCHECK(ObjectMarking::IsWhite(to));
if (ObjectMarking::IsGrey(from)) {
ObjectMarking::WhiteToGrey(to);
} else if (ObjectMarking::IsBlack(from)) {
ObjectMarking::WhiteToBlack(to);
DCHECK(ObjectMarking::IsWhite(to, MarkingState::Internal(to)));
if (ObjectMarking::IsGrey(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToGrey(to, MarkingState::Internal(to));
} else if (ObjectMarking::IsBlack(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToBlack(to, MarkingState::Internal(to));
}
}
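For reference, the color transitions TransferColor relies on form a small lattice, with live-bytes bookkeeping attached to the grey/black edges (summarized from the ObjectMarking definitions in mark-compact.h below):

// WhiteToGrey  - object discovered; WhiteToGreyAndPush also puts it on the
//                marking deque
// GreyToBlack  - object scanned; live bytes += object size
// BlackToGrey  - deque-overflow fallback; live bytes -= object size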
@@ -13,37 +13,44 @@ namespace v8 {
namespace internal {
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj)));
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))));
if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj);
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
}
}
void MinorMarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK(
(ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj))));
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))));
if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj, StateForObject(obj));
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
}
}
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
DCHECK(ObjectMarking::IsBlack(obj));
DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
if (!marking_deque()->Unshift(obj)) {
ObjectMarking::BlackToGrey(obj);
ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
}
}
void MarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj)) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj);
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
PushBlack(obj);
}
}
void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj, StateForObject(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj));
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
PushBlack(obj);
}
}
@@ -54,7 +61,8 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
if (target_page->IsEvacuationCandidate() &&
!ShouldSkipEvacuationSlotRecording(object)) {
DCHECK(ObjectMarking::IsBlackOrGrey(object));
DCHECK(
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
RememberedSet<OLD_TO_OLD>::Insert(source_page,
reinterpret_cast<Address>(slot));
}
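Note the split in this file: MarkCompactCollector always passes MarkingState::Internal, while MinorMarkCompactCollector now spells MarkingState::External directly, replacing its StateForObject helper (deleted from mark-compact.h below). Modulo the FromPageExternal -> External rename, that helper expanded to:

// Removed helper, shown for comparison:
static MarkingState StateForObject(HeapObject* object) {
  return MarkingState::External(Page::FromAddress(object->address()));
}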
This diff is collapsed.
@@ -32,57 +32,13 @@ class MarkCompactCollector;
class MinorMarkCompactCollector;
class MarkingVisitor;
class MarkingState {
public:
static MarkingState FromPageInternal(MemoryChunk* chunk) {
return MarkingState(chunk->markbits<MarkingMode::FULL>(),
chunk->live_bytes_address<MarkingMode::FULL>());
}
static MarkingState FromPageExternal(MemoryChunk* chunk) {
return MarkingState(
chunk->markbits<MarkingMode::YOUNG_GENERATION>(),
chunk->live_bytes_address<MarkingMode::YOUNG_GENERATION>());
}
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap(bitmap), live_bytes(live_bytes) {}
void IncrementLiveBytes(intptr_t by) const {
*live_bytes += static_cast<int>(by);
}
void SetLiveBytes(intptr_t value) const {
*live_bytes = static_cast<int>(value);
}
void ClearLiveness() const {
bitmap->Clear();
*live_bytes = 0;
}
Bitmap* bitmap;
intptr_t* live_bytes;
};
// TODO(mlippautz): Remove duplicate accessors once the architecture for
// different markers is fixed.
class ObjectMarking : public AllStatic {
public:
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj) {
const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address);
return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj,
const MarkingState& state) {
const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address);
return state.bitmap->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
static Marking::ObjectColor Color(HeapObject* obj) {
return Marking::Color(ObjectMarking::MarkBitFrom(obj));
return state.bitmap()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
static Marking::ObjectColor Color(HeapObject* obj,
@@ -90,67 +46,33 @@ class ObjectMarking : public AllStatic {
return Marking::Color(ObjectMarking::MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj,
const MarkingState& state) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj, const MarkingState& state) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj, const MarkingState& state) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj, const MarkingState& state) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj,
const MarkingState& state) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj,
const MarkingState& state) {
@@ -162,12 +84,6 @@ class ObjectMarking : public AllStatic {
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj,
const MarkingState& state) {
@@ -176,13 +92,6 @@ class ObjectMarking : public AllStatic {
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
if (!ObjectMarking::WhiteToGrey<access_mode>(obj)) return false;
return ObjectMarking::GreyToBlack<access_mode>(obj);
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj,
const MarkingState& state) {
@@ -192,15 +101,6 @@ class ObjectMarking : public AllStatic {
return ObjectMarking::GreyToBlack<access_mode>(obj, state);
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj,
const MarkingState& state) {
@@ -413,7 +313,7 @@ class MarkBitCellIterator BASE_EMBEDDED {
cell_base_ = chunk_->area_start();
cell_index_ = Bitmap::IndexToCell(
Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_)));
cells_ = state.bitmap->cells();
cells_ = state.bitmap()->cells();
}
inline bool Done() { return cell_index_ == last_cell_index_; }
@@ -526,10 +426,6 @@ class MinorMarkCompactCollector {
private:
class RootMarkingVisitor;
static MarkingState StateForObject(HeapObject* object) {
return MarkingState::FromPageExternal(Page::FromAddress(object->address()));
}
inline Heap* heap() { return heap_; }
inline Isolate* isolate() { return heap()->isolate(); }
inline MarkingDeque* marking_deque() { return &marking_deque_; }
@@ -343,7 +343,8 @@ static bool IsCowArray(Heap* heap, FixedArrayBase* array) {
static bool SameLiveness(HeapObject* obj1, HeapObject* obj2) {
return obj1 == nullptr || obj2 == nullptr ||
ObjectMarking::Color(obj1) == ObjectMarking::Color(obj2);
ObjectMarking::Color(obj1, MarkingState::Internal(obj1)) ==
ObjectMarking::Color(obj2, MarkingState::Internal(obj2));
}
bool ObjectStatsCollector::RecordFixedArrayHelper(HeapObject* parent,
@@ -333,7 +333,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
// contain smi zero.
if (weak_cell->next_cleared() && !weak_cell->cleared()) {
HeapObject* value = HeapObject::cast(weak_cell->value());
if (ObjectMarking::IsBlackOrGrey(value)) {
if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
@@ -522,7 +522,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(function->code())) {
if (ObjectMarking::IsBlackOrGrey(function->code(),
MarkingState::Internal(function->code()))) {
return false;
}
@@ -545,7 +546,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
Heap* heap, SharedFunctionInfo* shared_info) {
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(shared_info->code())) {
if (ObjectMarking::IsBlackOrGrey(
shared_info->code(), MarkingState::Internal(shared_info->code()))) {
return false;
}
@@ -200,8 +200,10 @@ class ScavengingVisitor : public StaticVisitorBase {
reinterpret_cast<base::AtomicWord>(target));
if (object_contents == POINTER_OBJECT) {
heap->promotion_queue()->insert(target, object_size,
ObjectMarking::IsBlack(object));
// TODO(mlippautz): Query collector for marking state.
heap->promotion_queue()->insert(
target, object_size,
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
}
heap->IncrementPromotedObjectsSize(object_size);
return true;
@@ -245,7 +247,9 @@ class ScavengingVisitor : public StaticVisitorBase {
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(target)) {
// TODO(mlippautz): Notify collector of this object so we don't have to
// retrieve the state out of thin air.
if (ObjectMarking::IsBlack(target, MarkingState::Internal(target))) {
// This object is black and it might not be rescanned by the marker.
// We should explicitly record code entry slot for compaction because
// promotion queue processing (IteratePromotedObjectPointers) will
@@ -182,7 +182,7 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
page->AllocateLocalTracker();
if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
page->ClearLiveness<MarkingMode::YOUNG_GENERATION>();
MarkingState::External(page).ClearLiveness();
}
return page;
}
@@ -230,54 +230,6 @@ void Page::InitializeFreeListCategories() {
}
}
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(HeapObject* object, int by) {
MemoryChunk::FromAddress(object->address())->IncrementLiveBytes<mode>(by);
}
template <MarkingMode mode>
void MemoryChunk::TraceLiveBytes(intptr_t old_value, intptr_t new_value) {
if (!FLAG_trace_live_bytes) return;
PrintIsolate(heap()->isolate(),
"live-bytes[%p:%s]: %" V8PRIdPTR "-> %" V8PRIdPTR "\n",
static_cast<void*>(this),
mode == MarkingMode::FULL ? "internal" : "external", old_value,
new_value);
}
template <MarkingMode mode>
void MemoryChunk::ResetLiveBytes() {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, 0);
live_byte_count_ = 0;
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_, 0);
young_generation_live_byte_count_ = 0;
break;
}
}
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(int by) {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, live_byte_count_ + by);
live_byte_count_ += by;
DCHECK_GE(live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(live_byte_count_), size_);
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_,
young_generation_live_byte_count_ + by);
young_generation_live_byte_count_ += by;
DCHECK_GE(young_generation_live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(young_generation_live_byte_count_), size_);
break;
}
}
bool PagedSpace::Contains(Address addr) {
return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}
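Each MemoryChunk live-bytes accessor deleted above has a one-line MarkingState equivalent; the mapping applied throughout the rest of this patch is (sketch; FULL and YOUNG_GENERATION are the removed MarkingMode values):

// chunk->IncrementLiveBytes<MarkingMode::FULL>(by)
//     -> MarkingState::Internal(chunk).IncrementLiveBytes(by)
// chunk->ResetLiveBytes<MarkingMode::FULL>()
//     -> MarkingState::Internal(chunk).SetLiveBytes(0)
// chunk->LiveBytes<MarkingMode::FULL>()
//     -> MarkingState::Internal(chunk).live_bytes()
// chunk->markbits<MarkingMode::FULL>()
//     -> MarkingState::Internal(chunk).bitmap()
// chunk->ClearLiveness<MarkingMode::YOUNG_GENERATION>()
//     -> MarkingState::External(chunk).ClearLiveness()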
@@ -538,12 +538,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->mutex_ = new base::Mutex();
chunk->available_in_free_list_ = 0;
chunk->wasted_memory_ = 0;
chunk->ClearLiveness();
chunk->young_generation_bitmap_ = nullptr;
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
MarkingState::Internal(chunk).ClearLiveness();
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
if (executable == EXECUTABLE) {
@@ -854,9 +855,10 @@ void Page::CreateBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_NE(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this);
markbits()->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
IncrementLiveBytes(static_cast<int>(end - start));
MarkingState::Internal(this).bitmap()->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
MarkingState::Internal(this).IncrementLiveBytes(
static_cast<int>(end - start));
}
void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
@@ -1196,15 +1198,6 @@ void MemoryChunk::ReleaseYoungGenerationBitmap() {
young_generation_bitmap_ = nullptr;
}
template <MarkingMode mode>
void MemoryChunk::ClearLiveness() {
markbits<mode>()->Clear();
ResetLiveBytes<mode>();
}
template void MemoryChunk::ClearLiveness<MarkingMode::FULL>();
template void MemoryChunk::ClearLiveness<MarkingMode::YOUNG_GENERATION>();
// -----------------------------------------------------------------------------
// PagedSpace implementation
@@ -1420,9 +1413,11 @@ void PagedSpace::EmptyAllocationInfo() {
// Clear the bits in the unused black area.
if (current_top != current_limit) {
page->markbits()->ClearRange(page->AddressToMarkbitIndex(current_top),
page->AddressToMarkbitIndex(current_limit));
page->IncrementLiveBytes(-static_cast<int>(current_limit - current_top));
MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(current_top),
page->AddressToMarkbitIndex(current_limit));
MarkingState::Internal(page).IncrementLiveBytes(
-static_cast<int>(current_limit - current_top));
}
}
@@ -1436,7 +1431,7 @@ void PagedSpace::IncreaseCapacity(size_t bytes) {
}
void PagedSpace::ReleasePage(Page* page) {
DCHECK_EQ(page->LiveBytes(), 0);
DCHECK_EQ(0, MarkingState::Internal(page).live_bytes());
DCHECK_EQ(page->owner(), this);
free_list_.EvictFreeListItems(page);
@@ -1497,14 +1492,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// All the interior pointers should be contained in the heap.
int size = object->Size();
object->IterateBody(map->instance_type(), size, visitor);
if (ObjectMarking::IsBlack(object)) {
if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
black_size += size;
}
CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
CHECK_LE(black_size, page->LiveBytes());
CHECK_LE(black_size, MarkingState::Internal(page).live_bytes());
}
CHECK(allocation_pointer_found_in_space);
}
@@ -1637,7 +1632,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
if (current_page == nullptr) return false;
DCHECK_NOT_NULL(current_page);
current_page->InsertAfter(anchor());
current_page->ClearLiveness();
MarkingState::Internal(current_page).ClearLiveness();
current_page->SetFlags(anchor()->prev_page()->GetFlags(),
static_cast<uintptr_t>(Page::kCopyAllFlags));
heap()->CreateFillerObjectAt(current_page->area_start(),
@@ -1709,7 +1704,7 @@ void NewSpace::ResetAllocationInfo() {
UpdateAllocationInfo();
// Clear all mark-bits in the to-space.
for (Page* p : to_space_) {
p->ClearLiveness();
MarkingState::Internal(p).ClearLiveness();
}
InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0);
}
@@ -2010,7 +2005,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
return false;
}
new_page->InsertAfter(last_page);
new_page->ClearLiveness();
MarkingState::Internal(new_page).ClearLiveness();
// Duplicate the flags that were set on the old page.
new_page->SetFlags(last_page->GetFlags(), Page::kCopyOnFlipFlagsMask);
last_page = new_page;
@@ -2071,7 +2066,7 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
page->ResetLiveBytes();
MarkingState::Internal(page).SetLiveBytes(0);
} else {
page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::IN_TO_SPACE);
@@ -3044,7 +3039,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
ClearRecordedSlots::kNo);
if (heap()->incremental_marking()->black_allocation()) {
ObjectMarking::WhiteToBlack(object);
ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
}
return object;
}
@@ -3091,13 +3086,14 @@ LargePage* LargeObjectSpace::FindPage(Address a) {
void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
if (ObjectMarking::IsBlackOrGrey(obj)) {
Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj));
if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj))) {
Marking::MarkWhite(
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
chunk->ResetProgressBar();
chunk->ResetLiveBytes();
MarkingState::Internal(chunk).SetLiveBytes(0);
}
DCHECK(ObjectMarking::IsWhite(obj));
DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
}
}
@@ -3139,8 +3135,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
LargePage* current = first_page_;
while (current != NULL) {
HeapObject* object = current->GetObject();
DCHECK(!ObjectMarking::IsGrey(object));
if (ObjectMarking::IsBlack(object)) {
DCHECK(!ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Address free_start;
if ((free_start = current->GetAddressToShrink()) != 0) {
// TODO(hpayer): Perform partial free concurrently.
@@ -3276,7 +3272,8 @@ void Page::Print() {
unsigned mark_size = 0;
for (HeapObject* object = objects.Next(); object != NULL;
object = objects.Next()) {
bool is_marked = ObjectMarking::IsBlackOrGrey(object);
bool is_marked =
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object));
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) {
mark_size += object->Size();
@@ -3285,7 +3282,8 @@ void Page::Print() {
PrintF("\n");
}
printf(" --------------------------------------\n");
printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes());
printf(" Marked: %x, LiveCount: %" V8PRIdPTR "\n", mark_size,
MarkingState::Internal(this).live_bytes());
}
#endif // DEBUG
@@ -224,10 +224,6 @@ class FreeListCategory {
friend class PagedSpace;
};
// MarkingMode determines which bitmaps and counters should be used when
// accessing marking information on MemoryChunk.
enum class MarkingMode { FULL, YOUNG_GENERATION };
// MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always
// 1MB aligned. Start of the body is aligned so it can accommodate
@@ -377,9 +373,6 @@ class MemoryChunk {
static const int kAllocatableMemory = kPageSize - kObjectStartOffset;
template <MarkingMode mode = MarkingMode::FULL>
static inline void IncrementLiveBytes(HeapObject* object, int by);
// Only works if the pointer is in the first kPageSize of the MemoryChunk.
static MemoryChunk* FromAddress(Address a) {
return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask);
@@ -427,33 +420,6 @@ class MemoryChunk {
return concurrent_sweeping_state().Value() == kSweepingDone;
}
// Manage live byte count, i.e., count of bytes in black objects.
template <MarkingMode mode = MarkingMode::FULL>
inline void ResetLiveBytes();
template <MarkingMode mode = MarkingMode::FULL>
inline void IncrementLiveBytes(int by);
template <MarkingMode mode = MarkingMode::FULL>
int LiveBytes() {
switch (mode) {
case MarkingMode::FULL:
DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
return static_cast<int>(live_byte_count_);
case MarkingMode::YOUNG_GENERATION:
DCHECK_LE(static_cast<unsigned>(young_generation_live_byte_count_),
size_);
return static_cast<int>(young_generation_live_byte_count_);
}
UNREACHABLE();
return 0;
}
void SetLiveBytes(int live_bytes) {
DCHECK_GE(live_bytes, 0);
DCHECK_LE(static_cast<size_t>(live_bytes), size_);
live_byte_count_ = live_bytes;
}
size_t size() const { return size_; }
void set_size(size_t size) { size_ = size; }
@@ -513,19 +479,6 @@ class MemoryChunk {
}
}
template <MarkingMode mode = MarkingMode::FULL>
inline Bitmap* markbits() const {
return mode == MarkingMode::FULL
? Bitmap::FromAddress(address() + kHeaderSize)
: young_generation_bitmap_;
}
template <MarkingMode mode = MarkingMode::FULL>
inline intptr_t* live_bytes_address() {
return mode == MarkingMode::FULL ? &live_byte_count_
: &young_generation_live_byte_count_;
}
inline uint32_t AddressToMarkbitIndex(Address addr) const {
return static_cast<uint32_t>(addr - this->address()) >> kPointerSizeLog2;
}
@@ -534,11 +487,6 @@ class MemoryChunk {
return this->address() + (index << kPointerSizeLog2);
}
template <MarkingMode mode = MarkingMode::FULL>
void ClearLiveness();
void PrintMarkbits() { markbits()->Print(); }
void SetFlag(Flag flag) { flags_ |= flag; }
void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); }
bool IsFlagSet(Flag flag) { return (flags_ & flag) != 0; }
@@ -622,9 +570,6 @@ class MemoryChunk {
base::VirtualMemory* reserved_memory() { return &reservation_; }
template <MarkingMode mode = MarkingMode::FULL>
inline void TraceLiveBytes(intptr_t old_value, intptr_t new_value);
size_t size_;
Flags flags_;
@@ -686,6 +631,7 @@ class MemoryChunk {
private:
void InitializeReservedMemory() { reservation_.Reset(); }
friend class MarkingState;
friend class MemoryAllocator;
friend class MemoryChunkValidator;
};
@@ -695,6 +641,50 @@ DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags)
static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
"kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory");
class MarkingState {
public:
static MarkingState External(HeapObject* object) {
return External(MemoryChunk::FromAddress(object->address()));
}
static MarkingState External(MemoryChunk* chunk) {
return MarkingState(chunk->young_generation_bitmap_,
&chunk->young_generation_live_byte_count_);
}
static MarkingState Internal(HeapObject* object) {
return Internal(MemoryChunk::FromAddress(object->address()));
}
static MarkingState Internal(MemoryChunk* chunk) {
return MarkingState(
Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize),
&chunk->live_byte_count_);
}
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap_(bitmap), live_bytes_(live_bytes) {}
void IncrementLiveBytes(intptr_t by) const {
*live_bytes_ += static_cast<int>(by);
}
void SetLiveBytes(intptr_t value) const {
*live_bytes_ = static_cast<int>(value);
}
void ClearLiveness() const {
bitmap_->Clear();
*live_bytes_ = 0;
}
Bitmap* bitmap() const { return bitmap_; }
intptr_t live_bytes() const { return *live_bytes_; }
private:
Bitmap* bitmap_;
intptr_t* live_bytes_;
};
// -----------------------------------------------------------------------------
// A page is a memory chunk of size 1MB. Large object pages may be larger.
//
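The relocated MarkingState above is a cheap value object: two pointers into the chunk header, constructed on demand. A short usage sketch (the helper function is hypothetical; both calls appear verbatim elsewhere in this patch):

// Hypothetical helper, not part of the patch: reset both liveness views.
// External() requires the young-generation bitmap to have been allocated
// (see Page::Initialize above, under FLAG_minor_mc).
void ClearBothLivenessViews(MemoryChunk* chunk) {
  MarkingState::Internal(chunk).ClearLiveness();  // full-GC view
  MarkingState::External(chunk).ClearLiveness();  // young-generation view
}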
@@ -2005,9 +2005,10 @@ void WeakCell::initialize(HeapObject* val) {
// We just have to execute the generational barrier here because we never
// mark through a weak cell and collect evacuation candidates when we process
// all weak cells.
WriteBarrierMode mode = ObjectMarking::IsBlack(this)
? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
WriteBarrierMode mode =
ObjectMarking::IsBlack(this, MarkingState::Internal(this))
? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}
@@ -26,7 +26,7 @@ void CheckInvariantsOfAbortedPage(Page* page) {
// 1) Markbits are cleared
// 2) The page is not marked as evacuation candidate anymore
// 3) The page is not marked as aborted compaction anymore.
CHECK(page->markbits()->IsClean());
CHECK(MarkingState::Internal(page).bitmap()->IsClean());
CHECK(!page->IsEvacuationCandidate());
CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
}
@@ -2416,8 +2416,9 @@ TEST(InstanceOfStubWriteBarrier) {
CHECK(f->IsOptimized());
while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) &&
!marking->IsStopped()) {
while (
!ObjectMarking::IsBlack(f->code(), MarkingState::Internal(f->code())) &&
!marking->IsStopped()) {
// Discard any pending GC requests; otherwise we will get GC when we enter
// code below.
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
@@ -5363,8 +5364,9 @@ TEST(Regress3631) {
Handle<JSReceiver> obj =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
while (!Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
HeapObject* weak_map_table = HeapObject::cast(weak_map->table());
while (!ObjectMarking::IsBlack(weak_map_table,
MarkingState::Internal(weak_map_table)) &&
!marking->IsStopped()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
@@ -6114,10 +6116,10 @@ TEST(Regress598319) {
}
CHECK(heap->lo_space()->Contains(arr.get()));
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get())));
CHECK(ObjectMarking::IsWhite(arr.get(), MarkingState::Internal(arr.get())));
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
}
// Start incremental marking.
@@ -6131,8 +6133,8 @@ TEST(Regress598319) {
// Check that we have not marked the interesting array during root scanning.
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
}
// Now we search for a state where we are in incremental marking and have
@@ -6167,8 +6169,8 @@ TEST(Regress598319) {
// All objects need to be black after marking. If a white object crossed the
// progress bar, we would fail here.
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsBlack(arr_value, MarkingState::Internal(arr_value)));
}
}
@@ -6314,13 +6316,13 @@ TEST(LeftTrimFixedArrayInBlackArea) {
isolate->factory()->NewFixedArray(4, TENURED);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
CHECK(heap->old_space()->Contains(*array));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
// Now left trim the allocated black area. A filler has to be installed
// for the trimmed area and all mark bits of the trimmed area have to be
// cleared.
FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
heap::GcAndSweep(heap, OLD_SPACE);
}
@@ -6357,8 +6359,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(page->markbits()->AllBitsSetInRange(
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array));
@@ -6371,8 +6373,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, 1);
HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed;
}
@@ -6382,8 +6384,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, i);
HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed;
}
}
@@ -6423,8 +6425,9 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(page->markbits()->AllBitsSetInRange(
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array));
@@ -6434,7 +6437,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, 1);
HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller());
CHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(filler)));
CHECK(ObjectMarking::IsImpossible(filler, MarkingState::Internal(filler)));
// Trim 10 times by one, two, and three words.
for (int i = 1; i <= 3; i++) {
@@ -6443,7 +6446,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, i);
HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller());
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(filler)));
CHECK(ObjectMarking::IsWhite(filler, MarkingState::Internal(filler)));
}
}
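The test migrations all follow one pattern: the two-step MarkBit lookup collapses into a single ObjectMarking predicate with an explicit state. Both forms query the same bit:

// Old test idiom:
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
// New test idiom:
CHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));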
@@ -355,8 +355,7 @@ TEST(Regress5829) {
ClearRecordedSlots::kNo);
heap->old_space()->EmptyAllocationInfo();
Page* page = Page::FromAddress(array->address());
LiveObjectIterator<kGreyObjects> it(page,
MarkingState::FromPageInternal(page));
LiveObjectIterator<kGreyObjects> it(page, MarkingState::Internal(page));
HeapObject* object = nullptr;
while ((object = it.Next()) != nullptr) {
CHECK(!object->IsFiller());
@@ -65,7 +65,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
// Sanity check that the page meets the requirements for promotion.
const int threshold_bytes =
FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
CHECK_GE(first_page->LiveBytes(), threshold_bytes);
CHECK_GE(MarkingState::Internal(first_page).live_bytes(), threshold_bytes);
// Actual checks: The page is in new space first, but is moved to old space
// during a full GC.
@@ -1172,7 +1172,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
// in compacting mode and |obj_value|'s page is an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsCompacting());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj)));
CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger GCs so that |obj| moves to old gen.
@@ -1492,8 +1492,8 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
// still active and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj_value)));
CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(ObjectMarking::IsBlack(*obj_value, MarkingState::Internal(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger incremental write barrier, which should add a slot to remembered