Commit 205457b1 authored by hpayer, committed by Commit bot

[heap] Reland "Remove black pages and use black areas instead."

BUG=chromium:630969,chromium:630386

Review-Url: https://codereview.chromium.org/2186863005
Cr-Commit-Position: refs/heads/master@{#38195}
parent dc505196
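In short: with black pages, any page allocated while incremental marking was in the black-allocation phase had every mark bit set and was flagged Page::BLACK_PAGE; with black areas, only the actually allocated ranges of a page get their mark bits set, via the new Bitmap::SetRange/ClearRange helpers further down. A standalone sketch of the difference (plain C++, not V8 code; a 64-bit word stands in for a page's mark bitmap, and the indices are hypothetical):

#include <cstdint>
#include <cstdio>

int main() {
  // Old scheme: a "black page" behaves as if every mark bit were set.
  uint64_t black_page = ~uint64_t{0};

  // New scheme: only the allocated range [top, limit) is marked black.
  unsigned top = 12, limit = 40;  // hypothetical bit indices, limit - top < 64
  uint64_t black_area = ((uint64_t{1} << (limit - top)) - 1) << top;

  std::printf("black page: %016llx\n", static_cast<unsigned long long>(black_page));
  std::printf("black area: %016llx\n", static_cast<unsigned long long>(black_area));
  return 0;
}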
@@ -252,11 +252,6 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
       old_gen_exhausted_ = true;
     }
-    if (!old_gen_exhausted_ && incremental_marking()->black_allocation() &&
-        space != OLD_SPACE) {
-      Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
-      MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes);
-    }
   return allocation;
 }
...
@@ -3066,6 +3066,16 @@ void Heap::CreateFillerObjectAt(Address addr, int size,
   if (mode == ClearRecordedSlots::kYes) {
     ClearRecordedSlotRange(addr, addr + size);
   }
+
+  // If the location where the filler is created is within a black area we have
+  // to clear the mark bits of the filler space.
+  if (incremental_marking()->black_allocation() &&
+      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(addr))) {
+    Page* page = Page::FromAddress(addr);
+    page->markbits()->ClearRange(page->AddressToMarkbitIndex(addr),
+                                 page->AddressToMarkbitIndex(addr + size));
+  }
+
   // At this point, we may be deserializing the heap from a snapshot, and
   // none of the maps have been created yet and are NULL.
   DCHECK((filler->map() == NULL && !deserialization_complete_) ||
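Note on the hunk above: the filler's mark bits are cleared through a page-relative bit index. A self-contained approximation of that mapping (not the V8 implementation; the page size is an assumption, and the pairing of mark bits per object is simplified to one bit per pointer-sized word):

#include <cstdint>

constexpr uintptr_t kPointerSizeLog2 = 3;            // 64-bit target
constexpr uintptr_t kPageSize = uintptr_t{1} << 19;  // assumed 512 KB pages
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;

// Index of the mark bit covering the word at `addr`, relative to its page.
inline uint32_t AddressToMarkbitIndex(uintptr_t addr) {
  return static_cast<uint32_t>((addr & kPageAlignmentMask) >> kPointerSizeLog2);
}
// ClearRange(AddressToMarkbitIndex(addr), AddressToMarkbitIndex(addr + size))
// then resets exactly size >> kPointerSizeLog2 bits, one per filler word.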
@@ -3130,13 +3140,20 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   DCHECK(elements_to_trim <= len);
 
   // Calculate location of new array start.
-  Address new_start = object->address() + bytes_to_trim;
+  Address old_start = object->address();
+  Address new_start = old_start + bytes_to_trim;
+
+  // Transfer the mark bits to their new location if the object is not within
+  // a black area.
+  if (!incremental_marking()->black_allocation() ||
+      !Marking::IsBlack(ObjectMarking::MarkBitFrom(new_start))) {
+    IncrementalMarking::TransferMark(this, old_start, new_start);
+  }
 
   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
-  CreateFillerObjectAt(object->address(), bytes_to_trim,
-                       ClearRecordedSlots::kYes);
+  CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
 
   // Initialize header of the trimmed array. Since left trimming is only
   // performed on pages which are not concurrently swept creating a filler
   // object does not require synchronization.
@@ -3145,18 +3162,18 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   int new_start_index = elements_to_trim * (element_size / kPointerSize);
   former_start[new_start_index] = map;
   former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
 
   FixedArrayBase* new_object =
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
+
+  // Maintain consistency of live bytes during incremental marking
+  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
 
   // Remove recorded slots for the new map and length offset.
   ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
   ClearRecordedSlot(new_object, HeapObject::RawField(
                                     new_object, FixedArrayBase::kLengthOffset));
 
-  // Maintain consistency of live bytes during incremental marking
-  IncrementalMarking::TransferMark(this, object->address(), new_start);
-  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
-
   // Notify the heap profiler of change in object layout.
   OnMoveEvent(new_object, object, new_object->Size());
 
   return new_object;
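For the left-trimming hunks above, the essential shape of the operation as a plain C++ model (not the V8 API; a fixed array is modeled as a length word plus payload). It shows why the vacated prefix becomes a filler whose mark bits must be cleared, and why the mark is only transferred when the new start is not already inside a black area:

#include <cstddef>
#include <cstdint>
#include <cstring>

// Model: array[0] holds the length; array[1..] holds the elements.
// Left-trimming by k elements writes a fresh header k words in and turns
// the vacated prefix into filler.
uint64_t* LeftTrim(uint64_t* array, size_t k) {
  uint64_t len = array[0];
  uint64_t* new_start = array + k;
  new_start[0] = len - k;                       // header of the trimmed array
  std::memset(array, 0, k * sizeof(uint64_t));  // stand-in for the filler
  return new_start;
}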
@@ -4174,14 +4191,13 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
   // Hence we have to color all objects of the reservation first black to avoid
   // unnecessary marking deque load.
   if (incremental_marking()->black_allocation()) {
-    for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
       const Heap::Reservation& res = reservations[i];
       for (auto& chunk : res) {
         Address addr = chunk.start;
         while (addr < chunk.end) {
           HeapObject* obj = HeapObject::FromAddress(addr);
           Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj));
-          MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
           addr += obj->Size();
         }
       }
...
@@ -164,9 +164,7 @@ void IncrementalMarking::TransferMark(Heap* heap, Address old_start,
   DCHECK(MemoryChunk::FromAddress(old_start) ==
          MemoryChunk::FromAddress(new_start));
 
-  if (!heap->incremental_marking()->IsMarking() ||
-      Page::FromAddress(old_start)->IsFlagSet(Page::BLACK_PAGE))
-    return;
+  if (!heap->incremental_marking()->IsMarking()) return;
 
   // If the mark doesn't move, we don't check the color of the object.
   // It doesn't matter whether the object is black, since it hasn't changed
@@ -603,9 +601,9 @@ void IncrementalMarking::StartBlackAllocation() {
   DCHECK(FLAG_black_allocation);
   DCHECK(IsMarking());
   black_allocation_ = true;
-  OldSpace* old_space = heap()->old_space();
-  old_space->EmptyAllocationInfo();
-  old_space->free_list()->Reset();
+  heap()->old_space()->MarkAllocationInfoBlack();
+  heap()->map_space()->MarkAllocationInfoBlack();
+  heap()->code_space()->MarkAllocationInfoBlack();
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Black allocation started\n");
   }
@@ -833,7 +831,7 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
       // them.
       if (map_word.IsForwardingAddress()) {
         HeapObject* dest = map_word.ToForwardingAddress();
-        if (Page::FromAddress(dest->address())->IsFlagSet(Page::BLACK_PAGE))
+        if (Marking::IsBlack(ObjectMarking::MarkBitFrom(dest->address())))
           continue;
         array[new_top] = dest;
         new_top = ((new_top + 1) & mask);
...
@@ -208,12 +208,17 @@ class IncrementalMarking {
   static void TransferMark(Heap* heap, Address old_start, Address new_start);
 
-  // Returns true if the transferred color is black.
-  INLINE(static bool TransferColor(HeapObject* from, HeapObject* to)) {
-    if (Page::FromAddress(to->address())->IsFlagSet(Page::BLACK_PAGE))
-      return true;
+  // Returns true if the color transfer requires live bytes updating.
+  INLINE(static bool TransferColor(HeapObject* from, HeapObject* to,
+                                   int size)) {
     MarkBit from_mark_bit = ObjectMarking::MarkBitFrom(from);
     MarkBit to_mark_bit = ObjectMarking::MarkBitFrom(to);
+    if (Marking::IsBlack(to_mark_bit)) {
+      DCHECK(to->GetHeap()->incremental_marking()->black_allocation());
+      return false;
+    }
     DCHECK(Marking::IsWhite(to_mark_bit));
     if (from_mark_bit.Get()) {
       to_mark_bit.Set();
...
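Paraphrasing the new TransferColor contract above: a target that is already black was pre-marked by black allocation, so its live bytes were already accounted for and the function returns false; otherwise the source's color is copied over, and the caller must bump live bytes when the transferred color is black. A rough table-top sketch with plain enums (not the real MarkBit machinery, and `size` handling is left to the caller as in the scavenger hunk below):

enum class Color { kWhite, kGrey, kBlack };

// Returns true if the caller must add the object's size to live bytes.
bool TransferColor(Color from, Color* to) {
  if (*to == Color::kBlack) return false;  // born black via black allocation
  *to = from;
  return from == Color::kBlack;
}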
@@ -153,20 +153,46 @@ HeapObject* LiveObjectIterator<T>::Next() {
         cell_base_ = it_.CurrentCellBase();
         current_cell_ = *it_.CurrentCell();
       }
-      if (T == kBlackObjects && (current_cell_ & second_bit_index)) {
-        object = HeapObject::FromAddress(addr);
-      } else if (T == kGreyObjects && !(current_cell_ & second_bit_index)) {
-        object = HeapObject::FromAddress(addr);
-      } else if (T == kAllLiveObjects) {
+
+      if (current_cell_ & second_bit_index) {
+        // We found a black object. If the black object is within a black area,
+        // make sure that we skip all set bits in the black area until the
+        // object ends.
+        HeapObject* black_object = HeapObject::FromAddress(addr);
+        Address end = addr + black_object->Size() - kPointerSize;
+        DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
+        uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
+        unsigned int end_cell_index =
+            end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
+        MarkBit::CellType end_index_mask =
+            1u << Bitmap::IndexInCell(end_mark_bit_index);
+        if (it_.Advance(end_cell_index)) {
+          cell_base_ = it_.CurrentCellBase();
+          current_cell_ = *it_.CurrentCell();
+        }
+
+        // Clear all bits in current_cell, including the end index.
+        current_cell_ &= ~(end_index_mask + end_index_mask - 1);
+
+        if (T == kBlackObjects || T == kAllLiveObjects) {
+          object = black_object;
+        }
+      } else if ((T == kGreyObjects || T == kAllLiveObjects)) {
         object = HeapObject::FromAddress(addr);
       }
 
-      // Clear the second bit of the found object.
-      current_cell_ &= ~second_bit_index;
-
       // We found a live object.
-      if (object != nullptr) break;
+      if (object != nullptr) {
+        if (object->IsFiller()) {
+          // Black areas together with slack tracking may result in black filler
+          // objects. We filter these objects out in the iterator.
+          object = nullptr;
+        } else {
+          break;
+        }
+      }
     }
 
     if (current_cell_ == 0) {
       if (!it_.Done()) {
         it_.Advance();
...
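The skip logic in the iterator hunk above reduces to cell/bit arithmetic on the mark bitmap. A self-contained version of the index math (32-bit cells as in Bitmap; the constants are assumptions mirroring the real ones):

#include <cstdint>

constexpr uint32_t kBitsPerCellLog2 = 5;  // 32 bits per cell
constexpr uint32_t kBitIndexMask = (1u << kBitsPerCellLog2) - 1;

struct CellPos {
  uint32_t cell_index;  // which 32-bit cell holds the bit
  uint32_t mask;        // single-bit mask within that cell
};

inline CellPos Locate(uint32_t mark_bit_index) {
  return {mark_bit_index >> kBitsPerCellLog2,
          1u << (mark_bit_index & kBitIndexMask)};
}
// With mask = Locate(end_index).mask, the expression
//   current_cell_ &= ~(mask + mask - 1);
// clears the end bit and every lower bit in one step, which is how the
// iterator drops all mark bits belonging to the black object just returned.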
@@ -104,30 +104,30 @@ static void VerifyMarking(Heap* heap, Address bottom, Address top) {
   VerifyMarkingVisitor visitor(heap);
   HeapObject* object;
   Address next_object_must_be_here_or_later = bottom;
-
-  for (Address current = bottom; current < top; current += kPointerSize) {
+  for (Address current = bottom; current < top;) {
     object = HeapObject::FromAddress(current);
     if (MarkCompactCollector::IsMarked(object)) {
       CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
       CHECK(current >= next_object_must_be_here_or_later);
       object->Iterate(&visitor);
       next_object_must_be_here_or_later = current + object->Size();
-      // The next word for sure belongs to the current object, jump over it.
+      // The object is either part of a black area of black allocation or a
+      // regular black object
+      Page* page = Page::FromAddress(current);
+      CHECK(
+          page->markbits()->AllBitsSetInRange(
+              page->AddressToMarkbitIndex(current),
+              page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
+          page->markbits()->AllBitsClearInRange(
+              page->AddressToMarkbitIndex(current + kPointerSize * 2),
+              page->AddressToMarkbitIndex(next_object_must_be_here_or_later)));
+      current = next_object_must_be_here_or_later;
+    } else {
       current += kPointerSize;
     }
   }
 }
 
-static void VerifyMarkingBlackPage(Heap* heap, Page* page) {
-  CHECK(page->IsFlagSet(Page::BLACK_PAGE));
-  VerifyMarkingVisitor visitor(heap);
-  HeapObjectIterator it(page);
-  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
-    CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
-    object->Iterate(&visitor);
-  }
-}
-
 static void VerifyMarking(NewSpace* space) {
   Address end = space->top();
   // The bottom position is at the start of its page. Allows us to use
@@ -146,11 +146,7 @@ static void VerifyMarking(NewSpace* space) {
 
 static void VerifyMarking(PagedSpace* space) {
   for (Page* p : *space) {
-    if (p->IsFlagSet(Page::BLACK_PAGE)) {
-      VerifyMarkingBlackPage(space->heap(), p);
-    } else {
-      VerifyMarking(space->heap(), p->area_start(), p->area_end());
-    }
+    VerifyMarking(space->heap(), p->area_start(), p->area_end());
   }
 }
@@ -409,9 +405,6 @@ void MarkCompactCollector::VerifyOmittedMapChecks() {
 static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
   for (Page* p : *space) {
     p->ClearLiveness();
-    if (p->IsFlagSet(Page::BLACK_PAGE)) {
-      p->ClearFlag(Page::BLACK_PAGE);
-    }
   }
 }
 
@@ -435,9 +428,6 @@ void MarkCompactCollector::ClearMarkbits() {
     MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
     chunk->ResetProgressBar();
     chunk->ResetLiveBytes();
-    if (chunk->IsFlagSet(Page::BLACK_PAGE)) {
-      chunk->ClearFlag(Page::BLACK_PAGE);
-    }
   }
 }
@@ -659,7 +649,6 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   for (Page* p : *space) {
     if (p->NeverEvacuate()) continue;
-    if (p->IsFlagSet(Page::BLACK_PAGE)) continue;
     // Invariant: Evacuation candidates are just created when marking is
     // started. This means that sweeping has finished. Furthermore, at the end
     // of a GC all evacuation candidates are cleared and their slot buffers are
@@ -1913,9 +1902,7 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
 
 void MarkCompactCollector::DiscoverGreyObjectsInSpace(PagedSpace* space) {
   for (Page* p : *space) {
-    if (!p->IsFlagSet(Page::BLACK_PAGE)) {
-      DiscoverGreyObjectsOnPage(p);
-    }
+    DiscoverGreyObjectsOnPage(p);
     if (marking_deque()->IsFull()) return;
   }
 }
@@ -2932,9 +2919,8 @@ bool MarkCompactCollector::IsSlotInBlackObject(MemoryChunk* p, Address slot) {
   DCHECK(owner != heap_->lo_space() && owner != nullptr);
   USE(owner);
 
-  // If we are on a black page, we cannot find the actual object start
-  // easiliy. We just return true but do not set the out_object.
-  if (p->IsFlagSet(Page::BLACK_PAGE)) {
+  // We may be part of a black area.
+  if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(slot))) {
     return true;
   }
 
@@ -3031,27 +3017,16 @@ HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
     return nullptr;
   }
 
-  if (p->IsFlagSet(Page::BLACK_PAGE)) {
-    HeapObjectIterator it(p);
-    HeapObject* object = nullptr;
-    while ((object = it.Next()) != nullptr) {
-      int size = object->Size();
-      if (object->address() > slot) return nullptr;
-      if (object->address() <= slot && slot < (object->address() + size)) {
-        return object;
-      }
-    }
-  } else {
-    LiveObjectIterator<kBlackObjects> it(p);
-    HeapObject* object = nullptr;
-    while ((object = it.Next()) != nullptr) {
-      int size = object->Size();
-      if (object->address() > slot) return nullptr;
-      if (object->address() <= slot && slot < (object->address() + size)) {
-        return object;
-      }
+  LiveObjectIterator<kBlackObjects> it(p);
+  HeapObject* object = nullptr;
+  while ((object = it.Next()) != nullptr) {
+    int size = object->Size();
+    if (object->address() > slot) return nullptr;
+    if (object->address() <= slot && slot < (object->address() + size)) {
+      return object;
     }
   }
+
   return nullptr;
 }
@@ -3388,7 +3363,6 @@ int MarkCompactCollector::Sweeper::RawSweep(
   DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
          space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
   DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
-  DCHECK(!p->IsFlagSet(Page::BLACK_PAGE));
 
   // Before we sweep objects on the page, we free dead array buffers which
   // requires valid mark bits.
@@ -3941,7 +3915,6 @@ void MarkCompactCollector::Sweeper::AddSweepingPageSafe(AllocationSpace space,
 }
 
 void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
-  Address space_top = space->top();
   space->ClearStats();
 
   int will_be_swept = 0;
@@ -3958,24 +3931,6 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
       continue;
     }
 
-    // We can not sweep black pages, since all mark bits are set for these
-    // pages.
-    if (p->IsFlagSet(Page::BLACK_PAGE)) {
-      p->ClearLiveness();
-      p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
-      p->ClearFlag(Page::BLACK_PAGE);
-      // Area above the high watermark is free.
-      Address free_start = p->HighWaterMark();
-
-      // Check if the space top was in this page, which means that the
-      // high watermark is not up-to-date.
-      if (free_start < space_top && space_top <= p->area_end()) {
-        free_start = space_top;
-      }
-      int size = static_cast<int>(p->area_end() - free_start);
-      space->Free(free_start, size);
-      continue;
-    }
-
     if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
       // We need to sweep the page to get it into an iterable state again. Note
       // that this adds unusable memory into the free list that is later on
...
@@ -218,7 +218,19 @@ class MarkBitCellIterator BASE_EMBEDDED {
 
   inline void Advance() {
     cell_index_++;
-    cell_base_ += 32 * kPointerSize;
+    cell_base_ += Bitmap::kBitsPerCell * kPointerSize;
+  }
+
+  inline bool Advance(unsigned int new_cell_index) {
+    if (new_cell_index != cell_index_) {
+      DCHECK_GT(new_cell_index, cell_index_);
+      DCHECK_LE(new_cell_index, last_cell_index_);
+      unsigned int diff = new_cell_index - cell_index_;
+      cell_index_ = new_cell_index;
+      cell_base_ += diff * (Bitmap::kBitsPerCell * kPointerSize);
+      return true;
+    }
+    return false;
   }
 
   // Return the next mark bit cell. If there is no next it returns 0;
@@ -253,8 +265,6 @@ class LiveObjectIterator BASE_EMBEDDED {
         it_(chunk_),
         cell_base_(it_.CurrentCellBase()),
         current_cell_(*it_.CurrentCell()) {
-    // Black pages can not be iterated.
-    DCHECK(!chunk->IsFlagSet(Page::BLACK_PAGE));
   }
 
   HeapObject* Next();
...
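The new Advance(new_cell_index) overload above jumps whole cells at once; each cell covers kBitsPerCell pointer-sized words, so the base address moves in lockstep with the cell index. A small sketch of the address bookkeeping (not V8 code; 64-bit target and 32-bit cells assumed):

#include <cstdint>

// Shows how cell_base_ tracks cell_index_ when skipping ahead.
uintptr_t AdvanceBase(uintptr_t cell_base, unsigned old_cell,
                      unsigned new_cell) {
  constexpr unsigned kBitsPerCell = 32;
  constexpr unsigned kPointerSize = 8;
  return cell_base + (new_cell - old_cell) * (kBitsPerCell * kPointerSize);
}
// AdvanceBase(base, 3, 7) == base + 4 * 256 == base + 1024.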
@@ -106,8 +106,98 @@ class Bitmap {
     for (int i = 0; i < CellsCount(); i++) cells()[i] = 0;
   }
 
-  void SetAllBits() {
-    for (int i = 0; i < CellsCount(); i++) cells()[i] = 0xffffffff;
+  // Sets all bits in the range [start_index, end_index).
+  void SetRange(uint32_t start_index, uint32_t end_index) {
+    unsigned int start_cell_index = start_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType start_index_mask = 1u << Bitmap::IndexInCell(start_index);
+
+    unsigned int end_cell_index = end_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType end_index_mask = 1u << Bitmap::IndexInCell(end_index);
+
+    if (start_cell_index != end_cell_index) {
+      // Firstly, fill all bits from the start address to the end of the first
+      // cell with 1s.
+      cells()[start_cell_index] |= ~(start_index_mask - 1);
+      // Then fill all in between cells with 1s.
+      for (unsigned int i = start_cell_index + 1; i < end_cell_index; i++) {
+        cells()[i] = ~0u;
+      }
+      // Finally, fill all bits until the end address in the last cell with 1s.
+      cells()[end_cell_index] |= (end_index_mask - 1);
+    } else {
+      cells()[start_cell_index] |= end_index_mask - start_index_mask;
+    }
+  }
+
+  // Clears all bits in the range [start_index, end_index).
+  void ClearRange(uint32_t start_index, uint32_t end_index) {
+    unsigned int start_cell_index = start_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType start_index_mask = 1u << Bitmap::IndexInCell(start_index);
+
+    unsigned int end_cell_index = end_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType end_index_mask = 1u << Bitmap::IndexInCell(end_index);
+
+    if (start_cell_index != end_cell_index) {
+      // Firstly, fill all bits from the start address to the end of the first
+      // cell with 0s.
+      cells()[start_cell_index] &= (start_index_mask - 1);
+      // Then fill all in between cells with 0s.
+      for (unsigned int i = start_cell_index + 1; i < end_cell_index; i++) {
+        cells()[i] = 0;
+      }
+      // Finally, set all bits until the end address in the last cell with 0s.
+      cells()[end_cell_index] &= ~(end_index_mask - 1);
+    } else {
+      cells()[start_cell_index] &= ~(end_index_mask - start_index_mask);
+    }
+  }
+
+  // Returns true if all bits in the range [start_index, end_index) are set.
+  bool AllBitsSetInRange(uint32_t start_index, uint32_t end_index) {
+    unsigned int start_cell_index = start_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType start_index_mask = 1u << Bitmap::IndexInCell(start_index);
+
+    unsigned int end_cell_index = end_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType end_index_mask = 1u << Bitmap::IndexInCell(end_index);
+
+    MarkBit::CellType matching_mask;
+    if (start_cell_index != end_cell_index) {
+      matching_mask = ~(start_index_mask - 1);
+      if ((cells()[start_cell_index] & matching_mask) != matching_mask) {
+        return false;
+      }
+      for (unsigned int i = start_cell_index + 1; i < end_cell_index; i++) {
+        if (cells()[i] != ~0u) return false;
+      }
+      matching_mask = (end_index_mask - 1);
+      return ((cells()[end_cell_index] & matching_mask) == matching_mask);
+    } else {
+      matching_mask = end_index_mask - start_index_mask;
+      return (cells()[end_cell_index] & matching_mask) == matching_mask;
+    }
+  }
+
+  // Returns true if all bits in the range [start_index, end_index) are
+  // cleared.
+  bool AllBitsClearInRange(uint32_t start_index, uint32_t end_index) {
+    unsigned int start_cell_index = start_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType start_index_mask = 1u << Bitmap::IndexInCell(start_index);
+
+    unsigned int end_cell_index = end_index >> Bitmap::kBitsPerCellLog2;
+    MarkBit::CellType end_index_mask = 1u << Bitmap::IndexInCell(end_index);
+
+    MarkBit::CellType matching_mask;
+    if (start_cell_index != end_cell_index) {
+      matching_mask = ~(start_index_mask - 1);
+      if ((cells()[start_cell_index] & matching_mask)) return false;
+      for (unsigned int i = start_cell_index + 1; i < end_cell_index; i++) {
+        if (cells()[i]) return false;
+      }
+      matching_mask = (end_index_mask - 1);
+      return !(cells()[end_cell_index] & matching_mask);
+    } else {
+      matching_mask = end_index_mask - start_index_mask;
+      return !(cells()[end_cell_index] & matching_mask);
+    }
   }
 
   static void PrintWord(uint32_t word, uint32_t himask = 0) {
@@ -175,23 +265,6 @@ class Bitmap {
     }
     return true;
   }
-
-  // Clears all bits starting from {cell_base_index} up to and excluding
-  // {index}. Note that {cell_base_index} is required to be cell aligned.
-  void ClearRange(uint32_t cell_base_index, uint32_t index) {
-    DCHECK_EQ(IndexInCell(cell_base_index), 0u);
-    DCHECK_GE(index, cell_base_index);
-    uint32_t start_cell_index = IndexToCell(cell_base_index);
-    uint32_t end_cell_index = IndexToCell(index);
-    DCHECK_GE(end_cell_index, start_cell_index);
-    // Clear all cells till the cell containing the last index.
-    for (uint32_t i = start_cell_index; i < end_cell_index; i++) {
-      cells()[i] = 0;
-    }
-    // Clear all bits in the last cell till the last bit before index.
-    uint32_t clear_mask = ~((1u << IndexInCell(index)) - 1);
-    cells()[end_cell_index] &= clear_mask;
-  }
 };
 
 class Marking : public AllStatic {
...
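The Bitmap range helpers above all follow the same split: a partial first cell, full middle cells, and a partial last cell. A compilable standalone rendition of SetRange for reference (uint32_t cells standing in for MarkBit::CellType; this is not the V8 class):

#include <cassert>
#include <cstdint>
#include <vector>

constexpr uint32_t kBitsPerCellLog2 = 5;  // 32-bit cells

void SetRange(std::vector<uint32_t>* cells, uint32_t start, uint32_t end) {
  uint32_t start_cell = start >> kBitsPerCellLog2;
  uint32_t end_cell = end >> kBitsPerCellLog2;
  uint32_t start_mask = 1u << (start & 31);
  uint32_t end_mask = 1u << (end & 31);
  if (start_cell != end_cell) {
    (*cells)[start_cell] |= ~(start_mask - 1);  // tail of the first cell
    for (uint32_t i = start_cell + 1; i < end_cell; i++) (*cells)[i] = ~0u;
    (*cells)[end_cell] |= end_mask - 1;         // head of the last cell
  } else {
    (*cells)[start_cell] |= end_mask - start_mask;
  }
}

int main() {
  std::vector<uint32_t> cells(2, 0);
  SetRange(&cells, 3, 40);          // crosses one cell boundary
  assert(cells[0] == 0xfffffff8u);  // bits 3..31 set
  assert(cells[1] == 0x000000ffu);  // bits 32..39 set
  return 0;
}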
@@ -138,7 +138,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
 
     if (marks_handling == TRANSFER_MARKS) {
-      if (IncrementalMarking::TransferColor(source, target)) {
+      if (IncrementalMarking::TransferColor(source, target, size)) {
         MemoryChunk::IncrementLiveBytesFromGC(target, size);
       }
     }
...
@@ -34,7 +34,6 @@ NewSpacePageRange::NewSpacePageRange(Address start, Address limit)
   SemiSpace::AssertValidRange(start, limit);
 }
 
-
 // -----------------------------------------------------------------------------
 // SemiSpaceIterator
@@ -242,7 +241,6 @@ void MemoryChunk::ResetLiveBytes() {
 }
 
 void MemoryChunk::IncrementLiveBytes(int by) {
-  if (IsFlagSet(BLACK_PAGE)) return;
   if (FLAG_trace_live_bytes) {
     PrintIsolate(
         heap()->isolate(), "live-bytes: update page=%p delta=%d %d->%d\n",
@@ -444,6 +442,12 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
     if (object == NULL) {
       object = SlowAllocateRaw(size_in_bytes);
     }
+    if (object != NULL) {
+      if (heap()->incremental_marking()->black_allocation()) {
+        Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
+        MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes);
+      }
+    }
   }
 
   if (object != NULL) {
...
@@ -1229,18 +1229,6 @@ bool PagedSpace::Expand() {
   // Pages created during bootstrapping may contain immortal immovable objects.
   if (!heap()->deserialization_complete()) p->MarkNeverEvacuate();
 
-  // When incremental marking was activated, old space pages are allocated
-  // black.
-  if (heap()->incremental_marking()->black_allocation() &&
-      identity() == OLD_SPACE) {
-    p->markbits()->SetAllBits();
-    p->SetFlag(Page::BLACK_PAGE);
-    if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(), "Added black page %p\n",
-                   static_cast<void*>(p));
-    }
-  }
-
   DCHECK(Capacity() <= heap()->MaxOldGenerationSize());
 
   p->InsertAfter(anchor_.prev_page());
@@ -1265,6 +1253,50 @@ void PagedSpace::ResetFreeListStatistics() {
   }
 }
 
+void PagedSpace::SetAllocationInfo(Address top, Address limit) {
+  SetTopAndLimit(top, limit);
+  if (top != nullptr && top != limit &&
+      heap()->incremental_marking()->black_allocation()) {
+    Page* page = Page::FromAddress(top);
+    page->markbits()->SetRange(page->AddressToMarkbitIndex(top),
+                               page->AddressToMarkbitIndex(limit));
+    page->IncrementLiveBytes(static_cast<int>(limit - top));
+  }
+}
+
+void PagedSpace::MarkAllocationInfoBlack() {
+  DCHECK(heap()->incremental_marking()->black_allocation());
+  Address current_top = top();
+  Address current_limit = limit();
+  if (current_top != nullptr && current_top != current_limit) {
+    Page* page = Page::FromAddress(current_top);
+    page->markbits()->SetRange(page->AddressToMarkbitIndex(current_top),
+                               page->AddressToMarkbitIndex(current_limit));
+    page->IncrementLiveBytes(static_cast<int>(current_limit - current_top));
+  }
+}
+
+// Empty space allocation info, returning unused area to free list.
+void PagedSpace::EmptyAllocationInfo() {
+  // Mark the old linear allocation area with a free space map so it can be
+  // skipped when scanning the heap.
+  Address current_top = top();
+  Address current_limit = limit();
+  if (current_top == nullptr) {
+    DCHECK(current_limit == nullptr);
+    return;
+  }
+  int old_linear_size = static_cast<int>(current_limit - current_top);
+  SetTopAndLimit(NULL, NULL);
+  if (current_top != current_limit &&
+      heap()->incremental_marking()->black_allocation()) {
+    Page* page = Page::FromAddress(current_top);
+    page->markbits()->ClearRange(page->AddressToMarkbitIndex(current_top),
+                                 page->AddressToMarkbitIndex(current_limit));
+    page->IncrementLiveBytes(-static_cast<int>(current_limit - current_top));
+  }
+  Free(current_top, old_linear_size);
+}
 void PagedSpace::IncreaseCapacity(int size) {
   accounting_stats_.ExpandSpace(size);
@@ -1331,8 +1363,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
       // All the interior pointers should be contained in the heap.
       int size = object->Size();
       object->IterateBody(map->instance_type(), size, visitor);
-      if (!page->IsFlagSet(Page::BLACK_PAGE) &&
-          Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
+      if (Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
         black_size += size;
       }
@@ -2429,8 +2460,7 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
     // Mark the old linear allocation area with a free space map so it can be
     // skipped when scanning the heap. This also puts it back in the free list
     // if it is big enough.
-    owner_->Free(owner_->top(), old_linear_size);
-    owner_->SetTopAndLimit(nullptr, nullptr);
+    owner_->EmptyAllocationInfo();
 
     owner_->heap()->incremental_marking()->OldSpaceStep(size_in_bytes -
                                                         old_linear_size);
@@ -2464,8 +2494,8 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
       // Keep the linear allocation area empty if requested to do so, just
       // return area back to the free list instead.
       owner_->Free(new_node->address() + size_in_bytes, bytes_left);
-      owner_->SetTopAndLimit(new_node->address() + size_in_bytes,
-                             new_node->address() + size_in_bytes);
+      owner_->SetAllocationInfo(new_node->address() + size_in_bytes,
+                                new_node->address() + size_in_bytes);
     } else if (bytes_left > kThreshold &&
                owner_->heap()->incremental_marking()->IsMarkingIncomplete() &&
                FLAG_incremental_marking) {
@@ -2475,14 +2505,15 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
      // we want to do another increment until the linear area is used up.
       owner_->Free(new_node->address() + size_in_bytes + linear_size,
                    new_node_size - size_in_bytes - linear_size);
-      owner_->SetTopAndLimit(new_node->address() + size_in_bytes,
-                             new_node->address() + size_in_bytes + linear_size);
+      owner_->SetAllocationInfo(
+          new_node->address() + size_in_bytes,
+          new_node->address() + size_in_bytes + linear_size);
     } else {
       DCHECK(bytes_left >= 0);
       // Normally we give the rest of the node to the allocator as its new
       // linear allocation area.
-      owner_->SetTopAndLimit(new_node->address() + size_in_bytes,
-                             new_node->address() + new_node_size);
+      owner_->SetAllocationInfo(new_node->address() + size_in_bytes,
+                                new_node->address() + new_node_size);
     }
 
     owner_->AllocationStep(new_node->address(), size_in_bytes);
@@ -2872,6 +2903,11 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
   heap()->incremental_marking()->OldSpaceStep(object_size);
   AllocationStep(object->address(), object_size);
+
+  if (heap()->incremental_marking()->black_allocation()) {
+    Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
+    MemoryChunk::IncrementLiveBytesFromGC(object, object_size);
+  }
+
   return object;
 }
...
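Tying the spaces.cc hunks together: SetAllocationInfo pre-marks a fresh linear allocation area black, MarkAllocationInfoBlack retrofits the current one when black allocation starts, and EmptyAllocationInfo clears the marks for the unused tail before returning it to the free list. A minimal model of that lifecycle and its live-byte accounting (not V8 code; the helper names are hypothetical):

#include <cstdint>

// Minimal model: live-byte accounting for a bump-pointer area on one page.
struct PageModel {
  int live_bytes = 0;
};

void Refill(PageModel* p, uintptr_t top, uintptr_t limit, bool black_allocation) {
  if (black_allocation && top != limit) {
    // real code: page->markbits()->SetRange(Index(top), Index(limit))
    p->live_bytes += static_cast<int>(limit - top);
  }
}

void Retire(PageModel* p, uintptr_t top, uintptr_t limit, bool black_allocation) {
  if (black_allocation && top != limit) {
    // real code: page->markbits()->ClearRange(Index(top), Index(limit))
    p->live_bytes -= static_cast<int>(limit - top);
  }
}
// Net effect: only bytes actually bump-allocated between Refill and Retire
// remain black and counted as live.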
@@ -250,11 +250,6 @@ class MemoryChunk {
     // within the new space during evacuation.
     PAGE_NEW_NEW_PROMOTION,
 
-    // A black page has all mark bits set to 1 (black). A black page currently
-    // cannot be iterated because it is not swept. Moreover live bytes are also
-    // not updated.
-    BLACK_PAGE,
-
     // This flag is intended to be used for testing. Works only when both
     // FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
     // are set. It forces the page to become an evacuation candidate at next
@@ -429,12 +424,10 @@ class MemoryChunk {
   int LiveBytes() {
     DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
-    DCHECK(!IsFlagSet(BLACK_PAGE) || live_byte_count_ == 0);
     return live_byte_count_;
   }
 
   void SetLiveBytes(int live_bytes) {
-    if (IsFlagSet(BLACK_PAGE)) return;
     DCHECK_GE(live_bytes, 0);
     DCHECK_LE(static_cast<size_t>(live_bytes), size_);
     live_byte_count_ = live_bytes;
@@ -2075,14 +2068,12 @@ class PagedSpace : public Space {
     allocation_info_.Reset(top, limit);
   }
 
+  void SetAllocationInfo(Address top, Address limit);
+
   // Empty space allocation info, returning unused area to free list.
-  void EmptyAllocationInfo() {
-    // Mark the old linear allocation area with a free space map so it can be
-    // skipped when scanning the heap.
-    int old_linear_size = static_cast<int>(limit() - top());
-    Free(top(), old_linear_size);
-    SetTopAndLimit(NULL, NULL);
-  }
+  void EmptyAllocationInfo();
+
+  void MarkAllocationInfoBlack();
 
   void Allocate(int bytes) { accounting_stats_.AllocateBytes(bytes); }
...
@@ -139,6 +139,8 @@ void RelocInfo::set_target_object(Object* target,
       host() != NULL &&
       target->IsHeapObject()) {
     host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
+    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
+        host(), this, HeapObject::cast(target));
   }
 }
...
@@ -1995,10 +1995,9 @@ void WeakCell::initialize(HeapObject* val) {
   // We just have to execute the generational barrier here because we never
   // mark through a weak cell and collect evacuation candidates when we process
   // all weak cells.
-  WriteBarrierMode mode =
-      Page::FromAddress(this->address())->IsFlagSet(Page::BLACK_PAGE)
-          ? UPDATE_WRITE_BARRIER
-          : UPDATE_WEAK_WRITE_BARRIER;
+  WriteBarrierMode mode = Marking::IsBlack(ObjectMarking::MarkBitFrom(this))
+                              ? UPDATE_WRITE_BARRIER
+                              : UPDATE_WEAK_WRITE_BARRIER;
   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
 }
...
@@ -6786,6 +6786,43 @@ TEST(Regress615489) {
   CHECK_LE(size_after, size_before);
 }
 
+TEST(LeftTrimFixedArrayInBlackArea) {
+  FLAG_black_allocation = true;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = heap->isolate();
+  heap->CollectAllGarbage();
+
+  i::MarkCompactCollector* collector = heap->mark_compact_collector();
+  i::IncrementalMarking* marking = heap->incremental_marking();
+
+  if (collector->sweeping_in_progress()) {
+    collector->EnsureSweepingCompleted();
+  }
+
+  CHECK(marking->IsMarking() || marking->IsStopped());
+  if (marking->IsStopped()) {
+    heap->StartIncrementalMarking();
+  }
+  CHECK(marking->IsMarking());
+  marking->StartBlackAllocationForTesting();
+
+  // Ensure that we allocate a new page, set up a bump pointer area, and
+  // perform the allocation in a black area.
+  heap::SimulateFullSpace(heap->old_space());
+  isolate->factory()->NewFixedArray(4, TENURED);
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
+  CHECK(heap->old_space()->Contains(*array));
+  CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
+
+  // Now left trim the allocated black area. A filler has to be installed
+  // for the trimmed area and all mark bits of the trimmed area have to be
+  // cleared.
+  FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
+  CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
+
+  heap::GcAndSweep(heap, OLD_SPACE);
+}
+
 TEST(Regress618958) {
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
...
@@ -110,5 +110,51 @@ TEST(Marking, TransitionWhiteGreyBlackGrey) {
   free(bitmap);
 }
 
+TEST(Marking, SetAndClearRange) {
+  Bitmap* bitmap = reinterpret_cast<Bitmap*>(
+      calloc(Bitmap::kSize / kPointerSize, kPointerSize));
+  for (int i = 0; i < 3; i++) {
+    bitmap->SetRange(i, Bitmap::kBitsPerCell + i);
+    CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0xffffffff << i);
+    CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[1], (1 << i) - 1);
+    bitmap->ClearRange(i, Bitmap::kBitsPerCell + i);
+    CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0x0);
+    CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[1], 0x0);
+  }
+  free(bitmap);
+}
+
+TEST(Marking, ClearMultipleRanges) {
+  Bitmap* bitmap = reinterpret_cast<Bitmap*>(
+      calloc(Bitmap::kSize / kPointerSize, kPointerSize));
+  CHECK(bitmap->AllBitsClearInRange(0, Bitmap::kBitsPerCell * 3));
+  bitmap->SetRange(0, Bitmap::kBitsPerCell * 3);
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0xffffffff);
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[1], 0xffffffff);
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[2], 0xffffffff);
+  CHECK(bitmap->AllBitsSetInRange(0, Bitmap::kBitsPerCell * 3));
+  bitmap->ClearRange(Bitmap::kBitsPerCell / 2, Bitmap::kBitsPerCell);
+  bitmap->ClearRange(Bitmap::kBitsPerCell,
+                     Bitmap::kBitsPerCell + Bitmap::kBitsPerCell / 2);
+  bitmap->ClearRange(Bitmap::kBitsPerCell * 2 + 8,
+                     Bitmap::kBitsPerCell * 2 + 16);
+  bitmap->ClearRange(Bitmap::kBitsPerCell * 2 + 24, Bitmap::kBitsPerCell * 3);
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0xffff);
+  CHECK(bitmap->AllBitsSetInRange(0, Bitmap::kBitsPerCell / 2));
+  CHECK(bitmap->AllBitsClearInRange(Bitmap::kBitsPerCell / 2,
+                                    Bitmap::kBitsPerCell));
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[1], 0xffff0000);
+  CHECK(
+      bitmap->AllBitsSetInRange(Bitmap::kBitsPerCell + Bitmap::kBitsPerCell / 2,
+                                2 * Bitmap::kBitsPerCell));
+  CHECK(bitmap->AllBitsClearInRange(
+      Bitmap::kBitsPerCell, Bitmap::kBitsPerCell + Bitmap::kBitsPerCell / 2));
+  CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[2], 0xff00ff);
+  CHECK(bitmap->AllBitsSetInRange(2 * Bitmap::kBitsPerCell,
+                                  2 * Bitmap::kBitsPerCell + 8));
+  CHECK(bitmap->AllBitsClearInRange(2 * Bitmap::kBitsPerCell + 24,
+                                    Bitmap::kBitsPerCell * 3));
+  free(bitmap);
+}
+
 }  // namespace internal
 }  // namespace v8
...