Commit a007dfc1 authored by hpayer, committed by Commit bot

[heap] Move typed slot filtering logic into sweeper.

Additionally, remove all code related to the old-style slot filtering and black area end markers.

BUG=chromium:648568

Review-Url: https://chromiumcodereview.appspot.com/2440683002
Cr-Commit-Position: refs/heads/master@{#40494}
parent 1b08c7a7
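The mechanics of the change: sweeping already walks the black (live) objects of a page in address order, so the gaps between them are exactly the dead memory whose recorded old-to-new slots must be invalidated. For OLD_SPACE and MAP_SPACE pages the sweeper removes each dead range from the regular slot set immediately; for CODE_SPACE pages it collects the ranges into a map keyed by start offset and prunes the typed slot set in one pass at the end. A minimal standalone sketch of the range collection (illustrative names and page-relative offsets, not the V8 API):

#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

// Illustrative stand-in for a live (black) object on a page.
struct LiveObject {
  uint32_t start;
  uint32_t size;
};

// Walk live objects in address order; every gap between them is a dead
// range, recorded as [free_start, free_end) keyed by its start offset.
// This mirrors how RawSweep fills the free_ranges map below.
std::map<uint32_t, uint32_t> CollectFreeRanges(
    const std::vector<LiveObject>& live_objects, uint32_t area_start,
    uint32_t area_end) {
  std::map<uint32_t, uint32_t> free_ranges;
  uint32_t free_start = area_start;
  for (const LiveObject& object : live_objects) {
    if (object.start > free_start) {
      free_ranges.insert(std::make_pair(free_start, object.start));
    }
    free_start = object.start + object.size;
  }
  if (free_start < area_end) {
    free_ranges.insert(std::make_pair(free_start, area_end));
  }
  return free_ranges;
}

int main() {
  // Two live objects on a 1024-byte area leave three dead ranges:
  // [0, 128), [192, 512) and [640, 1024).
  std::vector<LiveObject> live = {{128, 64}, {512, 128}};
  for (const auto& range : CollectFreeRanges(live, 0, 1024)) {
    std::cout << "[" << range.first << ", " << range.second << ")\n";
  }
  return 0;
}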
BUILD.gn
@@ -1391,7 +1391,6 @@ v8_source_set("v8_base") {
"src/heap/objects-visiting.cc",
"src/heap/objects-visiting.h",
"src/heap/page-parallel-job.h",
"src/heap/remembered-set.cc",
"src/heap/remembered-set.h",
"src/heap/scavenge-job.cc",
"src/heap/scavenge-job.h",
src/heap/incremental-marking.cc
@@ -587,9 +587,6 @@ void IncrementalMarking::FinishBlackAllocation() {
}
void IncrementalMarking::AbortBlackAllocation() {
for (Page* page : *heap()->old_space()) {
page->ReleaseBlackAreaEndMarkerMap();
}
if (FLAG_trace_incremental_marking) {
heap()->isolate()->PrintWithTimestamp(
"[IncrementalMarking] Black allocation aborted\n");
src/heap/mark-compact.cc
@@ -2872,128 +2872,6 @@ static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
return String::cast(*p);
}
bool MarkCompactCollector::IsSlotInBlackObject(MemoryChunk* p, Address slot) {
Space* owner = p->owner();
DCHECK(owner != heap_->lo_space() && owner != nullptr);
USE(owner);
// We may be part of a black area.
if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(slot))) {
return true;
}
uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
unsigned int cell_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
MarkBit::CellType index_mask = 1u << Bitmap::IndexInCell(mark_bit_index);
MarkBit::CellType* cells = p->markbits()->cells();
Address base_address = p->area_start();
unsigned int base_address_cell_index = Bitmap::IndexToCell(
Bitmap::CellAlignIndex(p->AddressToMarkbitIndex(base_address)));
// Check if the slot points to the start of an object. This can happen e.g.
// when we left trim a fixed array. Such slots are invalid and we can remove
// them.
if (index_mask > 1) {
if ((cells[cell_index] & index_mask) != 0 &&
(cells[cell_index] & (index_mask >> 1)) == 0) {
return false;
}
} else {
// Left trimming moves the mark bits so we cannot be in the very first cell.
DCHECK(cell_index != base_address_cell_index);
if ((cells[cell_index] & index_mask) != 0 &&
(cells[cell_index - 1] & (1u << Bitmap::kBitIndexMask)) == 0) {
return false;
}
}
// Check if the object is in the current cell.
MarkBit::CellType slot_mask;
if ((cells[cell_index] == 0) ||
(base::bits::CountTrailingZeros32(cells[cell_index]) >
base::bits::CountTrailingZeros32(cells[cell_index] | index_mask))) {
// If we are already in the first cell, there is no live object.
if (cell_index == base_address_cell_index) return false;
// If not, find a preceding cell that has a mark bit set.
do {
cell_index--;
} while (cell_index > base_address_cell_index && cells[cell_index] == 0);
// The slot must be in a dead object if there are no preceding cells that
// have mark bits set.
if (cells[cell_index] == 0) {
return false;
}
// The object is in a preceding cell. Set the mask to find any object.
slot_mask = ~0u;
} else {
// We are interested in object mark bits right before the slot.
slot_mask = index_mask + (index_mask - 1);
}
MarkBit::CellType current_cell = cells[cell_index];
CHECK(current_cell != 0);
// Find the last live object in the cell.
unsigned int leading_zeros =
base::bits::CountLeadingZeros32(current_cell & slot_mask);
CHECK(leading_zeros != Bitmap::kBitsPerCell);
int offset = static_cast<int>(Bitmap::kBitIndexMask - leading_zeros) - 1;
base_address += (cell_index - base_address_cell_index) *
Bitmap::kBitsPerCell * kPointerSize;
Address address = base_address + offset * kPointerSize;
// If the found mark bit is part of a black area, the slot cannot be part
// of a live object since it is not marked.
if (p->IsBlackAreaEndMarker(address + kPointerSize)) return false;
HeapObject* object = HeapObject::FromAddress(address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
CHECK(object->address() < reinterpret_cast<Address>(slot));
if ((object->address() + kPointerSize) <= slot &&
(object->address() + object->Size()) > slot) {
// If the slot is within the last found object in the cell, the slot is
// in a live object.
// Slots pointing to the first word of an object are invalid and removed.
// This can happen when we move the object header while left trimming.
return true;
}
return false;
}
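For context on the bitmap arithmetic in the deleted IsSlotInBlackObject above: each mark bit covers one pointer-sized word, and the bits are packed into 32-bit cells, so an address maps to a (cell index, bit mask) pair. A simplified model of that mapping, assuming 8-byte pointers and 32-bit cells (the constants are illustrative stand-ins, not taken from V8's headers):

#include <cassert>
#include <cstdint>

// One mark bit per 8-byte word, 32 bits per bitmap cell.
constexpr uint32_t kPointerSizeLog2 = 3;
constexpr uint32_t kBitsPerCellLog2 = 5;
constexpr uint32_t kBitIndexMask = (1u << kBitsPerCellLog2) - 1;

struct MarkBitPosition {
  uint32_t cell_index;  // which 32-bit cell of the bitmap
  uint32_t index_mask;  // single-bit mask within that cell
};

// Mirrors AddressToMarkbitIndex plus the cell_index/index_mask
// computation at the top of IsSlotInBlackObject.
MarkBitPosition FromAddress(uintptr_t page_start, uintptr_t addr) {
  uint32_t mark_bit_index =
      static_cast<uint32_t>((addr - page_start) >> kPointerSizeLog2);
  return {mark_bit_index >> kBitsPerCellLog2,
          1u << (mark_bit_index & kBitIndexMask)};
}

int main() {
  // The 40th word on the page lands in cell 1, bit 8.
  MarkBitPosition pos = FromAddress(0x10000, 0x10000 + 40 * 8);
  assert(pos.cell_index == 1);
  assert(pos.index_mask == (1u << 8));
  return 0;
}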
HeapObject* MarkCompactCollector::FindBlackObjectBySlotSlow(Address slot) {
Page* p = Page::FromAddress(slot);
Space* owner = p->owner();
if (owner == heap_->lo_space() || owner == nullptr) {
Object* large_object = heap_->lo_space()->FindObject(slot);
// This object has to exist, otherwise we would not have recorded a slot
// for it.
CHECK(large_object->IsHeapObject());
HeapObject* large_heap_object = HeapObject::cast(large_object);
if (IsMarked(large_heap_object)) {
return large_heap_object;
}
return nullptr;
}
LiveObjectIterator<kBlackObjects> it(p);
HeapObject* object = nullptr;
while ((object = it.Next()) != nullptr) {
int size = object->Size();
if (object->address() > slot) return nullptr;
if (object->address() <= slot && slot < (object->address() + size)) {
return object;
}
}
return nullptr;
}
void MarkCompactCollector::EvacuateNewSpacePrologue() {
NewSpace* new_space = heap()->new_space();
// Append the list of new space pages to be processed.
@@ -3316,23 +3194,38 @@ class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
}
};
MarkCompactCollector::Sweeper::ClearOldToNewSlotsMode
MarkCompactCollector::Sweeper::GetClearOldToNewSlotsMode(Page* p) {
AllocationSpace identity = p->owner()->identity();
if (p->old_to_new_slots() &&
(identity == OLD_SPACE || identity == MAP_SPACE)) {
return MarkCompactCollector::Sweeper::CLEAR_REGULAR_SLOTS;
} else if (p->typed_old_to_new_slots() && identity == CODE_SPACE) {
return MarkCompactCollector::Sweeper::CLEAR_TYPED_SLOTS;
}
return MarkCompactCollector::Sweeper::DO_NOT_CLEAR;
}
int MarkCompactCollector::Sweeper::RawSweep(
Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode) {
Space* space = p->owner();
AllocationSpace identity = space->identity();
DCHECK_NOT_NULL(space);
DCHECK(free_list_mode == IGNORE_FREE_LIST || identity == OLD_SPACE ||
identity == CODE_SPACE || identity == MAP_SPACE);
DCHECK(free_list_mode == IGNORE_FREE_LIST || space->identity() == OLD_SPACE ||
space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
// If there are old-to-new slots on that page, we have to filter out slots
// pointing into dead memory, which is freed by the sweeper.
ClearOldToNewSlotsMode slots_clearing_mode = GetClearOldToNewSlotsMode(p);
// The free ranges map is used for filtering typed slots.
std::map<uint32_t, uint32_t> free_ranges;
// Before we sweep objects on the page, we free dead array buffers, which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p);
// We also release the black area markers here.
p->ReleaseBlackAreaEndMarkerMap();
Address free_start = p->area_start();
DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
@@ -3352,8 +3245,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
LiveObjectIterator<kBlackObjects> it(p);
HeapObject* object = NULL;
bool clear_slots =
p->old_to_new_slots() && (identity == OLD_SPACE || identity == MAP_SPACE);
while ((object = it.Next()) != NULL) {
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
Address free_end = object->address();
@@ -3372,9 +3264,13 @@ int MarkCompactCollector::Sweeper::RawSweep(
ClearRecordedSlots::kNo);
}
if (clear_slots) {
if (slots_clearing_mode == CLEAR_REGULAR_SLOTS) {
RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
} else if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
free_ranges.insert(std::pair<uint32_t, uint32_t>(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(free_end - p->address())));
}
}
Map* map = object->synchronized_map();
@@ -3406,12 +3302,21 @@ int MarkCompactCollector::Sweeper::RawSweep(
ClearRecordedSlots::kNo);
}
if (clear_slots) {
if (slots_clearing_mode == CLEAR_REGULAR_SLOTS) {
RememberedSet<OLD_TO_NEW>::RemoveRange(p, free_start, p->area_end(),
SlotSet::KEEP_EMPTY_BUCKETS);
} else if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
free_ranges.insert(std::pair<uint32_t, uint32_t>(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(p->area_end() - p->address())));
}
}
// Clear invalid typed slots after collecting all free ranges.
if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
p->typed_old_to_new_slots()->RemoveInvaldSlots(free_ranges);
}
// Clear the mark bits of that page and reset live bytes count.
p->ClearLiveness();
@@ -3837,9 +3742,6 @@ int MarkCompactCollector::Sweeper::ParallelSweepPage(Page* page,
if (identity == NEW_SPACE) {
RawSweep(page, IGNORE_FREE_LIST, free_space_mode);
} else {
if (identity == CODE_SPACE) {
RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(heap_, page);
}
max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
}
@@ -3907,7 +3809,6 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
if (p->IsEvacuationCandidate()) {
// Will be processed in EvacuateNewSpaceAndCandidates.
DCHECK(evacuation_candidates_.length() > 0);
DCHECK(!p->HasBlackAreas());
continue;
}
src/heap/mark-compact.h
@@ -288,6 +288,11 @@ class MarkCompactCollector {
enum FreeListRebuildingMode { REBUILD_FREE_LIST, IGNORE_FREE_LIST };
enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };
enum ClearOldToNewSlotsMode {
DO_NOT_CLEAR,
CLEAR_REGULAR_SLOTS,
CLEAR_TYPED_SLOTS
};
typedef std::deque<Page*> SweepingList;
typedef List<Page*> SweptList;
@@ -326,6 +331,8 @@ class MarkCompactCollector {
private:
static const int kAllocationSpaces = LAST_PAGED_SPACE + 1;
static ClearOldToNewSlotsMode GetClearOldToNewSlotsMode(Page* p);
template <typename Callback>
void ForAllSweepingSpaces(Callback callback) {
for (int i = 0; i < kAllocationSpaces; i++) {
@@ -478,16 +485,6 @@ class MarkCompactCollector {
void InitializeMarkingDeque();
// The following two methods may only be called after marking, when the
// whole transitive closure is known. They must be called before sweeping,
// while mark bits are still intact.
bool IsSlotInBlackObject(MemoryChunk* p, Address slot);
HeapObject* FindBlackObjectBySlotSlow(Address slot);
// Removes all the slots in the slot buffers that are within the given
// address range.
void RemoveObjectSlots(Address start_slot, Address end_slot);
Sweeper& sweeper() { return sweeper_; }
private:
src/heap/remembered-set.cc (deleted)
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/remembered-set.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact.h"
#include "src/heap/slot-set.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/macro-assembler.h"
namespace v8 {
namespace internal {
template <PointerDirection direction>
void RememberedSet<direction>::ClearInvalidTypedSlots(Heap* heap,
MemoryChunk* chunk) {
STATIC_ASSERT(direction == OLD_TO_NEW);
DCHECK(chunk->owner()->identity() == CODE_SPACE);
TypedSlotSet* slots = GetTypedSlotSet(chunk);
if (slots != nullptr) {
slots->Iterate(
[heap, chunk](SlotType type, Address host_addr, Address addr) {
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(host_addr))) {
return KEEP_SLOT;
} else {
return REMOVE_SLOT;
}
},
TypedSlotSet::KEEP_EMPTY_CHUNKS);
}
}
template <PointerDirection direction>
bool RememberedSet<direction>::IsValidSlot(Heap* heap, MemoryChunk* chunk,
Object** slot) {
STATIC_ASSERT(direction == OLD_TO_NEW);
// A slot is only valid if its host object is black, i.e. live. Slots
// inside non-black (dead) objects have to be filtered out.
return heap->mark_compact_collector()->IsSlotInBlackObject(
chunk, reinterpret_cast<Address>(slot));
}
template void RememberedSet<OLD_TO_NEW>::ClearInvalidTypedSlots(
Heap* heap, MemoryChunk* chunk);
} // namespace internal
} // namespace v8
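For contrast with the new free-range filtering in the sweeper, the deleted code above dropped a typed slot whenever the mark bit of its host object was no longer black. A rough standalone model of that predicate (hypothetical types; the real version iterates the TypedSlotSet in place and returns KEEP_SLOT or REMOVE_SLOT):

#include <algorithm>
#include <cstdint>
#include <functional>
#include <vector>

// Hypothetical stand-in for a recorded typed slot.
struct RecordedSlot {
  uintptr_t host_addr;  // address of the object containing the slot
};

// A slot survives only if the object hosting it is still marked black
// (live); everything else sits in dead memory and is removed.
void ClearSlotsInDeadObjects(
    std::vector<RecordedSlot>* slots,
    const std::function<bool(uintptr_t)>& is_black) {
  slots->erase(std::remove_if(slots->begin(), slots->end(),
                              [&is_black](const RecordedSlot& slot) {
                                return !is_black(slot.host_addr);
                              }),
               slots->end());
}

int main() {
  std::vector<RecordedSlot> slots = {{0x100}, {0x200}};
  // Pretend only the object at 0x100 is still marked black.
  ClearSlotsInDeadObjects(&slots,
                          [](uintptr_t addr) { return addr == 0x100; });
  return slots.size() == 1 ? 0 : 1;
}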
src/heap/slot-set.h
@@ -5,6 +5,7 @@
#ifndef V8_SLOT_SET_H
#define V8_SLOT_SET_H
#include <map>
#include <stack>
#include "src/allocation.h"
@@ -460,6 +461,28 @@ class TypedSlotSet {
}
}
void RemoveInvaldSlots(std::map<uint32_t, uint32_t>& invalid_ranges) {
Chunk* chunk = chunk_.Value();
while (chunk != nullptr) {
TypedSlot* buffer = chunk->buffer.Value();
int count = chunk->count.Value();
for (int i = 0; i < count; i++) {
uint32_t host_offset = buffer[i].host_offset();
std::map<uint32_t, uint32_t>::iterator upper_bound =
invalid_ranges.upper_bound(host_offset);
if (upper_bound == invalid_ranges.begin()) continue;
// upper_bound points to the first invalid range starting after the given
// slot. Hence, we have to go to the previous element.
upper_bound--;
DCHECK_LE(upper_bound->first, host_offset);
if (upper_bound->second > host_offset) {
buffer[i].Clear();
}
}
chunk = chunk->next.Value();
}
}
private:
static const int kInitialBufferSize = 100;
static const int kMaxBufferSize = 16 * KB;
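The upper_bound lookup in RemoveInvaldSlots above assumes the invalid ranges are disjoint, half-open intervals [start, end) keyed by their start offset: upper_bound yields the first range starting strictly after the slot's host offset, so only its predecessor can contain that offset. A minimal standalone version of the same check (illustrative, outside the V8 tree):

#include <cassert>
#include <cstdint>
#include <map>

// Returns true if offset lies in one of the half-open [start, end) ranges.
// Assumes the ranges are disjoint, as the sweeper produces them.
bool InInvalidRange(const std::map<uint32_t, uint32_t>& ranges,
                    uint32_t offset) {
  std::map<uint32_t, uint32_t>::const_iterator it =
      ranges.upper_bound(offset);
  if (it == ranges.begin()) return false;  // every range starts after offset
  --it;                                    // now it->first <= offset
  return offset < it->second;
}

int main() {
  std::map<uint32_t, uint32_t> ranges = {{100, 200}, {300, 400}};
  assert(InInvalidRange(ranges, 100));   // range start is inclusive
  assert(!InInvalidRange(ranges, 200));  // range end is exclusive
  assert(!InInvalidRange(ranges, 250));  // gap between the two ranges
  assert(InInvalidRange(ranges, 399));
  return 0;
}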
src/heap/spaces.cc
@@ -527,7 +527,6 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
chunk->black_area_end_marker_map_ = nullptr;
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
@@ -1385,12 +1384,6 @@ void PagedSpace::EmptyAllocationInfo() {
if (heap()->incremental_marking()->black_allocation()) {
Page* page = Page::FromAllocationAreaAddress(current_top);
// We have to remember the end of the current black allocation area if
// something was allocated in the current bump pointer range.
if (allocation_info_.original_top() != current_top) {
Address end_black_area = current_top - kPointerSize;
page->AddBlackAreaEndMarker(end_black_area);
}
// Clear the bits in the unused black area.
if (current_top != current_limit) {
@@ -1416,8 +1409,6 @@ void PagedSpace::ReleasePage(Page* page) {
free_list_.EvictFreeListItems(page);
DCHECK(!free_list_.ContainsPageFreeListItems(page));
page->ReleaseBlackAreaEndMarkerMap();
if (Page::FromAllocationAreaAddress(allocation_info_.top()) == page) {
allocation_info_.Reset(nullptr, nullptr);
}
src/heap/spaces.h
@@ -343,9 +343,7 @@ class MemoryChunk {
+ kPointerSize // AtomicValue prev_chunk_
// FreeListCategory categories_[kNumberOfCategories]
+ FreeListCategory::kSize * kNumberOfCategories +
kPointerSize // LocalArrayBufferTracker* local_tracker_
// std::unordered_set<Address>* black_area_end_marker_map_
+ kPointerSize;
kPointerSize; // LocalArrayBufferTracker* local_tracker_
// We add some more space to the computed header size to account for missing
// alignment requirements in our computation.
@@ -576,33 +574,6 @@ class MemoryChunk {
void InsertAfter(MemoryChunk* other);
void Unlink();
void ReleaseBlackAreaEndMarkerMap() {
if (black_area_end_marker_map_) {
delete black_area_end_marker_map_;
black_area_end_marker_map_ = nullptr;
}
}
bool IsBlackAreaEndMarker(Address address) {
if (black_area_end_marker_map_) {
return black_area_end_marker_map_->find(address) !=
black_area_end_marker_map_->end();
}
return false;
}
void AddBlackAreaEndMarker(Address address) {
if (!black_area_end_marker_map_) {
black_area_end_marker_map_ = new std::unordered_set<Address>();
}
auto ret = black_area_end_marker_map_->insert(address);
USE(ret);
// Check that we inserted a new black area end marker.
DCHECK(ret.second);
}
bool HasBlackAreas() { return black_area_end_marker_map_ != nullptr; }
protected:
static MemoryChunk* Initialize(Heap* heap, Address base, size_t size,
Address area_start, Address area_end,
@@ -669,9 +640,6 @@ class MemoryChunk {
LocalArrayBufferTracker* local_tracker_;
// Stores the end addresses of black areas.
std::unordered_set<Address>* black_area_end_marker_map_;
private:
void InitializeReservedMemory() { reservation_.Reset(); }
src/v8.gyp
@@ -930,7 +930,6 @@
'heap/objects-visiting.cc',
'heap/objects-visiting.h',
'heap/page-parallel-job.h',
'heap/remembered-set.cc',
'heap/remembered-set.h',
'heap/scavenge-job.h',
'heap/scavenge-job.cc',
test/cctest/heap/test-heap.cc
@@ -6923,49 +6923,6 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap::GcAndSweep(heap, OLD_SPACE);
}
TEST(SlotFilteringAfterBlackAreas) {
FLAG_black_allocation = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting);
}
CHECK(marking->IsMarking());
marking->StartBlackAllocationForTesting();
// Ensure that we allocate a new page, set up a bump pointer area, and
// perform the allocation in a black area.
heap::SimulateFullSpace(heap->old_space());
Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED);
Page* page = Page::FromAddress(array->address());
// After allocation we empty the allocation info to limit the black area
// to the allocated array.
heap->old_space()->EmptyAllocationInfo();
// Slots in the black area are part of the black object.
CHECK(mark_compact_collector->IsSlotInBlackObject(page, array->address()));
CHECK(mark_compact_collector->IsSlotInBlackObject(
page, array->address() + array->Size() - kPointerSize));
// Slots after the black area are not part of the black object and have to
// be filtered out.
CHECK(!mark_compact_collector->IsSlotInBlackObject(
page, array->address() + array->Size()));
}
TEST(Regress618958) {
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
test/unittests/heap/slot-set-unittest.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <limits>
#include <map>
#include "src/globals.h"
#include "src/heap/slot-set.h"
@@ -186,5 +187,35 @@ TEST(TypedSlotSet, Iterate) {
EXPECT_EQ(added / 2, iterated);
}
TEST(TypedSlotSet, RemoveInvalidSlots) {
TypedSlotSet set(0);
const int kHostDelta = 100;
uint32_t entries = 10;
for (uint32_t i = 0; i < entries; i++) {
SlotType type = static_cast<SlotType>(i % CLEARED_SLOT);
set.Insert(type, i * kHostDelta, i * kHostDelta);
}
std::map<uint32_t, uint32_t> invalid_ranges;
for (uint32_t i = 1; i < entries; i += 2) {
invalid_ranges.insert(
std::pair<uint32_t, uint32_t>(i * kHostDelta, i * kHostDelta + 1));
}
set.RemoveInvaldSlots(invalid_ranges);
for (std::map<uint32_t, uint32_t>::iterator it = invalid_ranges.begin();
it != invalid_ranges.end(); ++it) {
uint32_t start = it->first;
uint32_t end = it->second;
set.Iterate(
[start, end](SlotType slot_type, Address host_addr, Address slot_addr) {
CHECK(reinterpret_cast<uintptr_t>(host_addr) < start ||
reinterpret_cast<uintptr_t>(host_addr) >= end);
return KEEP_SLOT;
},
TypedSlotSet::KEEP_EMPTY_CHUNKS);
}
}
} // namespace internal
} // namespace v8