Commit 7ad2de17 authored by hpayer, committed by Commit bot

[heap] Register end of black areas to support faster filtering of invalid slots.

BUG=chromium:630386

Review-Url: https://codereview.chromium.org/2236543002
Cr-Commit-Position: refs/heads/master@{#38581}
parent e77a78cd
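For orientation, the idea of the patch as a minimal standalone sketch (plain C++; Address is reduced to uintptr_t, the page is abstracted away, and none of these names are V8's): each page records the end addresses of its black allocation areas in a lazily allocated set, and the slot filter can cheaply reject addresses whose mark bits come from a black-area boundary rather than from a real live object.

#include <cassert>
#include <cstdint>
#include <unordered_set>

using Address = uintptr_t;
const Address kPointerSize = sizeof(void*);

// Per-page registry of black-area end addresses (a stand-in for the
// black_area_end_marker_map_ added to MemoryChunk in this patch).
class BlackAreaRegistry {
 public:
  // Called when a bump-pointer range with black allocation is retired.
  void AddEndMarker(Address end) {
    bool inserted = markers_.insert(end).second;
    assert(inserted);  // each area end is registered exactly once
    (void)inserted;
  }
  // Consulted by the slot filter: a hit means the candidate the mark
  // bitmap points at is only the tail of a black area, not a live object.
  bool IsEndMarker(Address a) const { return markers_.count(a) != 0; }
  // Dropped wholesale when the page is swept or released.
  void Release() { markers_.clear(); }

 private:
  std::unordered_set<Address> markers_;
};

int main() {
  BlackAreaRegistry page;
  page.AddEndMarker(0x1038);         // a black area ended at word 0x1038
  assert(page.IsEndMarker(0x1038));  // slot filter rejects this candidate
  assert(!page.IsEndMarker(0x1040)); // ordinary addresses pass through
  page.Release();                    // sweeping clears the markers
}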
@@ -2991,6 +2991,11 @@ bool MarkCompactCollector::IsSlotInBlackObject(MemoryChunk* p, Address slot) {
   base_address += (cell_index - base_address_cell_index) *
                   Bitmap::kBitsPerCell * kPointerSize;
   Address address = base_address + offset * kPointerSize;
+
+  // If the found mark bit is part of a black area, the slot cannot be part
+  // of a live object since it is not marked.
+  if (p->IsBlackAreaEndMarker(address + kPointerSize)) return false;
+
   HeapObject* object = HeapObject::FromAddress(address);
   CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
   CHECK(object->address() < reinterpret_cast<Address>(slot));
@@ -3372,6 +3377,9 @@ int MarkCompactCollector::Sweeper::RawSweep(
   // requires valid mark bits.
   ArrayBufferTracker::FreeDead(p);
 
+  // We also release the black area markers here.
+  p->ReleaseBlackAreaEndMarkerMap();
+
   Address free_start = p->area_start();
   DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
@@ -3915,6 +3923,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
     if (p->IsEvacuationCandidate()) {
       // Will be processed in EvacuateNewSpaceAndCandidates.
       DCHECK(evacuation_candidates_.length() > 0);
+      DCHECK(!p->HasBlackAreas());
       continue;
     }
@@ -504,6 +504,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
   chunk->set_next_chunk(nullptr);
   chunk->set_prev_chunk(nullptr);
   chunk->local_tracker_ = nullptr;
+  chunk->black_area_end_marker_map_ = nullptr;
 
   DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
   DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -1286,16 +1287,26 @@ void PagedSpace::EmptyAllocationInfo() {
     DCHECK(current_limit == nullptr);
     return;
   }
-  int old_linear_size = static_cast<int>(current_limit - current_top);
-  SetTopAndLimit(NULL, NULL);
-  if (current_top != current_limit &&
-      heap()->incremental_marking()->black_allocation()) {
+
+  if (heap()->incremental_marking()->black_allocation()) {
     Page* page = Page::FromAddress(current_top);
+
+    // We have to remember the end of the current black allocation area if
+    // something was allocated in the current bump pointer range.
+    if (allocation_info_.original_top() != current_top) {
+      Address end_black_area = current_top - kPointerSize;
+      page->AddBlackAreaEndMarker(end_black_area);
+    }
+
+    // Clear the bits in the unused black area.
+    if (current_top != current_limit) {
       page->markbits()->ClearRange(page->AddressToMarkbitIndex(current_top),
                                    page->AddressToMarkbitIndex(current_limit));
       page->IncrementLiveBytes(-static_cast<int>(current_limit - current_top));
     }
-  Free(current_top, old_linear_size);
+  }
+
+  SetTopAndLimit(NULL, NULL);
+  Free(current_top, static_cast<int>(current_limit - current_top));
 }
 
 void PagedSpace::IncreaseCapacity(int size) {
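To make the pointer arithmetic above concrete, a small self-contained example with made-up addresses (illustration only, not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  // Hypothetical bump-pointer range on a 64-bit build (kPointerSize == 8);
  // all addresses are invented for illustration.
  const uintptr_t kPointerSize = 8;
  const uintptr_t original_top = 0x1000;   // top when the range was set up
  const uintptr_t current_top = 0x1040;    // 64 bytes were black-allocated
  const uintptr_t current_limit = 0x1100;  // end of the linear area

  // Something was allocated in this range, so an end marker is due.
  assert(original_top != current_top);

  // The marker names the last word of the last black object.
  const uintptr_t end_black_area = current_top - kPointerSize;
  assert(end_black_area == 0x1038);

  // The tail [current_top, current_limit) was never handed out; its
  // pre-set mark bits are cleared and live bytes shrink by that amount.
  assert(current_limit - current_top == 0xC0);  // 192 unused bytes
}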
@@ -1310,6 +1321,8 @@ void PagedSpace::ReleasePage(Page* page) {
   free_list_.EvictFreeListItems(page);
   DCHECK(!free_list_.ContainsPageFreeListItems(page));
 
+  page->ReleaseBlackAreaEndMarkerMap();
+
   if (Page::FromAllocationAreaAddress(allocation_info_.top()) == page) {
     allocation_info_.Reset(nullptr, nullptr);
   }
@@ -7,6 +7,7 @@
 #include <list>
 #include <memory>
+#include <unordered_set>
 
 #include "src/allocation.h"
 #include "src/base/atomic-utils.h"
@@ -350,7 +351,9 @@ class MemoryChunk {
       + kPointerSize  // AtomicValue prev_chunk_
       // FreeListCategory categories_[kNumberOfCategories]
       + FreeListCategory::kSize * kNumberOfCategories +
-      kPointerSize;  // LocalArrayBufferTracker* local_tracker_;
+      kPointerSize   // LocalArrayBufferTracker* local_tracker_;
+      // std::unordered_set<Address>* black_area_end_marker_map_
+      + kPointerSize;
 
   // We add some more space to the computed header size to account for
   // missing alignment requirements in our computation.
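The kSize bookkeeping above follows a pattern worth spelling out: every pointer-sized field added to MemoryChunk must also be added to this hand-maintained sum. A tiny standalone illustration of the pattern (hypothetical two-field header, not MemoryChunk's real layout):

#include <cstddef>

struct Header {
  void* local_tracker;
  void* black_area_end_marker_map;
};

static const size_t kPointerSize = sizeof(void*);
static const size_t kHeaderSize =
    kPointerSize     // local_tracker
    + kPointerSize;  // black_area_end_marker_map

// Keeping the sum in sync with the fields is what the patch does above.
static_assert(kHeaderSize == sizeof(Header),
              "every new field must also be added to the size sum");

int main() {}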
@@ -592,6 +595,33 @@ class MemoryChunk {
   void InsertAfter(MemoryChunk* other);
   void Unlink();
 
+  void ReleaseBlackAreaEndMarkerMap() {
+    if (black_area_end_marker_map_) {
+      delete black_area_end_marker_map_;
+      black_area_end_marker_map_ = nullptr;
+    }
+  }
+
+  bool IsBlackAreaEndMarker(Address address) {
+    if (black_area_end_marker_map_) {
+      return black_area_end_marker_map_->find(address) !=
+             black_area_end_marker_map_->end();
+    }
+    return false;
+  }
+
+  void AddBlackAreaEndMarker(Address address) {
+    if (!black_area_end_marker_map_) {
+      black_area_end_marker_map_ = new std::unordered_set<Address>();
+    }
+    auto ret = black_area_end_marker_map_->insert(address);
+    USE(ret);
+    // Check that we inserted a new black area end marker.
+    DCHECK(ret.second);
+  }
+
+  bool HasBlackAreas() { return black_area_end_marker_map_ != nullptr; }
+
  protected:
   static MemoryChunk* Initialize(Heap* heap, Address base, size_t size,
                                  Address area_start, Address area_end,
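The lifecycle of the marker map, mirrored with standalone stand-ins (not V8 types): lazily allocated on the first insert, queried during slot filtering, and dropped when the page is swept or released.

#include <cassert>
#include <cstdint>
#include <unordered_set>

int main() {
  std::unordered_set<uintptr_t>* map = nullptr;  // black_area_end_marker_map_

  // AddBlackAreaEndMarker: lazy allocation plus duplicate check.
  auto add = [&](uintptr_t a) {
    if (!map) map = new std::unordered_set<uintptr_t>();
    bool inserted = map->insert(a).second;
    assert(inserted);  // each black area end is registered once
    (void)inserted;
  };
  // IsBlackAreaEndMarker: a missing map means "no black areas on this page".
  auto is_marker = [&](uintptr_t a) { return map && map->count(a) != 0; };

  assert(!is_marker(0x2038));  // nothing registered yet
  add(0x2038);                 // end of a black allocation area
  assert(is_marker(0x2038));
  assert(!is_marker(0x2040));  // one word past the area: no marker

  // ReleaseBlackAreaEndMarkerMap: delete and null out the map.
  delete map;
  map = nullptr;
  assert(!is_marker(0x2038));
}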
@@ -660,6 +690,9 @@ class MemoryChunk {
   LocalArrayBufferTracker* local_tracker_;
 
+  // Stores the end addresses of black areas.
+  std::unordered_set<Address>* black_area_end_marker_map_;
+
  private:
   void InitializeReservedMemory() { reservation_.Reset(); }
@@ -1481,14 +1514,22 @@ class HeapObjectIterator : public ObjectIterator {
 // space.
 class AllocationInfo {
  public:
-  AllocationInfo() : top_(nullptr), limit_(nullptr) {}
-  AllocationInfo(Address top, Address limit) : top_(top), limit_(limit) {}
+  AllocationInfo() : original_top_(nullptr), top_(nullptr), limit_(nullptr) {}
+  AllocationInfo(Address top, Address limit)
+      : original_top_(top), top_(top), limit_(limit) {}
 
   void Reset(Address top, Address limit) {
+    original_top_ = top;
     set_top(top);
     set_limit(limit);
   }
 
+  Address original_top() {
+    SLOW_DCHECK(top_ == NULL ||
+                (reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0);
+    return original_top_;
+  }
+
   INLINE(void set_top(Address top)) {
     SLOW_DCHECK(top == NULL ||
                 (reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0);
@@ -1522,6 +1563,8 @@ class AllocationInfo {
 #endif
 
  private:
+  // The original top address when the allocation info was initialized.
+  Address original_top_;
   // Current allocation top.
   Address top_;
   // Current allocation limit.
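What original_top_ buys can be shown with a minimal stand-in (hypothetical type, not V8's AllocationInfo): after Reset() the two pointers coincide, and any bump allocation separates them, so the comparison in EmptyAllocationInfo answers exactly "was anything allocated in this range?"

#include <cassert>
#include <cstdint>

struct LinearArea {            // stand-in for AllocationInfo
  uintptr_t original_top = 0;  // top at the last Reset()
  uintptr_t top = 0;           // current bump pointer
  uintptr_t limit = 0;

  void Reset(uintptr_t t, uintptr_t l) {
    original_top = t;
    top = t;
    limit = l;
  }
  uintptr_t Allocate(uintptr_t bytes) {  // unchecked bump allocation
    uintptr_t result = top;
    top += bytes;
    return result;
  }
};

int main() {
  LinearArea area;
  area.Reset(0x1000, 0x1100);
  assert(area.original_top == area.top);  // nothing allocated yet
  area.Allocate(0x40);
  // original_top != top tells EmptyAllocationInfo that a black-area end
  // marker has to be recorded for this range.
  assert(area.original_top != area.top);
}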
@@ -6986,6 +6986,48 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
   heap::GcAndSweep(heap, OLD_SPACE);
 }
 
+TEST(SlotFilteringAfterBlackAreas) {
+  FLAG_black_allocation = true;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = heap->isolate();
+  MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
+  heap->CollectAllGarbage();
+
+  i::MarkCompactCollector* collector = heap->mark_compact_collector();
+  i::IncrementalMarking* marking = heap->incremental_marking();
+
+  if (collector->sweeping_in_progress()) {
+    collector->EnsureSweepingCompleted();
+  }
+
+  CHECK(marking->IsMarking() || marking->IsStopped());
+  if (marking->IsStopped()) {
+    heap->StartIncrementalMarking();
+  }
+  CHECK(marking->IsMarking());
+  marking->StartBlackAllocationForTesting();
+
+  // Ensure that we allocate a new page, set up a bump pointer area, and
+  // perform the allocation in a black area.
+  heap::SimulateFullSpace(heap->old_space());
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED);
+  Page* page = Page::FromAddress(array->address());
+
+  // After allocation we empty the allocation info to limit the black area
+  // to the allocated array.
+  heap->old_space()->EmptyAllocationInfo();
+
+  // Slots in the black area are part of the black object.
+  CHECK(mark_compact_collector->IsSlotInBlackObject(page, array->address()));
+  CHECK(mark_compact_collector->IsSlotInBlackObject(
+      page, array->address() + array->Size() - kPointerSize));
+
+  // Slots after the black area are not part of the black object and have to
+  // be filtered out.
+  CHECK(!mark_compact_collector->IsSlotInBlackObject(
+      page, array->address() + array->Size()));
+}
+
 TEST(Regress618958) {
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());