Commit ad815a7b authored by ulan's avatar ulan Committed by Commit bot

[heap] Refactor marking deque.

This patch moves management of the marking deque backing store into the
MarkingDeque class, which will simplify unmapping of the backing store in
a concurrent task.

BUG=

Review-Url: https://codereview.chromium.org/2439063002
Cr-Commit-Position: refs/heads/master@{#40523}
parent 231c8ac0
......@@ -1457,11 +1457,7 @@ void Heap::MarkCompactEpilogue() {
PreprocessStackTraces();
DCHECK(incremental_marking()->IsStopped());
// We finished a marking cycle. We can uncommit the marking deque until
// we start marking again.
mark_compact_collector()->marking_deque()->Uninitialize();
mark_compact_collector()->EnsureMarkingDequeIsCommitted(
MarkCompactCollector::kMinMarkingDequeSize);
mark_compact_collector()->marking_deque()->StopUsing();
}
......
......@@ -538,8 +538,7 @@ void IncrementalMarking::StartMarking() {
PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
heap_->mark_compact_collector()->EnsureMarkingDequeIsCommittedAndInitialize(
MarkCompactCollector::kMaxMarkingDequeSize);
heap_->mark_compact_collector()->marking_deque()->StartUsing();
ActivateIncrementalWriteBarrier();
......
......@@ -58,8 +58,6 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
compacting_(false),
black_allocation_(false),
have_code_to_deoptimize_(false),
marking_deque_memory_(NULL),
marking_deque_memory_committed_(0),
code_flusher_(nullptr),
sweeper_(heap) {
}
......@@ -240,9 +238,7 @@ void MarkCompactCollector::SetUp() {
DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
EnsureMarkingDequeIsReserved();
EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
marking_deque()->SetUp();
if (FLAG_flush_code) {
code_flusher_ = new CodeFlusher(isolate());
......@@ -255,7 +251,7 @@ void MarkCompactCollector::SetUp() {
void MarkCompactCollector::TearDown() {
AbortCompaction();
delete marking_deque_memory_;
marking_deque()->TearDown();
delete code_flusher_;
}
......@@ -783,6 +779,10 @@ void MarkCompactCollector::Prepare() {
EnsureSweepingCompleted();
}
if (heap()->incremental_marking()->IsSweeping()) {
heap()->incremental_marking()->Stop();
}
// If concurrent unmapping tasks are still running, we should wait for
// them here.
heap()->memory_allocator()->unmapper()->WaitUntilCompleted();
......@@ -799,6 +799,7 @@ void MarkCompactCollector::Prepare() {
if (heap_->UsingEmbedderHeapTracer()) {
heap_->embedder_heap_tracer()->AbortTracing();
}
marking_deque()->Clear();
was_marked_incrementally_ = false;
}
......@@ -2106,85 +2107,70 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
}
}
void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
DCHECK(!marking_deque()->in_use());
if (marking_deque_memory_ == NULL) {
marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize);
marking_deque_memory_committed_ = 0;
}
if (marking_deque_memory_ == NULL) {
V8::FatalProcessOutOfMemory("EnsureMarkingDequeIsReserved");
// Reserves (but does not yet commit) kMaxSize bytes of virtual memory for
// the deque's backing store. Called once when the collector is set up;
// the memory is committed lazily in StartUsing()/EnsureCommitted().
void MarkingDeque::SetUp() {
backing_store_ = new base::VirtualMemory(kMaxSize);
backing_store_committed_size_ = 0;
// NOTE(review): a plain `new` does not return nullptr on failure in
// standard C++ (it throws or aborts), so this check looks dead unless
// V8 builds with a nothrow allocator here — confirm before relying on it.
if (backing_store_ == nullptr) {
V8::FatalProcessOutOfMemory("MarkingDeque::SetUp");
}
}
// Releases the virtual-memory reservation created in SetUp().
void MarkingDeque::TearDown() {
  delete backing_store_;
}
void MarkCompactCollector::EnsureMarkingDequeIsCommitted(size_t max_size) {
// If the marking deque is too small, we try to allocate a bigger one.
// If that fails, make do with a smaller one.
CHECK(!marking_deque()->in_use());
for (size_t size = max_size; size >= kMinMarkingDequeSize; size >>= 1) {
base::VirtualMemory* memory = marking_deque_memory_;
size_t currently_committed = marking_deque_memory_committed_;
if (currently_committed == size) return;
if (currently_committed > size) {
bool success = marking_deque_memory_->Uncommit(
reinterpret_cast<Address>(marking_deque_memory_->address()) + size,
currently_committed - size);
if (success) {
marking_deque_memory_committed_ = size;
return;
}
UNREACHABLE();
}
bool success = memory->Commit(
reinterpret_cast<Address>(memory->address()) + currently_committed,
size - currently_committed,
false); // Not executable.
if (success) {
marking_deque_memory_committed_ = size;
return;
}
// Prepares the deque for a marking phase: commits backing-store memory if
// necessary and resets the ring-buffer indices. Safe to call when already
// in use (mark-compact after incremental marking has started) — then it is
// a no-op.
void MarkingDeque::StartUsing() {
  if (in_use_) {
    // This can happen in mark-compact GC if the incremental marker already
    // started using the marking deque.
    return;
  }
  in_use_ = true;
  EnsureCommitted();
  array_ = reinterpret_cast<HeapObject**>(backing_store_->address());
  // With --force-marking-deque-overflows we deliberately use a tiny deque
  // so that overflow handling is exercised.
  size_t size = FLAG_force_marking_deque_overflows
                    ? 64 * kPointerSize
                    : backing_store_committed_size_;
  // The size in slots must be a power of two so that `mask_` works as a
  // wrap-around mask for the ring buffer.
  DCHECK(
      base::bits::IsPowerOfTwo32(static_cast<uint32_t>(size / kPointerSize)));
  mask_ = static_cast<int>((size / kPointerSize) - 1);
  top_ = bottom_ = 0;
  overflowed_ = false;
}
void MarkCompactCollector::InitializeMarkingDeque() {
DCHECK(!marking_deque()->in_use());
DCHECK(marking_deque_memory_committed_ > 0);
Address addr = static_cast<Address>(marking_deque_memory_->address());
size_t size = marking_deque_memory_committed_;
if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize;
marking_deque()->Initialize(addr, addr + size);
// Ends a marking phase: requires the deque to be fully drained, resets the
// ring-buffer state, and decommits the backing store so the memory is
// returned between marking cycles.
void MarkingDeque::StopUsing() {
DCHECK(IsEmpty());
DCHECK(!overflowed_);
top_ = bottom_ = mask_ = 0;
Uncommit();
in_use_ = false;
}
void MarkingDeque::Initialize(Address low, Address high) {
DCHECK(!in_use_);
HeapObject** obj_low = reinterpret_cast<HeapObject**>(low);
HeapObject** obj_high = reinterpret_cast<HeapObject**>(high);
array_ = obj_low;
mask_ = base::bits::RoundDownToPowerOfTwo32(
static_cast<uint32_t>(obj_high - obj_low)) -
1;
// Drops all queued objects without touching the committed backing store.
void MarkingDeque::Clear() {
DCHECK(in_use_);
top_ = bottom_ = 0;
overflowed_ = false;
// NOTE(review): redundant — the DCHECK above already requires in_use_ to
// be true; this looks like a leftover from the removed Initialize().
// Verify against upstream before keeping it.
in_use_ = true;
}
// Decommits the entire committed portion of the backing store. The virtual
// memory reservation itself stays alive (it is freed only in TearDown()).
void MarkingDeque::Uncommit() {
DCHECK(in_use_);
bool success = backing_store_->Uncommit(backing_store_->address(),
backing_store_committed_size_);
backing_store_committed_size_ = 0;
// Uncommitting already-committed pages is expected to always succeed.
CHECK(success);
}
void MarkingDeque::EnsureCommitted() {
DCHECK(in_use_);
if (backing_store_committed_size_ > 0) return;
void MarkingDeque::Uninitialize(bool aborting) {
if (!aborting) {
DCHECK(IsEmpty());
DCHECK(!overflowed_);
for (size_t size = kMaxSize; size >= kMinSize; size /= 2) {
if (backing_store_->Commit(backing_store_->address(), size, false)) {
backing_store_committed_size_ = size;
break;
}
}
if (backing_store_committed_size_ == 0) {
V8::FatalProcessOutOfMemory("MarkingDeque::EnsureCommitted");
}
DCHECK(in_use_);
top_ = bottom_ = 0xdecbad;
in_use_ = false;
}
class MarkCompactCollector::ObjectStatsVisitor
......@@ -2260,11 +2246,7 @@ void MarkCompactCollector::MarkLiveObjects() {
if (was_marked_incrementally_) {
incremental_marking->Finalize();
} else {
// Abort any pending incremental activities e.g. incremental sweeping.
incremental_marking->Stop();
if (marking_deque()->in_use()) {
marking_deque()->Uninitialize(true);
}
CHECK(incremental_marking->IsStopped());
}
}
......@@ -2273,8 +2255,7 @@ void MarkCompactCollector::MarkLiveObjects() {
state_ = MARK_LIVE_OBJECTS;
#endif
EnsureMarkingDequeIsCommittedAndInitialize(
MarkCompactCollector::kMaxMarkingDequeSize);
marking_deque()->StartUsing();
{
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_PREPARE_CODE_FLUSH);
......
......@@ -53,15 +53,21 @@ class ObjectMarking : public AllStatic {
class MarkingDeque {
public:
MarkingDeque()
: array_(NULL),
: backing_store_(nullptr),
backing_store_committed_size_(0),
array_(nullptr),
top_(0),
bottom_(0),
mask_(0),
overflowed_(false),
in_use_(false) {}
void Initialize(Address low, Address high);
void Uninitialize(bool aborting = false);
void SetUp();
void TearDown();
void StartUsing();
void StopUsing();
void Clear();
inline bool IsFull() { return ((top_ + 1) & mask_) == bottom_; }
......@@ -69,8 +75,6 @@ class MarkingDeque {
bool overflowed() const { return overflowed_; }
bool in_use() const { return in_use_; }
void ClearOverflowed() { overflowed_ = false; }
void SetOverflowed() { overflowed_ = true; }
......@@ -118,6 +122,14 @@ class MarkingDeque {
void set_top(int top) { top_ = top; }
private:
static const size_t kMaxSize = 4 * MB;
static const size_t kMinSize = 256 * KB;
void EnsureCommitted();
void Uncommit();
base::VirtualMemory* backing_store_;
size_t backing_store_committed_size_;
HeapObject** array_;
// array_[(top - 1) & mask_] is the top element in the deque. The Deque is
// empty when top_ == bottom_. It is full when top_ + 1 == bottom
......@@ -470,21 +482,6 @@ class MarkCompactCollector {
MarkingDeque* marking_deque() { return &marking_deque_; }
static const size_t kMaxMarkingDequeSize = 4 * MB;
static const size_t kMinMarkingDequeSize = 256 * KB;
void EnsureMarkingDequeIsCommittedAndInitialize(size_t max_size) {
if (!marking_deque()->in_use()) {
EnsureMarkingDequeIsCommitted(max_size);
InitializeMarkingDeque();
}
}
void EnsureMarkingDequeIsCommitted(size_t max_size);
void EnsureMarkingDequeIsReserved();
void InitializeMarkingDeque();
Sweeper& sweeper() { return sweeper_; }
private:
......@@ -708,8 +705,6 @@ class MarkCompactCollector {
bool have_code_to_deoptimize_;
base::VirtualMemory* marking_deque_memory_;
size_t marking_deque_memory_committed_;
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
......
......@@ -51,13 +51,9 @@ using v8::Just;
TEST(MarkingDeque) {
CcTest::InitializeVM();
int mem_size = 20 * kPointerSize;
byte* mem = NewArray<byte>(20*kPointerSize);
Address low = reinterpret_cast<Address>(mem);
Address high = low + mem_size;
MarkingDeque s;
s.Initialize(low, high);
s.SetUp();
s.StartUsing();
Address original_address = reinterpret_cast<Address>(&s);
Address current_address = original_address;
while (!s.IsFull()) {
......@@ -72,7 +68,8 @@ TEST(MarkingDeque) {
}
CHECK_EQ(original_address, current_address);
DeleteArray(mem);
s.StopUsing();
s.TearDown();
}
TEST(Promotion) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment