Commit 17a0a575 authored by mlippautz, committed by Commit bot

[heap] Pause black allocation during GCs

There is no point in doing black allocation during the GC pause: promoted objects would then land on black pages and have to be iterated right away to keep their references alive. The marker does the same work, but it can run outside of the atomic pause.

BUG=chromium:581412

Review-Url: https://codereview.chromium.org/2862563002
Cr-Commit-Position: refs/heads/master@{#45063}
parent c3fda2b8
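To make the shape of the change easier to follow before reading the diff, here is a minimal standalone sketch (plain C++, not V8 code; `IncrementalMarkingSketch` and `PauseScope` are hypothetical stand-ins for the real `IncrementalMarking` and the `PauseBlackAllocationScope` added below) of the RAII pattern this patch introduces: black allocation is switched off for the duration of the atomic GC pause and switched back on at scope exit, but only if it was active when the scope was entered.

```cpp
#include <cstdio>

// Hypothetical stand-in for V8's IncrementalMarking; only the bits needed to
// show the pause/resume pattern are modeled here.
class IncrementalMarkingSketch {
 public:
  bool black_allocation() const { return black_allocation_; }
  void StartBlackAllocation() {
    black_allocation_ = true;
    std::printf("black allocation started\n");
  }
  void PauseBlackAllocation() {
    black_allocation_ = false;
    std::printf("black allocation paused\n");
  }

  // Scope object that pauses black allocation while a GC pause runs and
  // restores it on destruction.
  class PauseScope {
   public:
    explicit PauseScope(IncrementalMarkingSketch* marking)
        : marking_(marking), paused_(false) {
      if (marking_->black_allocation()) {
        paused_ = true;
        marking_->PauseBlackAllocation();
      }
    }
    ~PauseScope() {
      // Only resume if this scope was the one that paused it.
      if (paused_) marking_->StartBlackAllocation();
    }

   private:
    IncrementalMarkingSketch* marking_;
    bool paused_;
  };

 private:
  bool black_allocation_ = false;
};

int main() {
  IncrementalMarkingSketch marking;
  marking.StartBlackAllocation();
  {
    IncrementalMarkingSketch::PauseScope pause(&marking);
    // ...scavenge / minor mark-compact work runs here without black pages...
  }
  std::printf("active again: %d\n", marking.black_allocation() ? 1 : 0);
  return 0;
}
```

Tying the resume to scope destruction means black allocation is restored on every exit path of the GC function, which is presumably why a scope class is used rather than explicit pause/resume calls.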
@@ -38,23 +38,21 @@ HeapObject* AllocationResult::ToObjectChecked() {
   return HeapObject::cast(object_);
 }
 
-void PromotionQueue::insert(HeapObject* target, int32_t size,
-                            bool was_marked_black) {
+void PromotionQueue::insert(HeapObject* target, int32_t size) {
   if (emergency_stack_ != NULL) {
-    emergency_stack_->Add(Entry(target, size, was_marked_black));
+    emergency_stack_->Add(Entry(target, size));
     return;
   }
 
   if ((rear_ - 1) < limit_) {
     RelocateQueueHead();
-    emergency_stack_->Add(Entry(target, size, was_marked_black));
+    emergency_stack_->Add(Entry(target, size));
     return;
   }
 
   struct Entry* entry = reinterpret_cast<struct Entry*>(--rear_);
   entry->obj_ = target;
   entry->size_ = size;
-  entry->was_marked_black_ = was_marked_black;
 
   // Assert no overflow into live objects.
 #ifdef DEBUG
@@ -63,21 +61,18 @@ void PromotionQueue::insert(HeapObject* target, int32_t size,
 #endif
 }
 
-void PromotionQueue::remove(HeapObject** target, int32_t* size,
-                            bool* was_marked_black) {
+void PromotionQueue::remove(HeapObject** target, int32_t* size) {
   DCHECK(!is_empty());
   if (front_ == rear_) {
     Entry e = emergency_stack_->RemoveLast();
     *target = e.obj_;
     *size = e.size_;
-    *was_marked_black = e.was_marked_black_;
     return;
   }
 
   struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
   *target = entry->obj_;
   *size = entry->size_;
-  *was_marked_black = entry->was_marked_black_;
 
   // Assert no underflow.
   SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
@@ -1499,6 +1499,8 @@ void Heap::MinorMarkCompact() {
   TRACE_GC(tracer(), GCTracer::Scope::MC_MINOR_MC);
   AlwaysAllocateScope always_allocate(isolate());
   PauseAllocationObserversScope pause_observers(this);
+  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
+      incremental_marking());
 
   minor_mark_compact_collector()->CollectGarbage();
@@ -1677,6 +1679,9 @@ void Heap::Scavenge() {
   // Pause the inline allocation steps.
   PauseAllocationObserversScope pause_observers(this);
+  IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
+      incremental_marking());
+
   mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
 
   SetGCState(SCAVENGE);
@@ -2007,8 +2012,7 @@ Address Heap::DoScavenge(Address new_space_front) {
       while (!promotion_queue()->is_empty()) {
         HeapObject* target;
         int32_t size;
-        bool was_marked_black;
-        promotion_queue()->remove(&target, &size, &was_marked_black);
+        promotion_queue()->remove(&target, &size);
 
         // Promoted object might be already partially visited
         // during old space pointer iteration. Thus we search specifically
@@ -2016,8 +2020,7 @@ Address Heap::DoScavenge(Address new_space_front) {
         // to new space.
         DCHECK(!target->IsMap());
 
-        IterateAndScavengePromotedObject(target, static_cast<int>(size),
-                                         was_marked_black);
+        IterateAndScavengePromotedObject(target, static_cast<int>(size));
       }
     }
@@ -4834,8 +4837,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
   bool record_slots_;
 };
 
-void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
-                                            bool was_marked_black) {
+void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
   // promote objects. But we should not record any slots in non-black
@@ -4856,18 +4858,6 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
   } else {
     target->IterateBody(target->map()->instance_type(), size, &visitor);
   }
-
-  // When black allocations is on, we have to visit not already marked black
-  // objects (in new space) promoted to black pages to keep their references
-  // alive.
-  // TODO(hpayer): Implement a special promotion visitor that incorporates
-  // regular visiting and IteratePromotedObjectPointers.
-  if (!was_marked_black) {
-    if (incremental_marking()->black_allocation()) {
-      incremental_marking()->MarkGrey(target->map());
-      incremental_marking()->IterateBlackObject(target);
-    }
-  }
 }
 
 void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
@@ -434,9 +434,8 @@ class PromotionQueue {
   inline void SetNewLimit(Address limit);
   inline bool IsBelowPromotionQueue(Address to_space_top);
 
-  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
-  inline void remove(HeapObject** target, int32_t* size,
-                     bool* was_marked_black);
+  inline void insert(HeapObject* target, int32_t size);
+  inline void remove(HeapObject** target, int32_t* size);
 
   bool is_empty() {
     return (front_ == rear_) &&
@@ -445,12 +444,10 @@ class PromotionQueue {
 
  private:
   struct Entry {
-    Entry(HeapObject* obj, int32_t size, bool was_marked_black)
-        : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}
+    Entry(HeapObject* obj, int32_t size) : obj_(obj), size_(size) {}
 
     HeapObject* obj_;
-    int32_t size_ : 31;
-    bool was_marked_black_ : 1;
+    int32_t size_;
   };
 
   inline Page* GetHeadPage();
@@ -1215,8 +1212,7 @@ class Heap {
   void IterateWeakRoots(RootVisitor* v, VisitMode mode);
 
   // Iterate pointers of promoted objects.
-  void IterateAndScavengePromotedObject(HeapObject* target, int size,
-                                        bool was_marked_black);
+  void IterateAndScavengePromotedObject(HeapObject* target, int size);
 
   // ===========================================================================
   // Store buffer API. =========================================================
@@ -573,6 +573,19 @@ void IncrementalMarking::StartBlackAllocation() {
   }
 }
 
+void IncrementalMarking::PauseBlackAllocation() {
+  DCHECK(FLAG_black_allocation);
+  DCHECK(IsMarking());
+  heap()->old_space()->UnmarkAllocationInfo();
+  heap()->map_space()->UnmarkAllocationInfo();
+  heap()->code_space()->UnmarkAllocationInfo();
+  if (FLAG_trace_incremental_marking) {
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Black allocation paused\n");
+  }
+  black_allocation_ = false;
+}
+
 void IncrementalMarking::FinishBlackAllocation() {
   if (black_allocation_) {
     black_allocation_ = false;
@@ -32,6 +32,27 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
 
   enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
 
+  class PauseBlackAllocationScope {
+   public:
+    explicit PauseBlackAllocationScope(IncrementalMarking* marking)
+        : marking_(marking), paused_(false) {
+      if (marking_->black_allocation()) {
+        paused_ = true;
+        marking_->PauseBlackAllocation();
+      }
+    }
+
+    ~PauseBlackAllocationScope() {
+      if (paused_) {
+        marking_->StartBlackAllocation();
+      }
+    }
+
+   private:
+    IncrementalMarking* marking_;
+    bool paused_;
+  };
+
   static void Initialize();
 
   explicit IncrementalMarking(Heap* heap);
@@ -254,6 +275,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void StartMarking();
 
   void StartBlackAllocation();
+  void PauseBlackAllocation();
   void FinishBlackAllocation();
 
   void MarkRoots();
@@ -191,6 +191,8 @@ class ScavengingVisitor : public StaticVisitorBase {
       HeapObject* target = NULL;  // Initialization to please compiler.
       if (allocation.To(&target)) {
+        DCHECK(ObjectMarking::IsWhite(
+            target, heap->mark_compact_collector()->marking_state(target)));
         MigrateObject(heap, object, target, object_size);
 
         // Update slot to new target using CAS. A concurrent sweeper thread my
@@ -201,10 +203,7 @@ class ScavengingVisitor : public StaticVisitorBase {
             reinterpret_cast<base::AtomicWord>(target));
 
         if (object_contents == POINTER_OBJECT) {
-          // TODO(mlippautz): Query collector for marking state.
-          heap->promotion_queue()->insert(
-              target, object_size,
-              ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
+          heap->promotion_queue()->insert(target, object_size);
         }
         heap->IncrementPromotedObjectsSize(object_size);
         return true;
@@ -849,6 +849,17 @@ void Page::CreateBlackArea(Address start, Address end) {
                                                  static_cast<int>(end - start));
 }
 
+void Page::DestroyBlackArea(Address start, Address end) {
+  DCHECK(heap()->incremental_marking()->black_allocation());
+  DCHECK_EQ(Page::FromAddress(start), this);
+  DCHECK_NE(start, end);
+  DCHECK_EQ(Page::FromAddress(end - 1), this);
+  MarkingState::Internal(this).bitmap()->ClearRange(
+      AddressToMarkbitIndex(start), AddressToMarkbitIndex(end));
+  MarkingState::Internal(this).IncrementLiveBytes(
+      -static_cast<int>(end - start));
+}
+
 void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
                                         Address start_free) {
   // We do not allow partial shrink for code.
@@ -1413,6 +1424,15 @@ void PagedSpace::MarkAllocationInfoBlack() {
   }
 }
 
+void PagedSpace::UnmarkAllocationInfo() {
+  Address current_top = top();
+  Address current_limit = limit();
+  if (current_top != nullptr && current_top != current_limit) {
+    Page::FromAllocationAreaAddress(current_top)
+        ->DestroyBlackArea(current_top, current_limit);
+  }
+}
+
 // Empty space allocation info, returning unused area to free list.
 void PagedSpace::EmptyAllocationInfo() {
   // Mark the old linear allocation area with a free space map so it can be
@@ -820,6 +820,7 @@ class Page : public MemoryChunk {
   size_t ShrinkToHighWaterMark();
 
   V8_EXPORT_PRIVATE void CreateBlackArea(Address start, Address end);
+  void DestroyBlackArea(Address start, Address end);
 
 #ifdef DEBUG
   void Print();
@@ -2076,6 +2077,7 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
   void EmptyAllocationInfo();
 
   void MarkAllocationInfoBlack();
+  void UnmarkAllocationInfo();
 
   void AccountAllocatedBytes(size_t bytes) {
     accounting_stats_.AllocateBytes(bytes);