Commit f3817e9b authored by Michael Lippautz, committed by Commit Bot

[heap] Scavenger: Cache compacting property

Bug: chromium:738865
Change-Id: I02cb7ea48a1dfaec25bf702b09242d537fe612f4
Reviewed-on: https://chromium-review.googlesource.com/589271
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46939}
parent 8bc526bc
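The gist of the change: the Scavenger previously re-queried heap()->incremental_marking()->IsCompacting() for every promoted object. Since that property is fixed for the duration of a scavenge, it is now sampled once in the constructor and stored in a const bool. Below is a minimal standalone sketch of the pattern; the classes are simplified stand-ins for the V8 types, not the actual implementations.

```cpp
// Sketch only: Heap, IncrementalMarking, and Scavenger here are
// illustrative stand-ins, not V8's real classes.
class IncrementalMarking {
 public:
  bool IsMarking() const { return is_marking_; }
  bool IsCompacting() const { return is_compacting_; }

 private:
  bool is_marking_ = false;
  bool is_compacting_ = false;
};

class Heap {
 public:
  IncrementalMarking* incremental_marking() { return &incremental_marking_; }

 private:
  IncrementalMarking incremental_marking_;
};

class Scavenger {
 public:
  explicit Scavenger(Heap* heap)
      : heap_(heap),
        // Both properties are fixed while a scavenge runs, so the pointer
        // chasing through heap -> incremental_marking() happens once here
        // instead of once per promoted object.
        is_incremental_marking_(heap->incremental_marking()->IsMarking()),
        is_compacting_(heap->incremental_marking()->IsCompacting()) {}

  Heap* heap() const { return heap_; }

  void IterateAndScavengePromotedObject(/* HeapObject* target, int size */) {
    // The hot path now reads a cached const bool.
    if (is_compacting_) {
      // ... record slots pointing at evacuation candidates ...
    }
  }

 private:
  Heap* const heap_;
  const bool is_incremental_marking_;
  const bool is_compacting_;
};
```

The same motivation explains the first hunk below: the is_incremental_marking parameter is dropped from the call site in Heap::Scavenge, since the constructor can now derive both flags from heap itself.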
@@ -1920,14 +1920,13 @@ void Heap::Scavenge() {
   const int kMainThreadId = 0;
   Scavenger* scavengers[kMaxScavengerTasks];
   const bool is_logging = IsLogging(isolate());
-  const bool is_incremental_marking = incremental_marking()->IsMarking();
   const int num_scavenge_tasks = NumberOfScavengeTasks();
   Scavenger::Barrier barrier;
   CopiedList copied_list(num_scavenge_tasks);
   PromotionList promotion_list(num_scavenge_tasks);
   for (int i = 0; i < num_scavenge_tasks; i++) {
-    scavengers[i] = new Scavenger(this, is_logging, is_incremental_marking,
-                                  &copied_list, &promotion_list, i);
+    scavengers[i] =
+        new Scavenger(this, is_logging, &copied_list, &promotion_list, i);
     job.AddTask(new ScavengingTask(this, scavengers[i], &barrier));
   }
@@ -58,6 +58,19 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
   const bool record_slots_;
 };
 
+Scavenger::Scavenger(Heap* heap, bool is_logging, CopiedList* copied_list,
+                     PromotionList* promotion_list, int task_id)
+    : heap_(heap),
+      promotion_list_(promotion_list, task_id),
+      copied_list_(copied_list, task_id),
+      local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
+      copied_size_(0),
+      promoted_size_(0),
+      allocator_(heap),
+      is_logging_(is_logging),
+      is_incremental_marking_(heap->incremental_marking()->IsMarking()),
+      is_compacting_(heap->incremental_marking()->IsCompacting()) {}
+
 void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
   // We are not collecting slots on new space objects during mutation
   // thus we have to scan for pointers to evacuation candidates when we
@@ -65,9 +78,9 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
   // objects. Grey object's slots would be rescanned.
   // White object might not survive until the end of collection;
   // it would be a violation of the invariant to record its slots.
-  const bool record_slots = heap()->incremental_marking()->IsCompacting() &&
-                            ObjectMarking::IsBlack<AccessMode::ATOMIC>(
-                                target, MarkingState::Internal(target));
+  const bool record_slots =
+      is_compacting_ && ObjectMarking::IsBlack<AccessMode::ATOMIC>(
+                            target, MarkingState::Internal(target));
   IterateAndScavengePromotedObjectsVisitor visitor(heap(), this, record_slots);
   if (target->IsJSFunction()) {
     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
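Note how the hunk above splits the predicate: whether compaction is in progress is a per-cycle property and safe to cache, while the object's mark color is per-object state that must still be queried live via ObjectMarking::IsBlack. A hedged sketch of that split, with plain bools standing in for the real queries (the helper below is hypothetical, not V8 code):

```cpp
// Hypothetical helper isolating the shape of the decision.
// |is_compacting| is cached at Scavenger construction (per-cycle invariant);
// |target_is_black| must be read per object, since marking state differs
// from object to object.
inline bool ShouldRecordSlots(bool is_compacting, bool target_is_black) {
  return is_compacting && target_is_black;
}
```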
@@ -63,17 +63,8 @@ class Scavenger {
     bool done_;
   };
 
-  Scavenger(Heap* heap, bool is_logging, bool is_incremental_marking,
-            CopiedList* copied_list, PromotionList* promotion_list, int task_id)
-      : heap_(heap),
-        promotion_list_(promotion_list, task_id),
-        copied_list_(copied_list, task_id),
-        local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
-        copied_size_(0),
-        promoted_size_(0),
-        allocator_(heap),
-        is_logging_(is_logging),
-        is_incremental_marking_(is_incremental_marking) {}
+  Scavenger(Heap* heap, bool is_logging, CopiedList* copied_list,
+            PromotionList* promotion_list, int task_id);
 
   // Scavenges an object |object| referenced from slot |p|. |object| is required
   // to be in from space.
@@ -142,8 +133,9 @@ class Scavenger {
   size_t copied_size_;
   size_t promoted_size_;
   LocalAllocator allocator_;
-  bool is_logging_;
-  bool is_incremental_marking_;
+  const bool is_logging_;
+  const bool is_incremental_marking_;
+  const bool is_compacting_;
 
   friend class IterateAndScavengePromotedObjectsVisitor;
 };
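The header hunk also turns the flags into const members. Beyond documenting that they are per-scavenge invariants, this lets the compiler reject accidental writes. A tiny sketch of that effect, using a hypothetical class:

```cpp
// Hypothetical example: a const member fixed at construction cannot be
// reassigned later, so the per-cycle invariant is enforced by the compiler.
class PerCycleFlags {
 public:
  explicit PerCycleFlags(bool compacting) : is_compacting_(compacting) {}

  void Reset() {
    // is_compacting_ = false;  // would not compile: member is const
  }

 private:
  const bool is_compacting_;
};
```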