Commit 652c9522 authored by Michael Lippautz, committed by Commit Bot

[heap] MinorMC: Identify unmodified global handles on the fly

For the Scavenger we require a separate first pass over global handles to
identify unmodified nodes, because the Scavenger might already have written
forwarding pointers during scanning, making it hard to perform the proper
checks afterwards.

The minor MC does not mutate the object graph during marking and can thus
merge this identification phase into the regular root-marking phase.

Furthermore, this change moves global handle processing into the parallel
marking phase of the minor MC collector.

Bug: chromium:720477, chromium:651354
Change-Id: Id33552124264e3ab0bdf34d22ac30c19c1522707
Reviewed-on: https://chromium-review.googlesource.com/509550
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#45461}
parent 661618f3
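Before the diff itself, here is a minimal standalone sketch of the idea the commit message describes: because minor MC marking does not overwrite objects with forwarding pointers, the "is this API object unmodified?" check can run in the same loop that visits the root handles, instead of needing a separate first pass as in the Scavenger. All types below (Node, RootVisitor) are hypothetical stand-ins, not V8's real classes.

#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical stand-ins for V8's global-handle Node and RootVisitor.
struct Node {
  bool is_weak;      // handle is weak
  bool is_modified;  // object was modified after API construction
  bool active;       // must be treated as a strong root for this young GC
  int payload;       // stands in for the handle's object
};

struct RootVisitor {
  void VisitRootPointer(int payload) {
    std::cout << "visit root " << payload << "\n";
  }
};

// Merged single pass over a [start, end) slice of the node list:
// identify unmodified weak nodes *and* visit roots in the same loop.
// Safe only because marking does not move objects while this runs.
void IterateAndIdentify(std::vector<Node>& nodes, size_t start, size_t end,
                        RootVisitor* v) {
  for (size_t i = start; i < end; ++i) {
    Node& node = nodes[i];
    // A weak handle to a modified object stays "active", i.e. is kept alive.
    if (node.is_weak && node.is_modified) node.active = true;
    // Strong handles and active weak handles act as roots.
    if (!node.is_weak || node.active) v->VisitRootPointer(node.payload);
  }
}

int main() {
  std::vector<Node> nodes = {{false, false, false, 1},   // strong
                             {true, true, false, 2},     // weak, modified
                             {true, false, false, 3}};   // weak, unmodified
  RootVisitor v;
  IterateAndIdentify(nodes, 0, nodes.size(), &v);  // visits 1 and 2 only
}

The (start, end) slice parameters are what make the real version shardable across the parallel marking tasks added later in this diff.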
@@ -647,6 +647,21 @@ void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(RootVisitor* v) {
   }
 }
 
+void GlobalHandles::IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
+    RootVisitor* v, size_t start, size_t end) {
+  for (size_t i = start; i < end; ++i) {
+    Node* node = new_space_nodes_[static_cast<int>(i)];
+    if (node->IsWeak() && !JSObject::IsUnmodifiedApiObject(node->location())) {
+      node->set_active(true);
+    }
+    if (node->IsStrongRetainer() ||
+        (node->IsWeakRetainer() && !node->is_independent() &&
+         node->is_active())) {
+      v->VisitRootPointer(Root::kGlobalHandles, node->location());
+    }
+  }
+}
+
 void GlobalHandles::IdentifyWeakUnmodifiedObjects(
     WeakSlotCallback is_unmodified) {
   for (int i = 0; i < new_space_nodes_.length(); ++i) {
...
@@ -92,6 +92,8 @@ class GlobalHandles {
     number_of_phantom_handle_resets_ = 0;
   }
 
+  size_t NumberOfNewSpaceNodes() { return new_space_nodes_.length(); }
+
   // Clear the weakness of a global handle.
   static void* ClearWeakness(Object** location);
 
@@ -142,9 +144,14 @@ class GlobalHandles {
   // guaranteed to contain all handles holding new space objects (but
   // may also include old space objects).
 
-  // Iterates over strong and dependent handles. See the node above.
+  // Iterates over strong and dependent handles. See the note above.
   void IterateNewSpaceStrongAndDependentRoots(RootVisitor* v);
 
+  // Iterates over strong and dependent handles. See the note above.
+  // Also marks unmodified nodes in the same iteration.
+  void IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
+      RootVisitor* v, size_t start, size_t end);
+
   // Finds weak independent or unmodified handles satisfying
   // the callback predicate and marks them as pending. See the note above.
   void MarkNewSpaceWeakUnmodifiedObjectsPending(
...
@@ -589,6 +589,7 @@ enum Executability { NOT_EXECUTABLE, EXECUTABLE };
 
 enum VisitMode {
   VISIT_ALL,
+  VISIT_ALL_IN_MINOR_MC_MARK,
   VISIT_ALL_IN_MINOR_MC_UPDATE,
   VISIT_ALL_IN_SCAVENGE,
   VISIT_ALL_IN_SWEEP_NEWSPACE,
...
@@ -528,7 +528,6 @@ void GCTracer::PrintNVP() const {
           "minor_mc=%.2f "
           "finish_sweeping=%.2f "
           "mark=%.2f "
-          "mark.identify_global_handles=%.2f "
           "mark.seed=%.2f "
           "mark.roots=%.2f "
           "mark.weak=%.2f "
@@ -549,7 +548,6 @@ void GCTracer::PrintNVP() const {
           current_.scopes[Scope::MINOR_MC],
           current_.scopes[Scope::MINOR_MC_SWEEPING],
           current_.scopes[Scope::MINOR_MC_MARK],
-          current_.scopes[Scope::MINOR_MC_MARK_IDENTIFY_GLOBAL_HANDLES],
           current_.scopes[Scope::MINOR_MC_MARK_SEED],
           current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
           current_.scopes[Scope::MINOR_MC_MARK_WEAK],
...
@@ -98,7 +98,6 @@ enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };
   F(MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK)       \
   F(MINOR_MC_MARK)                                \
   F(MINOR_MC_MARK_GLOBAL_HANDLES)                 \
-  F(MINOR_MC_MARK_IDENTIFY_GLOBAL_HANDLES)        \
   F(MINOR_MC_MARK_SEED)                           \
   F(MINOR_MC_MARK_ROOTS)                          \
   F(MINOR_MC_MARK_WEAK)                           \
...
@@ -1727,7 +1727,7 @@ void Heap::Scavenge() {
   RootScavengeVisitor root_scavenge_visitor(this);
 
   isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
-      &IsUnmodifiedHeapObject);
+      &JSObject::IsUnmodifiedApiObject);
 
   {
     // Copy roots.
@@ -2954,21 +2954,6 @@ bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
          !InNewSpace(root(root_index));
 }
 
-bool Heap::IsUnmodifiedHeapObject(Object** p) {
-  Object* object = *p;
-  if (object->IsSmi()) return false;
-  HeapObject* heap_object = HeapObject::cast(object);
-  if (!object->IsJSObject()) return false;
-  JSObject* js_object = JSObject::cast(object);
-  if (!js_object->WasConstructedFromApiFunction()) return false;
-  Object* maybe_constructor = js_object->map()->GetConstructor();
-  if (!maybe_constructor->IsJSFunction()) return false;
-  JSFunction* constructor = JSFunction::cast(maybe_constructor);
-  if (js_object->elements()->length() != 0) return false;
-  return constructor->initial_map() == heap_object->map();
-}
-
 int Heap::FullSizeNumberStringCacheLength() {
   // Compute the size of the number string cache based on the max newspace size.
   // The number string cache has a minimum size based on twice the initial cache
@@ -5036,10 +5021,13 @@ void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
 }
 
 void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
+  const bool isMinorGC = mode == VISIT_ALL_IN_SCAVENGE ||
+                         mode == VISIT_ALL_IN_MINOR_MC_MARK ||
+                         mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
   v->VisitRootPointer(Root::kStringTable, reinterpret_cast<Object**>(
                                               &roots_[kStringTableRootIndex]));
   v->Synchronize(VisitorSynchronization::kStringTable);
-  if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
+  if (!isMinorGC && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
     // Scavenge collections have special processing for this.
     external_string_table_.IterateAll(v);
   }
@@ -5104,6 +5092,9 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
 };
 
 void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
+  const bool isMinorGC = mode == VISIT_ALL_IN_SCAVENGE ||
+                         mode == VISIT_ALL_IN_MINOR_MC_MARK ||
+                         mode == VISIT_ALL_IN_MINOR_MC_UPDATE;
   v->VisitRootPointers(Root::kStrongRootList, &roots_[0],
                        &roots_[kStrongRootListLength]);
   v->Synchronize(VisitorSynchronization::kStrongRootList);
@@ -5134,7 +5125,7 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
   // Iterate over the builtin code objects and code stubs in the
   // heap. Note that it is not necessary to iterate over code objects
   // on scavenge collections.
-  if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_MINOR_MC_UPDATE) {
+  if (!isMinorGC) {
     isolate_->builtins()->IterateBuiltins(v);
     v->Synchronize(VisitorSynchronization::kBuiltins);
     isolate_->interpreter()->IterateDispatchTable(v);
@@ -5154,6 +5145,9 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
     case VISIT_ALL_IN_SCAVENGE:
       isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
       break;
+    case VISIT_ALL_IN_MINOR_MC_MARK:
+      // Global handles are processed manually by the minor MC.
+      break;
     case VISIT_ALL_IN_MINOR_MC_UPDATE:
       isolate_->global_handles()->IterateAllNewSpaceRoots(v);
       break;
@@ -5165,7 +5159,7 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
   v->Synchronize(VisitorSynchronization::kGlobalHandles);
 
   // Iterate over eternal handles.
-  if (mode == VISIT_ALL_IN_SCAVENGE || mode == VISIT_ALL_IN_MINOR_MC_UPDATE) {
+  if (isMinorGC) {
     isolate_->eternal_handles()->IterateNewSpaceRoots(v);
   } else {
     isolate_->eternal_handles()->IterateAllRoots(v);
...
@@ -683,8 +683,6 @@ class Heap {
   // they are in new space.
   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
 
-  static bool IsUnmodifiedHeapObject(Object** p);
-
   // Zapping is needed for verify heap, and always done in debug builds.
   static inline bool ShouldZapGarbage() {
 #ifdef DEBUG
...
@@ -2297,6 +2297,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
 };
 
 class MarkingItem;
+class GlobalHandlesMarkingItem;
 class PageMarkingItem;
 class RootMarkingItem;
 class YoungGenerationMarkingTask;
@@ -2435,6 +2436,47 @@ class PageMarkingItem : public MarkingItem {
   MemoryChunk* chunk_;
 };
 
+class GlobalHandlesMarkingItem : public MarkingItem {
+ public:
+  GlobalHandlesMarkingItem(GlobalHandles* global_handles, size_t start,
+                           size_t end)
+      : global_handles_(global_handles), start_(start), end_(end) {}
+  virtual ~GlobalHandlesMarkingItem() {}
+
+  void Process(YoungGenerationMarkingTask* task) override {
+    GlobalHandlesRootMarkingVisitor visitor(task);
+    global_handles_
+        ->IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
+            &visitor, start_, end_);
+  }
+
+ private:
+  class GlobalHandlesRootMarkingVisitor : public RootVisitor {
+   public:
+    explicit GlobalHandlesRootMarkingVisitor(YoungGenerationMarkingTask* task)
+        : task_(task) {}
+
+    void VisitRootPointer(Root root, Object** p) override {
+      DCHECK(Root::kGlobalHandles == root);
+      task_->MarkObject(*p);
+    }
+
+    void VisitRootPointers(Root root, Object** start, Object** end) override {
+      DCHECK(Root::kGlobalHandles == root);
+      for (Object** p = start; p < end; p++) {
+        task_->MarkObject(*p);
+      }
+    }
+
+   private:
+    YoungGenerationMarkingTask* task_;
+  };
+
+  GlobalHandles* global_handles_;
+  size_t start_;
+  size_t end_;
+};
+
 // This root visitor walks all roots and creates items bundling objects that
 // are then processed later on. Slots have to be dereferenced as they could
 // live on the native (C++) stack, which requires filtering out the indirection.
@@ -2524,21 +2566,36 @@ static bool IsUnmarkedObjectForYoungGeneration(Heap* heap, Object** p) {
 }
 
 void MinorMarkCompactCollector::MarkRootSetInParallel() {
-  // Seed the root set (roots + old->new set).
   ItemParallelJob job(isolate()->cancelable_task_manager(),
                       &page_parallel_job_semaphore_);
+
+  // Seed the root set (roots + old->new set).
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_SEED);
+    // Create batches of roots.
     RootMarkingVisitorSeedOnly root_seed_visitor(&job);
-    heap()->IterateRoots(&root_seed_visitor, VISIT_ALL_IN_SCAVENGE);
+    heap()->IterateRoots(&root_seed_visitor, VISIT_ALL_IN_MINOR_MC_MARK);
+    // Create batches of global handles.
+    const size_t kGlobalHandlesBufferSize = 1000;
+    const size_t new_space_nodes =
+        isolate()->global_handles()->NumberOfNewSpaceNodes();
+    for (size_t start = 0; start < new_space_nodes;
+         start += kGlobalHandlesBufferSize) {
+      size_t end = start + kGlobalHandlesBufferSize;
+      if (end > new_space_nodes) end = new_space_nodes;
+      job.AddItem(new GlobalHandlesMarkingItem(isolate()->global_handles(),
+                                               start, end));
+    }
+    // Create items for each page.
     RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(
        heap(), [&job](MemoryChunk* chunk) {
          job.AddItem(new PageMarkingItem(chunk));
        });
+    // Flush any remaining objects in the seeding visitor.
     root_seed_visitor.FlushObjects();
   }
+
+  // Add tasks and run in parallel.
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS);
     const int num_tasks = NumberOfMarkingTasks();
@@ -2557,13 +2614,6 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
 
   RootMarkingVisitor root_visitor(this);
 
-  {
-    TRACE_GC(heap()->tracer(),
-             GCTracer::Scope::MINOR_MC_MARK_IDENTIFY_GLOBAL_HANDLES);
-    isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
-        &Heap::IsUnmodifiedHeapObject);
-  }
-
   MarkRootSetInParallel();
 
   // Mark rest on the main thread.
...
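The batching loop in MarkRootSetInParallel partitions the new-space node list into half-open [start, end) ranges of at most kGlobalHandlesBufferSize entries, so each GlobalHandlesMarkingItem can be processed by a different marking task; the last item simply receives the remainder. A quick standalone check of that arithmetic, using the same constant as the CL but a made-up node count:

#include <cstddef>
#include <iostream>

int main() {
  const size_t kGlobalHandlesBufferSize = 1000;  // constant from the CL
  const size_t new_space_nodes = 2500;           // example value
  for (size_t start = 0; start < new_space_nodes;
       start += kGlobalHandlesBufferSize) {
    size_t end = start + kGlobalHandlesBufferSize;
    if (end > new_space_nodes) end = new_space_nodes;
    // Prints [0,1000) [1000,2000) [2000,2500): every node is covered
    // exactly once and no two items overlap, so the parallel tasks can
    // process the items independently.
    std::cout << "[" << start << "," << end << ")\n";
  }
}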
@@ -2942,6 +2942,20 @@ void JSObject::PrintInstanceMigration(FILE* file,
   PrintF(file, "\n");
 }
 
+bool JSObject::IsUnmodifiedApiObject(Object** o) {
+  Object* object = *o;
+  if (object->IsSmi()) return false;
+  HeapObject* heap_object = HeapObject::cast(object);
+  if (!object->IsJSObject()) return false;
+  JSObject* js_object = JSObject::cast(object);
+  if (!js_object->WasConstructedFromApiFunction()) return false;
+  Object* maybe_constructor = js_object->map()->GetConstructor();
+  if (!maybe_constructor->IsJSFunction()) return false;
+  JSFunction* constructor = JSFunction::cast(maybe_constructor);
+  if (js_object->elements()->length() != 0) return false;
+  return constructor->initial_map() == heap_object->map();
+}
+
 void HeapObject::HeapObjectShortPrint(std::ostream& os) {  // NOLINT
   Heap* heap = GetHeap();
...
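IsUnmodifiedApiObject (moved here from Heap::IsUnmodifiedHeapObject) treats a JSObject as unmodified only if it was constructed from an API function, its elements backing store is still empty, and its map is still the constructor's initial map; in V8, adding a property transitions an object to a new map, so map identity reveals modification. A minimal standalone model of that reasoning, with hypothetical stand-in types rather than V8's real Map/JSFunction classes:

#include <iostream>

// Hypothetical stand-ins: adding a property moves an object to a new
// hidden class ("map"), so comparing map pointers detects modification.
struct Map {};
struct Constructor { const Map* initial_map; };
struct ApiObject {
  const Constructor* constructor;
  const Map* map;
  int elements_length;
};

bool IsUnmodifiedModel(const ApiObject& o) {
  if (o.elements_length != 0) return false;    // indexed elements were added
  return o.map == o.constructor->initial_map;  // no property transitions
}

int main() {
  Map initial, transitioned;
  Constructor ctor{&initial};
  ApiObject fresh{&ctor, &initial, 0};         // just constructed
  ApiObject touched{&ctor, &transitioned, 0};  // a property was added
  std::cout << IsUnmodifiedModel(fresh) << IsUnmodifiedModel(touched) << "\n";
  // prints: 10
}

Unmodified weak API objects are exactly the ones the GC may treat as dead without running their finalization callbacks early, which is why the predicate gates the set_active(true) call in the new iteration function.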
@@ -2113,6 +2113,8 @@ class JSReceiver: public HeapObject {
 // caching.
 class JSObject: public JSReceiver {
  public:
+  static bool IsUnmodifiedApiObject(Object** o);
+
   static MUST_USE_RESULT MaybeHandle<JSObject> New(
       Handle<JSFunction> constructor, Handle<JSReceiver> new_target,
       Handle<AllocationSite> site = Handle<AllocationSite>::null());
...