Commit 3d68306c authored by ulan, committed by Commit bot

[heap] Simplify clearing of normalized map caches.

Currently the incremental marking visitor treats elements of normalized
map caches weakly by coloring the caches grey without pushing them onto
the marking deque.

The mark-compact prologue then clears all normalized map caches.

We can achieve a similar effect by simply clearing the caches in the
marking visitor.
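
As a toy sketch of the two schemes (illustrative only: the types and names
below are invented and are not V8 code; the real patch operates on the
NormalizedMapCache stored at Context::NORMALIZED_MAP_CACHE_INDEX):

    #include <deque>
    #include <vector>

    enum class Color { kWhite, kGrey, kBlack };

    struct Cache {
      std::vector<int> entries;
      Color color = Color::kWhite;
      void Clear() { entries.clear(); }
    };

    struct NativeContext {
      Cache* cache = nullptr;
    };

    // Old scheme: the incremental visitor colors the cache grey but never
    // pushes it onto the marking deque, so its contents are not traced
    // (weak treatment); a separate mark-compact prologue pass then clears
    // every cache.
    void OldVisitNativeContext(NativeContext* ctx, std::deque<Cache*>& deque) {
      if (ctx->cache != nullptr && ctx->cache->color == Color::kWhite) {
        ctx->cache->color = Color::kGrey;  // grey, but deliberately not enqueued
      }
      (void)deque;  // the cache is never put on the deque
    }

    void OldMarkCompactPrologue(const std::vector<NativeContext*>& contexts) {
      for (NativeContext* ctx : contexts) {
        if (ctx->cache != nullptr) ctx->cache->Clear();
      }
    }

    // New scheme: the cache is cleared directly when its context is visited,
    // so both the grey-without-enqueue trick and the prologue pass go away.
    void NewVisitNativeContext(NativeContext* ctx) {
      if (ctx->cache != nullptr) ctx->cache->Clear();
    }

    int main() {
      Cache cache{{1, 2, 3}, Color::kWhite};
      NativeContext ctx{&cache};
      NewVisitNativeContext(&ctx);  // cache.entries is now empty
      return 0;
    }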

BUG=chromium:694255

Review-Url: https://codereview.chromium.org/2745183002
Cr-Commit-Position: refs/heads/master@{#43941}
parent dc789377
@@ -1254,26 +1254,6 @@ void Heap::EnsureFromSpaceIsCommitted() {
}
void Heap::ClearNormalizedMapCaches() {
if (isolate_->bootstrapper()->IsActive() &&
!incremental_marking()->IsMarking()) {
return;
}
Object* context = native_contexts_list();
while (!context->IsUndefined(isolate())) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
Object* cache =
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
if (!cache->IsUndefined(isolate())) {
NormalizedMapCache::cast(cache)->Clear();
}
context = Context::cast(context)->next_context_link();
}
}
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
if (start_new_space_size == 0) return;
@@ -1515,7 +1495,6 @@ void Heap::MarkCompactPrologue() {
CompletelyClearInstanceofCache();
FlushNumberStringCache();
ClearNormalizedMapCaches();
}
......
@@ -861,8 +861,6 @@ class Heap {
// scavenge operation.
inline bool ShouldBePromoted(Address old_address, int object_size);
void ClearNormalizedMapCaches();
void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
// Completely clear the Instanceof cache (to stop it keeping objects alive
......
@@ -153,7 +153,6 @@ class IncrementalMarkingMarkingVisitor
static void Initialize() {
StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
}
static const int kProgressBarScanningChunk = 32 * 1024;
@@ -199,26 +198,6 @@ class IncrementalMarkingMarkingVisitor
}
}
static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
Context* context = Context::cast(object);
// We will mark cache black with a separate pass when we finish marking.
// Note that GC can happen when the context is not fully initialized,
// so the cache can be undefined.
Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
if (!cache->IsUndefined(map->GetIsolate())) {
if (cache->IsHeapObject()) {
HeapObject* heap_obj = HeapObject::cast(cache);
// Mark the object grey if it is white, do not enqueue it into the marking
// deque.
if (ObjectMarking::IsWhite(heap_obj)) {
ObjectMarking::WhiteToGrey(heap_obj);
}
}
}
VisitNativeContext(map, context);
}
INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
Object* target = *p;
if (target->IsHeapObject()) {
......
@@ -295,6 +295,13 @@ void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
Map* map, HeapObject* object) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
Object* cache =
Context::cast(object)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
if (cache->IsNormalizedMapCache()) {
NormalizedMapCache::cast(cache)->Clear();
}
FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
void>::Visit(map, object);
}
......