Commit 1a7a6057 authored by ulan, committed by Commit bot

Revert of [heap] Simplify clearing of normalized map caches. (patchset #1 id:1 of https://codereview.chromium.org/2745183002/ )

Reason for revert:
https://bugs.chromium.org/p/v8/issues/detail?id=6135

Original issue's description:
> [heap] Simplify clearing of normalized map caches.
>
> Currently the incremental marking visitor treats elements of normalized
> map caches weakly by coloring the caches grey without pushing them onto
> the marking deque.
>
> The mark-compact prologue then clears all normalized map caches.
>
> We can achieve a similar effect by just clearing the caches in the marking
> visitor.
>
> BUG=chromium:694255
>
> Review-Url: https://codereview.chromium.org/2745183002
> Cr-Commit-Position: refs/heads/master@{#43941}
> Committed: https://chromium.googlesource.com/v8/v8/+/3d68306c71b17ebcb306b4e2ed8cae110c52229c
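
To make the contrast concrete, here is a minimal, self-contained C++ sketch of the two strategies the description above compares. MarkColor, Cache, VisitCacheWeakly, VisitCacheClearing, and the MarkCompactPrologue function below are hypothetical stand-ins for V8's marking machinery, not V8 APIs.

// A simplified model, not V8 code: all types and functions here are
// illustrative stand-ins for V8's marking machinery.
#include <cstdio>
#include <vector>

enum class MarkColor { kWhite, kGrey, kBlack };

struct Cache {
  MarkColor color = MarkColor::kWhite;
  std::vector<int> entries;
  void Clear() { entries.clear(); }
};

// Old scheme: the incremental visitor only greys the cache. Because the
// cache is never pushed onto the marking deque, its entries are never
// traced and stay weak; the mark-compact prologue later clears every cache.
void VisitCacheWeakly(Cache* cache) {
  if (cache->color == MarkColor::kWhite) cache->color = MarkColor::kGrey;
  // Deliberately not enqueued for tracing.
}

void MarkCompactPrologue(std::vector<Cache*>& caches) {
  for (Cache* c : caches) c->Clear();
}

// New scheme from the reverted CL: clear the cache directly while visiting,
// removing the need for the separate prologue pass.
void VisitCacheClearing(Cache* cache) { cache->Clear(); }

int main() {
  Cache a, b;
  a.entries = {1, 2};
  b.entries = {3};
  std::vector<Cache*> caches = {&a, &b};

  // Old scheme: grey during marking, clear in the prologue.
  for (Cache* c : caches) VisitCacheWeakly(c);
  MarkCompactPrologue(caches);
  std::printf("old scheme: %zu and %zu entries left\n", a.entries.size(),
              b.entries.size());

  // New scheme: a single pass clears immediately.
  a.entries = {1, 2};
  VisitCacheClearing(&a);
  std::printf("new scheme: %zu entries left\n", a.entries.size());
  return 0;
}

The key point in the old scheme is that greying without enqueuing keeps the cache object alive but leaves its entries untraced, so they remain weak until the prologue clears them.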

TBR=hpayer@chromium.org,verwaest@chromium.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=chromium:694255

Review-Url: https://codereview.chromium.org/2771703003
Cr-Commit-Position: refs/heads/master@{#44056}
parent 8aa3459f
@@ -1255,6 +1255,26 @@ void Heap::EnsureFromSpaceIsCommitted() {
 }
 
+void Heap::ClearNormalizedMapCaches() {
+  if (isolate_->bootstrapper()->IsActive() &&
+      !incremental_marking()->IsMarking()) {
+    return;
+  }
+
+  Object* context = native_contexts_list();
+  while (!context->IsUndefined(isolate())) {
+    // GC can happen when the context is not fully initialized,
+    // so the cache can be undefined.
+    Object* cache =
+        Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
+    if (!cache->IsUndefined(isolate())) {
+      NormalizedMapCache::cast(cache)->Clear();
+    }
+    context = Context::cast(context)->next_context_link();
+  }
+}
+
 void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   if (start_new_space_size == 0) return;

@@ -1510,6 +1530,7 @@ void Heap::MarkCompactPrologue() {
   CompletelyClearInstanceofCache();
 
   FlushNumberStringCache();
+  ClearNormalizedMapCaches();
 }
...
@@ -862,6 +862,8 @@ class Heap {
   // scavenge operation.
   inline bool ShouldBePromoted(Address old_address, int object_size);
 
+  void ClearNormalizedMapCaches();
+
   void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
 
   // Completely clear the Instanceof cache (to stop it keeping objects alive
...
@@ -153,6 +153,7 @@ class IncrementalMarkingMarkingVisitor
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
+    table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
   }
 
   static const int kProgressBarScanningChunk = 32 * 1024;

@@ -198,6 +199,26 @@ class IncrementalMarkingMarkingVisitor
     }
   }
 
+  static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
+    Context* context = Context::cast(object);
+
+    // We will mark the cache black with a separate pass when we finish
+    // marking. Note that GC can happen when the context is not fully
+    // initialized, so the cache can be undefined.
+    Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
+    if (!cache->IsUndefined(map->GetIsolate())) {
+      if (cache->IsHeapObject()) {
+        HeapObject* heap_obj = HeapObject::cast(cache);
+        // Mark the object grey if it is white; do not enqueue it into the
+        // marking deque.
+        if (ObjectMarking::IsWhite(heap_obj)) {
+          ObjectMarking::WhiteToGrey(heap_obj);
+        }
+      }
+    }
+    VisitNativeContext(map, context);
+  }
+
   INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
     Object* target = *p;
     if (target->IsHeapObject()) {
...
@@ -295,13 +295,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
 
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
     Map* map, HeapObject* object) {
-  // GC can happen when the context is not fully initialized,
-  // so the cache can be undefined.
-  Object* cache =
-      Context::cast(object)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
-  if (cache->IsNormalizedMapCache()) {
-    NormalizedMapCache::cast(cache)->Clear();
-  }
   FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                    void>::Visit(map, object);
 }
...