Commit 71a3cc54 authored by Ulan Degenbaev, committed by Commit Bot

Revert "[heap] Use weak cell in normalized map cache."

This reverts commit f3c8da56.

Reason for revert: GC stress failures
https://build.chromium.org/p/client.v8/builders/V8%20Linux64%20GC%20Stress%20-%20custom%20snapshot/builds/15396

Original change's description:
> [heap] Use weak cell in normalized map cache.
> 
> This replaces ad-hoc weakness in normalized map cache with weak cell.
> 
> Bug: chromium:694255
> Change-Id: I6a12301b2176fe3723b56178a65582cfb412f7d2
> Reviewed-on: https://chromium-review.googlesource.com/704834
> Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#48344}

TBR=ulan@chromium.org,mlippautz@chromium.org

Change-Id: I0b2d39a1dcff6416998ab36506ee950220c87e89
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:694255
Reviewed-on: https://chromium-review.googlesource.com/705194
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48349}
parent 34de39bf
......@@ -203,11 +203,15 @@ class ConcurrentMarkingVisitor final
}
// NOTE(review): this span is a diff rendering whose +/- markers were lost;
// it fuses the body removed by the revert (unconditional visit returning
// `size`) with the body the revert restores (IsGrey check + bailout).
// As plain code, everything after the first `return size;` would be
// unreachable — TODO confirm against the upstream revert diff.
int VisitNativeContext(Map* map, Context* object) {
if (!ShouldVisit(object)) return 0;
// Removed by this revert: visit the context strongly and report its size.
int size = Context::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
Context::BodyDescriptorWeak::IterateBody(object, size, this);
return size;
// Restored by this revert: visit, then push the context onto the bailout
// worklist so the main thread can apply the ad-hoc weakness of the
// normalized map cache; report 0 so the object is not counted as done here.
if (marking_state_.IsGrey(object)) {
int size = Context::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
Context::BodyDescriptorWeak::IterateBody(object, size, this);
// TODO(ulan): implement proper weakness for normalized map cache
// and remove this bailout.
bailout_.Push(object);
}
return 0;
}
int VisitTransitionArray(Map* map, TransitionArray* array) {
......
......@@ -1473,6 +1473,26 @@ void Heap::EnsureFromSpaceIsCommitted() {
}
void Heap::ClearNormalizedMapCaches() {
if (isolate_->bootstrapper()->IsActive() &&
!incremental_marking()->IsMarking()) {
return;
}
Object* context = native_contexts_list();
while (!context->IsUndefined(isolate())) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
Object* cache =
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
if (!cache->IsUndefined(isolate())) {
NormalizedMapCache::cast(cache)->Clear();
}
context = Context::cast(context)->next_context_link();
}
}
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
if (start_new_space_size == 0) return;
......@@ -1713,6 +1733,7 @@ void Heap::MarkCompactPrologue() {
isolate_->compilation_cache()->MarkCompactPrologue();
FlushNumberStringCache();
ClearNormalizedMapCaches();
}
......
......@@ -813,6 +813,8 @@ class Heap {
// scavenge operation.
inline bool ShouldBePromoted(Address old_address);
void ClearNormalizedMapCaches();
void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
inline uint32_t HashSeed();
......
......@@ -244,6 +244,22 @@ class IncrementalMarkingMarkingVisitor final
return object_size;
}
V8_INLINE int VisitNativeContext(Map* map, Context* context) {
  // The normalized map cache is blackened by a separate pass when marking
  // finishes; here it is only flipped from white to grey, without being
  // enqueued on the marking deque. A GC can happen while the context is
  // not fully initialized, so the cache slot may still hold undefined.
  Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
  if (!cache->IsUndefined(map->GetIsolate()) && cache->IsHeapObject()) {
    incremental_marking_->marking_state()->WhiteToGrey(
        HeapObject::cast(cache));
  }
  return Parent::VisitNativeContext(map, context);
}
V8_INLINE void VisitPointer(HeapObject* host, Object** p) final {
Object* target = *p;
if (target->IsHeapObject()) {
......@@ -882,6 +898,18 @@ void IncrementalMarking::Hurry() {
}
}
}
// Tail of IncrementalMarking::Hurry (function header is outside this hunk):
// final pass that blackens each native context's normalized map cache.
Object* context = heap_->native_contexts_list();
while (!context->IsUndefined(heap_->isolate())) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
// NOTE(review): the slot is cast to HeapObject before the IsUndefined
// check — presumably safe because undefined is itself a heap object in
// this VM, but TODO confirm.
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
marking_state()->GreyToBlack(cache);
}
context = Context::cast(context)->next_context_link();
}
}
......
......@@ -1402,10 +1402,8 @@ void NormalizedMapCache::NormalizedMapCacheVerify() {
// NOTE(review): diff rendering with lost +/- markers — the weak-cell branch
// (removed by this revert) and the bare-map branch (restored by it) appear
// fused; braces do not balance in this view. TODO confirm against the
// upstream revert diff.
Isolate* isolate = GetIsolate();
for (int i = 0; i < length(); i++) {
Object* e = FixedArray::get(i);
// Removed by this revert: entries were weak cells pointing at maps.
if (e->IsWeakCell()) {
if (!WeakCell::cast(e)->cleared()) {
Map::cast(WeakCell::cast(e)->value())->DictionaryMapVerify();
}
// Restored by this revert: entries are bare maps or undefined.
if (e->IsMap()) {
Map::cast(e)->DictionaryMapVerify();
} else {
CHECK(e->IsUndefined(isolate));
}
......
......@@ -6108,23 +6108,19 @@ MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
// NOTE(review): NormalizedMapCache::Get with its signature cut by the hunk
// header; the weak-cell lookup (removed by this revert) and the bare-map
// lookup (restored by it) are fused below — TODO confirm against the
// upstream revert diff.
PropertyNormalizationMode mode) {
DisallowHeapAllocation no_gc;
Object* value = FixedArray::get(GetIndex(fast_map));
// Removed by this revert: miss if the slot is not a live weak cell.
if (!value->IsWeakCell() || WeakCell::cast(value)->cleared()) {
// Restored by this revert: miss if the slot is not an equivalent map.
if (!value->IsMap() ||
!Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
return MaybeHandle<Map>();
}
// Removed by this revert: unwrap the weak cell and re-check equivalence.
Map* normalized_map = Map::cast(WeakCell::cast(value)->value());
if (!normalized_map->EquivalentToForNormalization(*fast_map, mode)) {
return MaybeHandle<Map>();
}
return handle(normalized_map);
// Restored by this revert: return the cached map directly.
return handle(Map::cast(value));
}
// NOTE(review): both Set signatures appear fused by the diff rendering —
// the three-argument weak-cell form is removed by this revert and the
// two-argument form restored. TODO confirm against the upstream diff.
void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map,
Handle<WeakCell> normalized_map_weak_cell) {
void NormalizedMapCache::Set(Handle<Map> fast_map,
Handle<Map> normalized_map) {
DisallowHeapAllocation no_gc;
DCHECK(normalized_map->is_dictionary_map());
// Removed by this revert: store the weak cell wrapping the map.
DCHECK_EQ(normalized_map_weak_cell->value(), *normalized_map);
FixedArray::set(GetIndex(fast_map), *normalized_map_weak_cell);
// Restored by this revert: store the map itself.
FixedArray::set(GetIndex(fast_map), *normalized_map);
}
......@@ -9031,8 +9027,7 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, PropertyNormalizationMode mode,
// NOTE(review): fragment of Map::Normalize (starts mid-function); the
// weak-cell call site removed by this revert is fused with the restored
// two-argument call — TODO confirm against the upstream diff.
} else {
new_map = Map::CopyNormalized(fast_map, mode);
if (use_cache) {
// Removed by this revert: wrap the new map in a weak cell before caching.
Handle<WeakCell> cell = Map::WeakCellForMap(new_map);
cache->Set(fast_map, new_map, cell);
// Restored by this revert: cache the map directly.
cache->Set(fast_map, new_map);
isolate->counters()->maps_normalized()->Increment();
}
#if V8_TRACE_MAPS
......
......@@ -917,8 +917,7 @@ class NormalizedMapCache : public FixedArray {
// NOTE(review): header fragment of class NormalizedMapCache; both Set
// declarations appear because the diff rendering fused the removed
// three-argument (weak cell) form with the restored two-argument form.
MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
PropertyNormalizationMode mode);
// Removed by this revert:
void Set(Handle<Map> fast_map, Handle<Map> normalized_map,
Handle<WeakCell> normalized_map_weak_cell);
// Restored by this revert:
void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
void Clear();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment