Commit f3c8da56 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Use weak cell in normalized map cache.

This replaces ad-hoc weakness in normalized map cache with weak cell.

Bug: chromium:694255
Change-Id: I6a12301b2176fe3723b56178a65582cfb412f7d2
Reviewed-on: https://chromium-review.googlesource.com/704834
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48344}
parent 8e456031
......@@ -203,15 +203,11 @@ class ConcurrentMarkingVisitor final
}
// Visits a native context during concurrent marking.
//
// NOTE(review): this span contained both the pre-change body (which pushed
// the context onto the bailout worklist because the normalized map cache had
// ad-hoc weakness) and, unreachable after its `return 0;`, the post-change
// body. Per the commit message ("Use weak cell in normalized map cache"),
// the cache now uses a WeakCell, so the bailout is no longer needed and the
// post-change implementation below is the intended one.
int VisitNativeContext(Map* map, Context* object) {
  // ShouldVisit atomically claims the object; bail out if another marker
  // already processed it.
  if (!ShouldVisit(object)) return 0;
  int size = Context::BodyDescriptorWeak::SizeOf(map, object);
  VisitMapPointer(object, object->map_slot());
  // Iterate the context body with weak semantics for the weak slots.
  Context::BodyDescriptorWeak::IterateBody(object, size, this);
  return size;
}
int VisitTransitionArray(Map* map, TransitionArray* array) {
......
......@@ -1473,26 +1473,6 @@ void Heap::EnsureFromSpaceIsCommitted() {
}
void Heap::ClearNormalizedMapCaches() {
if (isolate_->bootstrapper()->IsActive() &&
!incremental_marking()->IsMarking()) {
return;
}
Object* context = native_contexts_list();
while (!context->IsUndefined(isolate())) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
Object* cache =
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
if (!cache->IsUndefined(isolate())) {
NormalizedMapCache::cast(cache)->Clear();
}
context = Context::cast(context)->next_context_link();
}
}
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
if (start_new_space_size == 0) return;
......@@ -1733,7 +1713,6 @@ void Heap::MarkCompactPrologue() {
isolate_->compilation_cache()->MarkCompactPrologue();
FlushNumberStringCache();
ClearNormalizedMapCaches();
}
......
......@@ -812,8 +812,6 @@ class Heap {
// scavenge operation.
inline bool ShouldBePromoted(Address old_address);
void ClearNormalizedMapCaches();
void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
inline uint32_t HashSeed();
......
......@@ -244,22 +244,6 @@ class IncrementalMarkingMarkingVisitor final
return object_size;
}
V8_INLINE int VisitNativeContext(Map* map, Context* context) {
  // The normalized map cache is blackened in a separate pass once marking
  // finishes; here it is only turned grey (if white) without enqueueing it
  // on the marking deque. A GC can happen before the context is fully
  // initialized, so the cache slot may still hold undefined.
  Object* cache = context->get(Context::NORMALIZED_MAP_CACHE_INDEX);
  if (cache->IsHeapObject() && !cache->IsUndefined(map->GetIsolate())) {
    incremental_marking_->marking_state()->WhiteToGrey(
        HeapObject::cast(cache));
  }
  return Parent::VisitNativeContext(map, context);
}
V8_INLINE void VisitPointer(HeapObject* host, Object** p) final {
Object* target = *p;
if (target->IsHeapObject()) {
......@@ -898,18 +882,6 @@ void IncrementalMarking::Hurry() {
}
}
}
Object* context = heap_->native_contexts_list();
while (!context->IsUndefined(heap_->isolate())) {
// GC can happen when the context is not fully initialized,
// so the cache can be undefined.
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
marking_state()->GreyToBlack(cache);
}
context = Context::cast(context)->next_context_link();
}
}
......
......@@ -1402,8 +1402,10 @@ void NormalizedMapCache::NormalizedMapCacheVerify() {
Isolate* isolate = GetIsolate();
for (int i = 0; i < length(); i++) {
Object* e = FixedArray::get(i);
if (e->IsMap()) {
Map::cast(e)->DictionaryMapVerify();
if (e->IsWeakCell()) {
if (!WeakCell::cast(e)->cleared()) {
Map::cast(WeakCell::cast(e)->value())->DictionaryMapVerify();
}
} else {
CHECK(e->IsUndefined(isolate));
}
......
......@@ -6108,19 +6108,23 @@ MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
PropertyNormalizationMode mode) {
  // Looks up a normalized map equivalent to |fast_map| under |mode|.
  //
  // NOTE(review): the original span interleaved the pre-change body (raw Map
  // entries) with the post-change body (WeakCell entries), leaving nested,
  // contradictory checks. Reconstructed here per the commit message: cache
  // entries are WeakCells, and a cleared cell means the map was collected.
  DisallowHeapAllocation no_gc;
  Object* value = FixedArray::get(GetIndex(fast_map));
  // Miss: slot is empty (undefined) or its weak cell was cleared by GC.
  if (!value->IsWeakCell() || WeakCell::cast(value)->cleared()) {
    return MaybeHandle<Map>();
  }
  Map* normalized_map = Map::cast(WeakCell::cast(value)->value());
  // Miss: cached map is live but not equivalent under the requested mode.
  if (!normalized_map->EquivalentToForNormalization(*fast_map, mode)) {
    return MaybeHandle<Map>();
  }
  return handle(normalized_map);
}
void NormalizedMapCache::Set(Handle<Map> fast_map,
Handle<Map> normalized_map) {
void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map,
Handle<WeakCell> normalized_map_weak_cell) {
DisallowHeapAllocation no_gc;
DCHECK(normalized_map->is_dictionary_map());
FixedArray::set(GetIndex(fast_map), *normalized_map);
DCHECK_EQ(normalized_map_weak_cell->value(), *normalized_map);
FixedArray::set(GetIndex(fast_map), *normalized_map_weak_cell);
}
......@@ -9027,7 +9031,8 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, PropertyNormalizationMode mode,
} else {
new_map = Map::CopyNormalized(fast_map, mode);
if (use_cache) {
cache->Set(fast_map, new_map);
Handle<WeakCell> cell = Map::WeakCellForMap(new_map);
cache->Set(fast_map, new_map, cell);
isolate->counters()->maps_normalized()->Increment();
}
#if V8_TRACE_MAPS
......
......@@ -917,7 +917,8 @@ class NormalizedMapCache : public FixedArray {
MUST_USE_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
PropertyNormalizationMode mode);
void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
void Set(Handle<Map> fast_map, Handle<Map> normalized_map,
Handle<WeakCell> normalized_map_weak_cell);
void Clear();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment