Commit 9991fbd2 authored by Marja Hölttä, committed by Commit Bot

[in-place weak refs] Make NormalizedMapCache use in-place weak references.

Now we can finally get rid of Map::weak_cell_cache!

BUG=v8:7308

Change-Id: I87a06509bf638bf6833ea2ba1eca525fb4b15df1
Reviewed-on: https://chromium-review.googlesource.com/1128882
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54511}
parent 6cda092e
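For context: an "in-place" weak reference stores the weak pointer directly in a WeakFixedArray slot as a weakly tagged MaybeObject*, so no separate WeakCell has to be allocated per cached map, and Map no longer needs a field to cache that cell. The snippet below is a minimal sketch of the before/after access pattern, distilled from the NormalizedMapCache hunks in this diff and written as if inside one of its member functions; the variable names are illustrative, not part of the commit.

// Old pattern: go through a separately allocated WeakCell.
//   Object* value = FixedArray::get(i);
//   if (value->IsWeakCell() && !WeakCell::cast(value)->cleared()) {
//     Map* cached = Map::cast(WeakCell::cast(value)->value());
//   }

// New pattern: the weak reference lives directly in the WeakFixedArray slot.
MaybeObject* value = WeakFixedArray::Get(i);
HeapObject* heap_object;
if (value->ToWeakHeapObject(&heap_object)) {
  Map* cached = Map::cast(heap_object);  // referent is still alive: cache hit
} else {
  // Slot was cleared by the GC (referent died) or never filled: cache miss.
}

// Storing: tag the pointer as weak in place; no WeakCell allocation needed.
WeakFixedArray::Set(i, HeapObjectReference::Weak(*normalized_map));

The GC clears the weakly tagged slot in place when the referenced map dies, which is what the per-map WeakCell used to provide at the cost of an extra object per map.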
......@@ -310,7 +310,6 @@ class ConcurrentMarkingVisitor final
       VisitPointer(map, HeapObject::RawMaybeWeakField(
                             map, Map::kTransitionsOrPrototypeInfoOffset));
       VisitPointer(map, HeapObject::RawField(map, Map::kDependentCodeOffset));
-      VisitPointer(map, HeapObject::RawField(map, Map::kWeakCellCacheOffset));
       bailout_.Push(map);
     }
     return 0;
......
......@@ -1852,7 +1852,6 @@ Map* Factory::InitializeMap(Map* map, InstanceType type, int instance_size,
   }
   map->set_dependent_code(DependentCode::cast(*empty_fixed_array()),
                           SKIP_WRITE_BARRIER);
-  map->set_weak_cell_cache(Smi::kZero);
   map->set_raw_transitions(MaybeObject::FromSmi(Smi::kZero));
   map->SetInObjectUnusedPropertyFields(inobject_properties);
   map->set_instance_descriptors(*empty_descriptor_array());
......
......@@ -108,15 +108,6 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
       Map::cast(result), instance_type, instance_size, elements_kind,
       inobject_properties);
-  if (!is_js_object) {
-    // Eagerly initialize the WeakCell cache for the map as it will not be
-    // writable in RO_SPACE.
-    HandleScope handle_scope(isolate());
-    Handle<WeakCell> weak_cell = isolate()->factory()->NewWeakCell(
-        Handle<Map>(map, isolate()), TENURED_READ_ONLY);
-    map->set_weak_cell_cache(*weak_cell);
-  }
   return map;
 }
......@@ -148,7 +139,6 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                    Map::OwnsDescriptorsBit::encode(true) |
                    Map::ConstructionCounterBits::encode(Map::kNoSlackTracking);
   map->set_bit_field3(bit_field3);
-  map->set_weak_cell_cache(Smi::kZero);
   map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
   return map;
 }
......@@ -163,13 +153,6 @@ void Heap::FinalizePartialMap(Map* map) {
   }
   map->set_prototype(roots.null_value());
   map->set_constructor_or_backpointer(roots.null_value());
-  // Eagerly initialize the WeakCell cache for the map as it will not be
-  // writable in RO_SPACE.
-  HandleScope handle_scope(isolate());
-  Handle<WeakCell> weak_cell = isolate()->factory()->NewWeakCell(
-      Handle<Map>(map, isolate()), TENURED_READ_ONLY);
-  map->set_weak_cell_cache(*weak_cell);
 }
 AllocationResult Heap::Allocate(Map* map, AllocationSpace space) {
......
......@@ -1783,16 +1783,17 @@ void Script::ScriptVerify(Isolate* isolate) {
 }
 void NormalizedMapCache::NormalizedMapCacheVerify(Isolate* isolate) {
-  FixedArray::cast(this)->FixedArrayVerify(isolate);
+  WeakFixedArray::cast(this)->WeakFixedArrayVerify(isolate);
   if (FLAG_enable_slow_asserts) {
     for (int i = 0; i < length(); i++) {
-      Object* e = FixedArray::get(i);
-      if (e->IsWeakCell()) {
-        if (!WeakCell::cast(e)->cleared()) {
-          Map::cast(WeakCell::cast(e)->value())->DictionaryMapVerify(isolate);
-        }
+      MaybeObject* e = WeakFixedArray::Get(i);
+      HeapObject* heap_object;
+      if (e->ToWeakHeapObject(&heap_object)) {
+        Map::cast(heap_object)->DictionaryMapVerify(isolate);
       } else {
-        CHECK(e->IsUndefined(isolate));
+        CHECK(e->IsClearedWeakHeapObject() ||
+              (e->ToStrongHeapObject(&heap_object) &&
+               heap_object->IsUndefined(isolate)));
       }
     }
   }
......
......@@ -5559,15 +5559,6 @@ bool Map::IsMapInArrayPrototypeChain(Isolate* isolate) const {
   return false;
 }
-Handle<WeakCell> Map::WeakCellForMap(Isolate* isolate, Handle<Map> map) {
-  if (map->weak_cell_cache()->IsWeakCell()) {
-    return Handle<WeakCell>(WeakCell::cast(map->weak_cell_cache()), isolate);
-  }
-  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(map);
-  map->set_weak_cell_cache(*weak_cell);
-  return weak_cell;
-}
 static Handle<Map> AddMissingElementsTransitions(Isolate* isolate,
                                                  Handle<Map> map,
                                                  ElementsKind to_kind) {
......@@ -6343,8 +6334,8 @@ Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
 Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
-  Handle<FixedArray> array(
-      isolate->factory()->NewFixedArray(kEntries, TENURED));
+  Handle<WeakFixedArray> array(
+      isolate->factory()->NewWeakFixedArray(kEntries, TENURED));
   return Handle<NormalizedMapCache>::cast(array);
 }
......@@ -6352,35 +6343,26 @@ Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
 MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
                                          PropertyNormalizationMode mode) {
   DisallowHeapAllocation no_gc;
-  Object* value = FixedArray::get(GetIndex(fast_map));
-  if (!value->IsWeakCell() || WeakCell::cast(value)->cleared()) {
+  MaybeObject* value = WeakFixedArray::Get(GetIndex(fast_map));
+  HeapObject* heap_object;
+  if (!value->ToWeakHeapObject(&heap_object)) {
     return MaybeHandle<Map>();
   }
-  Map* normalized_map = Map::cast(WeakCell::cast(value)->value());
+  Map* normalized_map = Map::cast(heap_object);
   if (!normalized_map->EquivalentToForNormalization(*fast_map, mode)) {
     return MaybeHandle<Map>();
   }
   return handle(normalized_map, GetIsolate());
 }
-void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map,
-                             Handle<WeakCell> normalized_map_weak_cell) {
+void NormalizedMapCache::Set(Handle<Map> fast_map, Handle<Map> normalized_map) {
   DisallowHeapAllocation no_gc;
   DCHECK(normalized_map->is_dictionary_map());
-  DCHECK_EQ(normalized_map_weak_cell->value(), *normalized_map);
-  FixedArray::set(GetIndex(fast_map), *normalized_map_weak_cell);
+  WeakFixedArray::Set(GetIndex(fast_map),
+                      HeapObjectReference::Weak(*normalized_map));
 }
-void NormalizedMapCache::Clear() {
-  int entries = length();
-  for (int i = 0; i != entries; i++) {
-    set_undefined(i);
-  }
-}
 void JSObject::NormalizeProperties(Handle<JSObject> object,
                                    PropertyNormalizationMode mode,
                                    int expected_additional_properties,
......@@ -9293,10 +9275,8 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                           reinterpret_cast<void*>(new_map->address()),
                           Map::kDependentCodeOffset));
       }
-      STATIC_ASSERT(Map::kWeakCellCacheOffset ==
-                    Map::kDependentCodeOffset + kPointerSize);
       STATIC_ASSERT(Map::kPrototypeValidityCellOffset ==
-                    Map::kWeakCellCacheOffset + kPointerSize);
+                    Map::kDependentCodeOffset + kPointerSize);
       int offset = Map::kPrototypeValidityCellOffset + kPointerSize;
       DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
                           reinterpret_cast<void*>(new_map->address() + offset),
......@@ -9306,8 +9286,7 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
   } else {
     new_map = Map::CopyNormalized(isolate, fast_map, mode);
     if (use_cache) {
-      Handle<WeakCell> cell = Map::WeakCellForMap(isolate, new_map);
-      cache->Set(fast_map, new_map, cell);
+      cache->Set(fast_map, new_map);
       isolate->counters()->maps_normalized()->Increment();
     }
     if (FLAG_trace_maps) {
......@@ -10321,9 +10300,8 @@ void FixedArrayOfWeakCells::Set(Isolate* isolate,
                                 Handle<FixedArrayOfWeakCells> array, int index,
                                 Handle<HeapObject> value) {
   DCHECK(array->IsEmptySlot(index));  // Don't overwrite anything.
-  Handle<WeakCell> cell =
-      value->IsMap() ? Map::WeakCellForMap(isolate, Handle<Map>::cast(value))
-                     : isolate->factory()->NewWeakCell(value);
+  DCHECK(!value->IsMap());
+  Handle<WeakCell> cell = isolate->factory()->NewWeakCell(value);
   Handle<FixedArray>::cast(array)->set(index + kFirstIndex, *cell);
   array->set_last_used_index(index);
 }
......
......@@ -672,7 +672,6 @@ void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
 }
 ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
-ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
 ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
 ACCESSORS(Map, constructor_or_backpointer, Object,
           kConstructorOrBackPointerOffset)
......@@ -746,8 +745,8 @@ int NormalizedMapCache::GetIndex(Handle<Map> map) {
 }
 bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
-  if (!obj->IsFixedArray()) return false;
-  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
+  if (!obj->IsWeakFixedArray()) return false;
+  if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
     return false;
   }
 #ifdef VERIFY_HEAP
......
......@@ -166,8 +166,6 @@ typedef std::vector<Handle<Map>> MapHandles;
 // +*************************************************************+
 // | TaggedPointer | [dependent_code]                            |
 // +---------------+---------------------------------------------+
-// | TaggedPointer | [weak_cell_cache]                           |
-// +---------------+---------------------------------------------+
 class Map : public HeapObject {
  public:
......@@ -574,9 +572,6 @@ class Map : public HeapObject {
   // [dependent code]: list of optimized codes that weakly embed this map.
   DECL_ACCESSORS(dependent_code, DependentCode)
-  // [weak cell cache]: cache that stores a weak cell pointing to this map.
-  DECL_ACCESSORS(weak_cell_cache, Object)
   // [prototype_validity_cell]: Cell containing the validity bit for prototype
   // chains or Smi(0) if uninitialized.
   // The meaning of this validity cell is different for prototype maps and
......@@ -780,8 +775,6 @@ class Map : public HeapObject {
   bool IsMapInArrayPrototypeChain(Isolate* isolate) const;
-  static Handle<WeakCell> WeakCellForMap(Isolate* isolate, Handle<Map> map);
   // Dispatched behavior.
   DECL_PRINTER(Map)
   DECL_VERIFIER(Map)
......@@ -820,7 +813,6 @@ class Map : public HeapObject {
   V(kDescriptorsOffset, kPointerSize) \
   V(kLayoutDescriptorOffset, FLAG_unbox_double_fields ? kPointerSize : 0) \
   V(kDependentCodeOffset, kPointerSize) \
-  V(kWeakCellCacheOffset, kPointerSize) \
   V(kPrototypeValidityCellOffset, kPointerSize) \
   V(kPointerFieldsEndOffset, 0) \
   /* Total size. */ \
......@@ -969,7 +961,8 @@ class Map : public HeapObject {
 // The cache for maps used by normalized (dictionary mode) objects.
 // Such maps do not have property descriptors, so a typical program
 // needs very limited number of distinct normalized maps.
-class NormalizedMapCache : public FixedArray, public NeverReadOnlySpaceObject {
+class NormalizedMapCache : public WeakFixedArray,
+                           public NeverReadOnlySpaceObject {
  public:
   using NeverReadOnlySpaceObject::GetHeap;
   using NeverReadOnlySpaceObject::GetIsolate;
......@@ -978,10 +971,7 @@ class NormalizedMapCache : public FixedArray, public NeverReadOnlySpaceObject {
   V8_WARN_UNUSED_RESULT MaybeHandle<Map> Get(Handle<Map> fast_map,
                                              PropertyNormalizationMode mode);
-  void Set(Handle<Map> fast_map, Handle<Map> normalized_map,
-           Handle<WeakCell> normalized_map_weak_cell);
-  void Clear();
+  void Set(Handle<Map> fast_map, Handle<Map> normalized_map);
   DECL_CAST(NormalizedMapCache)
......
......@@ -1172,9 +1172,6 @@ void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
   TagObject(map->dependent_code(), "(dependent code)");
   SetInternalReference(map, entry, "dependent_code", map->dependent_code(),
                        Map::kDependentCodeOffset);
-  TagObject(map->weak_cell_cache(), "(weak cell)");
-  SetInternalReference(map, entry, "weak_cell_cache", map->weak_cell_cache(),
-                       Map::kWeakCellCacheOffset);
 }
......
......@@ -381,7 +381,6 @@ UNINITIALIZED_TEST(StartupSerializerRootMapDependencies) {
     v8::internal::Handle<Map> map(
         ReadOnlyRoots(internal_isolate).one_byte_internalized_string_map(),
         internal_isolate);
-    Map::WeakCellForMap(internal_isolate, map);
     // Need to avoid DCHECKs inside SnapshotCreator.
     snapshot_creator.SetDefaultContext(v8::Context::New(isolate));
   }
......