Commit d4abe22e authored by Leszek Swirski, committed by Commit Bot

Revert "[snapshot] rehash JSMap and JSSet during deserialization"

This reverts commit 8374feed.

Reason for revert: Breaking mjsunit/global-hash under the stress_snapshot variant, e.g.
https://ci.chromium.org/p/v8/builders/ci/V8%20Linux64%20-%20debug%20-%20fyi/12560

Original change's description:
> [snapshot] rehash JSMap and JSSet during deserialization
> 
> To rehash JSMap and JSSet, we simply replace the backing store
> with a new one created with the new hash.
> 
> Bug: v8:9187
> Change-Id: I90c25b18b33b7bc2b6ffe1b89fe17aa5f978b517
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2143983
> Commit-Queue: Joyee Cheung <joyee@igalia.com>
> Reviewed-by: Jakob Gruber <jgruber@chromium.org>
> Reviewed-by: Camillo Bruni <cbruni@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#67663}

TBR=cbruni@chromium.org,jgruber@chromium.org,verwaest@chromium.org,joyee@igalia.com

# Not skipping CQ checks because original CL landed > 1 day ago.

Bug: v8:9187
Change-Id: I4a89768c031cd3971eefd9f88528ddd52e1284c9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2192657
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67696}
parent 9470091b
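For readers skimming the diff: the mechanism being reverted, as described in the quoted CL message above, is to throw away a JSMap/JSSet's ordered-hash backing store during deserialization and swap in a replacement built under the isolate's current hash seed. Below is a minimal standalone sketch of that swap-the-backing-store idea using std::unordered_map with a seeded hasher instead of V8's internal types; SeededHash, RehashWithNewSeed, and the 42/1337 seed values are illustrative only (the seeds echo the cctest further down), not V8 API.

#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

// Toy stand-in for a seeded string hash: a table's bucket layout is only
// valid for the seed it was built with.
struct SeededHash {
  uint64_t seed;
  size_t operator()(const std::string& key) const {
    size_t h = static_cast<size_t>(seed);
    for (char c : key) h = h * 131 + static_cast<unsigned char>(c);
    return h;
  }
};

using Table = std::unordered_map<std::string, int, SeededHash>;

// "Rehash" by building a fresh backing store under the new seed and swapping
// it in, mirroring the replace-the-backing-store approach described above.
Table RehashWithNewSeed(const Table& old_table, uint64_t new_seed) {
  Table new_table(old_table.bucket_count(), SeededHash{new_seed});
  for (const auto& entry : old_table) new_table.insert(entry);
  return new_table;
}

int main() {
  Table snapshot_table(8, SeededHash{42});  // table "serialized" under seed 42
  snapshot_table["b"] = 2;

  // Simulate deserializing under a different seed (1337): the old bucket
  // layout is stale, so rebuild the table before doing any lookups.
  Table live_table = RehashWithNewSeed(snapshot_table, 1337);
  std::cout << live_table.at("b") << "\n";  // prints 2
}

The removed JSSet::Rehash and JSMap::Rehash bodies in the objects.cc hunks below follow the same pattern against OrderedHashSet/OrderedHashMap: build a new table via OrderedHash{Set,Map}::Rehash, then install it with set_table.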
@@ -191,7 +191,7 @@ class HeapObject : public Object {
bool CanBeRehashed() const;
// Rehash the object based on the layout inferred from its map.
void RehashBasedOnMap(Isolate* isolate);
void RehashBasedOnMap(ReadOnlyRoots root);
// Layout description.
#define HEAP_OBJECT_FIELDS(V) \
@@ -30,7 +30,6 @@ class JSSet : public TorqueGeneratedJSSet<JSSet, JSCollection> {
public:
static void Initialize(Handle<JSSet> set, Isolate* isolate);
static void Clear(Isolate* isolate, Handle<JSSet> set);
void Rehash(Isolate* isolate);
// Dispatched behavior.
DECL_PRINTER(JSSet)
@@ -57,7 +56,6 @@ class JSMap : public TorqueGeneratedJSMap<JSMap, JSCollection> {
public:
static void Initialize(Handle<JSMap> map, Isolate* isolate);
static void Clear(Isolate* isolate, Handle<JSMap> map);
void Rehash(Isolate* isolate);
// Dispatched behavior.
DECL_PRINTER(JSMap)
@@ -2306,8 +2306,9 @@ bool HeapObject::NeedsRehashing() const {
case TRANSITION_ARRAY_TYPE:
return TransitionArray::cast(*this).number_of_entries() > 1;
case ORDERED_HASH_MAP_TYPE:
return OrderedHashMap::cast(*this).NumberOfElements() > 0;
case ORDERED_HASH_SET_TYPE:
return false; // We'll rehash from the JSMap or JSSet referencing them.
return OrderedHashSet::cast(*this).NumberOfElements() > 0;
case NAME_DICTIONARY_TYPE:
case GLOBAL_DICTIONARY_TYPE:
case NUMBER_DICTIONARY_TYPE:
@@ -2317,8 +2318,6 @@ bool HeapObject::NeedsRehashing() const {
case SMALL_ORDERED_HASH_MAP_TYPE:
case SMALL_ORDERED_HASH_SET_TYPE:
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
case JS_MAP_TYPE:
case JS_SET_TYPE:
return true;
default:
return false;
@@ -2328,13 +2327,10 @@ bool HeapObject::NeedsRehashing() const {
bool HeapObject::CanBeRehashed() const {
DCHECK(NeedsRehashing());
switch (map().instance_type()) {
case JS_MAP_TYPE:
case JS_SET_TYPE:
return true;
case ORDERED_HASH_MAP_TYPE:
case ORDERED_HASH_SET_TYPE:
UNREACHABLE(); // We'll rehash from the JSMap or JSSet referencing them.
case ORDERED_NAME_DICTIONARY_TYPE:
// TODO(yangguo): actually support rehashing OrderedHash{Map,Set}.
return false;
case NAME_DICTIONARY_TYPE:
case GLOBAL_DICTIONARY_TYPE:
@@ -2358,8 +2354,7 @@ bool HeapObject::CanBeRehashed() const {
return false;
}
void HeapObject::RehashBasedOnMap(Isolate* isolate) {
ReadOnlyRoots roots = ReadOnlyRoots(isolate);
void HeapObject::RehashBasedOnMap(ReadOnlyRoots roots) {
switch (map().instance_type()) {
case HASH_TABLE_TYPE:
UNREACHABLE();
@@ -2391,17 +2386,6 @@ void HeapObject::RehashBasedOnMap(Isolate* isolate) {
case SMALL_ORDERED_HASH_SET_TYPE:
DCHECK_EQ(0, SmallOrderedHashSet::cast(*this).NumberOfElements());
break;
case ORDERED_HASH_MAP_TYPE:
case ORDERED_HASH_SET_TYPE:
UNREACHABLE(); // We'll rehash from the JSMap or JSSet referencing them.
case JS_MAP_TYPE: {
JSMap::cast(*this).Rehash(isolate);
break;
}
case JS_SET_TYPE: {
JSSet::cast(*this).Rehash(isolate);
break;
}
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
DCHECK_EQ(0, SmallOrderedNameDictionary::cast(*this).NumberOfElements());
break;
@@ -7868,13 +7852,6 @@ void JSSet::Clear(Isolate* isolate, Handle<JSSet> set) {
set->set_table(*table);
}
void JSSet::Rehash(Isolate* isolate) {
Handle<OrderedHashSet> table_handle(OrderedHashSet::cast(table()), isolate);
Handle<OrderedHashSet> new_table =
OrderedHashSet::Rehash(isolate, table_handle).ToHandleChecked();
set_table(*new_table);
}
void JSMap::Initialize(Handle<JSMap> map, Isolate* isolate) {
Handle<OrderedHashMap> table = isolate->factory()->NewOrderedHashMap();
map->set_table(*table);
@@ -7886,13 +7863,6 @@ void JSMap::Clear(Isolate* isolate, Handle<JSMap> map) {
map->set_table(*table);
}
void JSMap::Rehash(Isolate* isolate) {
Handle<OrderedHashMap> table_handle(OrderedHashMap::cast(table()), isolate);
Handle<OrderedHashMap> new_table =
OrderedHashMap::Rehash(isolate, table_handle).ToHandleChecked();
set_table(*new_table);
}
void JSWeakCollection::Initialize(Handle<JSWeakCollection> weak_collection,
Isolate* isolate) {
Handle<EphemeronHashTable> table = EphemeronHashTable::New(isolate, 0);
@@ -194,13 +194,6 @@ HeapObject OrderedHashMap::GetEmpty(ReadOnlyRoots ro_roots) {
return ro_roots.empty_ordered_hash_map();
}
template <class Derived, int entrysize>
MaybeHandle<Derived> OrderedHashTable<Derived, entrysize>::Rehash(
Isolate* isolate, Handle<Derived> table) {
return OrderedHashTable<Derived, entrysize>::Rehash(isolate, table,
table->Capacity());
}
template <class Derived, int entrysize>
MaybeHandle<Derived> OrderedHashTable<Derived, entrysize>::Rehash(
Isolate* isolate, Handle<Derived> table, int new_capacity) {
@@ -257,20 +250,6 @@ MaybeHandle<OrderedHashSet> OrderedHashSet::Rehash(Isolate* isolate,
new_capacity);
}
MaybeHandle<OrderedHashSet> OrderedHashSet::Rehash(
Isolate* isolate, Handle<OrderedHashSet> table) {
return OrderedHashTable<
OrderedHashSet, OrderedHashSet::kEntrySizeWithoutChain>::Rehash(isolate,
table);
}
MaybeHandle<OrderedHashMap> OrderedHashMap::Rehash(
Isolate* isolate, Handle<OrderedHashMap> table) {
return OrderedHashTable<
OrderedHashMap, OrderedHashMap::kEntrySizeWithoutChain>::Rehash(isolate,
table);
}
MaybeHandle<OrderedHashMap> OrderedHashMap::Rehash(Isolate* isolate,
Handle<OrderedHashMap> table,
int new_capacity) {
@@ -138,7 +138,6 @@ class OrderedHashTable : public FixedArray {
// The extra +1 is for linking the bucket chains together.
static const int kEntrySize = entrysize + 1;
static const int kEntrySizeWithoutChain = entrysize;
static const int kChainOffset = entrysize;
static const int kNotFound = -1;
@@ -201,8 +200,6 @@ class OrderedHashTable : public FixedArray {
static MaybeHandle<Derived> Allocate(
Isolate* isolate, int capacity,
AllocationType allocation = AllocationType::kYoung);
static MaybeHandle<Derived> Rehash(Isolate* isolate, Handle<Derived> table);
static MaybeHandle<Derived> Rehash(Isolate* isolate, Handle<Derived> table,
int new_capacity);
@@ -247,8 +244,6 @@ class V8_EXPORT_PRIVATE OrderedHashSet
static MaybeHandle<OrderedHashSet> Rehash(Isolate* isolate,
Handle<OrderedHashSet> table,
int new_capacity);
static MaybeHandle<OrderedHashSet> Rehash(Isolate* isolate,
Handle<OrderedHashSet> table);
static MaybeHandle<OrderedHashSet> Allocate(
Isolate* isolate, int capacity,
AllocationType allocation = AllocationType::kYoung);
@@ -278,8 +273,6 @@ class V8_EXPORT_PRIVATE OrderedHashMap
static MaybeHandle<OrderedHashMap> Rehash(Isolate* isolate,
Handle<OrderedHashMap> table,
int new_capacity);
static MaybeHandle<OrderedHashMap> Rehash(Isolate* isolate,
Handle<OrderedHashMap> table);
Object ValueAt(int entry);
// This takes and returns raw Address values containing tagged Object
@@ -59,12 +59,12 @@ MaybeHandle<Object> ContextDeserializer::Deserialize(
// new code, which also has to be flushed from instruction cache.
CHECK_EQ(start_address, code_space->top());
if (FLAG_rehash_snapshot && can_rehash()) Rehash();
LogNewMapEvents();
result = handle(root, isolate);
}
if (FLAG_rehash_snapshot && can_rehash()) Rehash();
SetupOffHeapArrayBufferBackingStores();
return result;
@@ -82,7 +82,7 @@ void Deserializer::Initialize(Isolate* isolate) {
void Deserializer::Rehash() {
DCHECK(can_rehash() || deserializing_user_code());
for (HeapObject item : to_rehash_) {
item.RehashBasedOnMap(isolate_);
item.RehashBasedOnMap(ReadOnlyRoots(isolate_));
}
}
@@ -142,14 +142,6 @@ void Deserializer::DeserializeDeferredObjects() {
}
}
}
// When the deserialization of maps are deferred, they will be created
// as filler maps, and we postpone the post processing until the maps
// are also deserialized.
for (const auto& pair : fillers_to_post_process_) {
DCHECK(!pair.first.IsFiller());
PostProcessNewObject(pair.first, pair.second);
}
}
void Deserializer::LogNewObjectEvents() {
@@ -221,11 +213,7 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
DisallowHeapAllocation no_gc;
if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
if (obj.IsFiller()) {
DCHECK_EQ(fillers_to_post_process_.find(obj),
fillers_to_post_process_.end());
fillers_to_post_process_.insert({obj, space});
} else if (obj.IsString()) {
if (obj.IsString()) {
// Uninitialize hash field as we need to recompute the hash.
String string = String::cast(obj);
string.set_hash_field(String::kEmptyHashField);
@@ -197,11 +197,6 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
// TODO(6593): generalize rehashing, and remove this flag.
bool can_rehash_;
std::vector<HeapObject> to_rehash_;
// Store the objects whose maps are deferred and thus initialized as filler
// maps during deserialization, so that they can be processed later when the
// maps become available.
std::unordered_map<HeapObject, SnapshotSpace, Object::Hasher>
fillers_to_post_process_;
#ifdef DEBUG
uint32_t num_api_references_;
@@ -47,10 +47,9 @@ MaybeHandle<HeapObject> ObjectDeserializer::Deserialize(Isolate* isolate) {
LinkAllocationSites();
LogNewMapEvents();
result = handle(HeapObject::cast(root), isolate);
Rehash();
allocator()->RegisterDeserializedObjectsForBlackAllocation();
}
Rehash();
CommitPostProcessedObjects();
return scope.CloseAndEscape(result);
}
@@ -3755,7 +3755,7 @@ UNINITIALIZED_TEST(SnapshotCreatorIncludeGlobalProxy) {
FreeCurrentEmbeddedBlob();
}
UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) {
UNINITIALIZED_TEST(ReinitializeHashSeedNotRehashable) {
DisableAlwaysOpt();
i::FLAG_rehash_snapshot = true;
i::FLAG_hash_seed = 42;
@@ -3773,16 +3773,13 @@ UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) {
CompileRun(
"var m = new Map();"
"m.set('a', 1);"
"m.set('b', 2);"
"var s = new Set();"
"s.add(1)");
"m.set('b', 2);");
ExpectInt32("m.get('b')", 2);
ExpectTrue("s.has(1)");
creator.SetDefaultContext(context);
}
blob =
creator.CreateBlob(v8::SnapshotCreator::FunctionCodeHandling::kClear);
CHECK(blob.CanBeRehashed());
CHECK(!blob.CanBeRehashed());
}
i::FLAG_hash_seed = 1337;
@@ -3791,8 +3788,8 @@ UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) {
create_params.snapshot_blob = &blob;
v8::Isolate* isolate = v8::Isolate::New(create_params);
{
// Check that rehashing has been performed.
CHECK_EQ(static_cast<uint64_t>(1337),
// Check that no rehashing has been performed.
CHECK_EQ(static_cast<uint64_t>(42),
HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
@@ -3800,7 +3797,6 @@ UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) {
CHECK(!context.IsEmpty());
v8::Context::Scope context_scope(context);
ExpectInt32("m.get('b')", 2);
ExpectTrue("s.has(1)");
}
isolate->Dispose();
delete[] blob.data;