Commit 8f1a5c8e authored by Patrick Thier, committed by V8 LUCI CQ

[cleanup] Remove unused code introduced for concurrent string migrations

With the introduction of the StringForwardingTable, there are no
concurrent transitions of strings happening anymore.

- Remove String migration sentinel maps + helper methods
- Remove CanMigrateInParallel()
- Remove MigrateStringMapUnderLockIfNeeded() and simplify MakeThin()

There is still unused code I didn't remove in this CL, as we might need
it later for shared struct features: YIELD_PROCESSOR for spinlocks and
Relaxed_Memcmp().

Bug: v8:12007
Change-Id: Iaa09ef93d2ee612e42cd73395a06eada22fe7dae
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3629545
Commit-Queue: Patrick Thier <pthier@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Marja Hölttä <marja@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80414}
parent 17a052b2
...@@ -1069,25 +1069,6 @@ MaybeHandle<Map> FactoryBase<Impl>::GetInPlaceInternalizedStringMap( ...@@ -1069,25 +1069,6 @@ MaybeHandle<Map> FactoryBase<Impl>::GetInPlaceInternalizedStringMap(
return map; return map;
} }
template <typename Impl>
Handle<Map> FactoryBase<Impl>::GetStringMigrationSentinelMap(
    InstanceType from_string_type) {
  // Each shared string type that can be migrated has its own dedicated
  // sentinel map in the read-only roots; pick the one matching the source
  // instance type.
  Handle<Map> sentinel;
  if (from_string_type == SHARED_STRING_TYPE) {
    sentinel = read_only_roots().seq_string_migration_sentinel_map_handle();
  } else if (from_string_type == SHARED_ONE_BYTE_STRING_TYPE) {
    sentinel =
        read_only_roots().one_byte_seq_string_migration_sentinel_map_handle();
  } else {
    // Only shared sequential string types have migration sentinels.
    UNREACHABLE();
  }
  // The sentinel map must report the same instance type as the string it
  // stands in for, so shape queries keep working during migration.
  DCHECK_EQ(sentinel->instance_type(), from_string_type);
  return sentinel;
}
template <typename Impl> template <typename Impl>
AllocationType AllocationType
FactoryBase<Impl>::RefineAllocationTypeForInPlaceInternalizableString( FactoryBase<Impl>::RefineAllocationTypeForInPlaceInternalizableString(
......
...@@ -268,8 +268,6 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) FactoryBase ...@@ -268,8 +268,6 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) FactoryBase
MaybeHandle<Map> GetInPlaceInternalizedStringMap(Map from_string_map); MaybeHandle<Map> GetInPlaceInternalizedStringMap(Map from_string_map);
Handle<Map> GetStringMigrationSentinelMap(InstanceType from_string_type);
AllocationType RefineAllocationTypeForInPlaceInternalizableString( AllocationType RefineAllocationTypeForInPlaceInternalizableString(
AllocationType allocation, Map string_map); AllocationType allocation, Map string_map);
......
...@@ -406,9 +406,6 @@ bool Heap::CreateInitialMaps() { ...@@ -406,9 +406,6 @@ bool Heap::CreateInitialMaps() {
if (StringShape(entry.type).IsCons()) map.mark_unstable(); if (StringShape(entry.type).IsCons()) map.mark_unstable();
roots_table()[entry.index] = map.ptr(); roots_table()[entry.index] = map.ptr();
} }
ALLOCATE_VARSIZE_MAP(SHARED_STRING_TYPE, seq_string_migration_sentinel);
ALLOCATE_VARSIZE_MAP(SHARED_ONE_BYTE_STRING_TYPE,
one_byte_seq_string_migration_sentinel);
ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array) ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
roots.fixed_double_array_map().set_elements_kind(HOLEY_DOUBLE_ELEMENTS); roots.fixed_double_array_map().set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
......
...@@ -179,27 +179,6 @@ bool StringShape::IsShared() const { ...@@ -179,27 +179,6 @@ bool StringShape::IsShared() const {
(FLAG_shared_string_table && IsInternalized()); (FLAG_shared_string_table && IsInternalized());
} }
bool StringShape::CanMigrateInParallel() const {
switch (representation_encoding_and_shared_tag()) {
case kSeqOneByteStringTag | kSharedStringTag:
case kSeqTwoByteStringTag | kSharedStringTag:
// Shared SeqStrings can migrate to ThinStrings.
return true;
case kThinStringTag | kOneByteStringTag | kSharedStringTag:
case kThinStringTag | kTwoByteStringTag | kSharedStringTag:
// Shared ThinStrings do not migrate.
return false;
default:
// TODO(v8:12007): Set is_shared to true on internalized string when
// FLAG_shared_string_table is removed.
//
// If you crashed here, you probably added a new shared string
// type. Explicitly handle all shared string cases above.
DCHECK((FLAG_shared_string_table && IsInternalized()) || !IsShared());
return false;
}
}
StringRepresentationTag StringShape::representation_tag() const { StringRepresentationTag StringShape::representation_tag() const {
uint32_t tag = (type_ & kStringRepresentationMask); uint32_t tag = (type_ & kStringRepresentationMask);
return static_cast<StringRepresentationTag>(tag); return static_cast<StringRepresentationTag>(tag);
......
...@@ -482,8 +482,8 @@ Handle<String> StringTable::LookupString(Isolate* isolate, ...@@ -482,8 +482,8 @@ Handle<String> StringTable::LookupString(Isolate* isolate,
// For lookup misses, the internalized string map is the same map in RO space // For lookup misses, the internalized string map is the same map in RO space
// regardless of which thread is doing the lookup. // regardless of which thread is doing the lookup.
// //
// For lookup hits, String::MakeThin is threadsafe and spinlocks on // For lookup hits, we use the StringForwardingTable for shared strings to
// migrating into a ThinString. // delay the transition into a ThinString to the next stop-the-world GC.
string = String::Flatten(isolate, string); string = String::Flatten(isolate, string);
if (string->IsInternalizedString()) return string; if (string->IsInternalizedString()) return string;
......
...@@ -4,7 +4,6 @@ ...@@ -4,7 +4,6 @@
#include "src/objects/string.h" #include "src/objects/string.h"
#include "src/base/platform/yield-processor.h"
#include "src/common/assert-scope.h" #include "src/common/assert-scope.h"
#include "src/common/globals.h" #include "src/common/globals.h"
#include "src/execution/isolate-utils.h" #include "src/execution/isolate-utils.h"
...@@ -164,136 +163,6 @@ Map ComputeThinStringMap(IsolateT* isolate, StringShape from_string_shape, ...@@ -164,136 +163,6 @@ Map ComputeThinStringMap(IsolateT* isolate, StringShape from_string_shape,
return one_byte ? roots.thin_one_byte_string_map() : roots.thin_string_map(); return one_byte ? roots.thin_one_byte_string_map() : roots.thin_string_map();
} }
// Outcome of a (possibly parallel) string map migration attempt.
enum class StringMigrationResult {
  kThisThreadMigrated,    // This thread performed the representation update.
  kAnotherThreadMigrated  // Another thread migrated first; nothing left to do.
};

// This function must be used when migrating strings whose
// StringShape::CanMigrateInParallel() is true. It encapsulates the
// synchronization needed for parallel migrations from multiple threads. The
// user passes a lambda to perform to update the representation.
//
// Returns whether this thread successfully migrated the string or another
// thread did so.
//
// The locking algorithm to migrate a String uses its map word as a migration
// lock:
//
//   map = string.map(kAcquireLoad);
//   if (map != SENTINEL_MAP &&
//       string.compare_and_swap_map(map, SENTINEL_MAP)) {
//     // Lock acquired, i.e. the string's map is SENTINEL_MAP.
//   } else {
//     // Lock not acquired. Another thread set the sentinel. Spin until the
//     // map is no longer the sentinel, i.e. until the other thread
//     // releases the lock.
//     Map reloaded_map;
//     do {
//       reloaded_map = string.map(kAcquireLoad);
//     } while (reloaded_map == SENTINEL_MAP);
//   }
//
// Some notes on usage:
// - The initial map must be loaded with kAcquireLoad for synchronization.
// - Avoid loading the map multiple times. Load the map once and branch
//   on that.
// - The lambda is passed the string and its initial (pre-migration)
//   StringShape.
// - The lambda may be executed under a spinlock, so it should be as short
//   as possible.
// - Currently only SeqString -> ThinString migrations can happen in
//   parallel. If kAnotherThreadMigrated is returned, then the caller doesn't
//   need to do any other work. In the future, if additional migrations can
//   happen in parallel, then restarts may be needed if the parallel migration
//   was to a different type (e.g. SeqString -> External).
//
// Example:
//
//   DisallowGarbageCollection no_gc;
//   Map initial_map = string.map(kAcquireLoad);
//   switch (MigrateStringMapUnderLockIfNeeded(
//     isolate, string, initial_map, target_map,
//     [](Isolate* isolate, String string, StringShape initial_shape) {
//        auto t = TargetStringType::unchecked_cast(string);
//        t.set_field(foo);
//        t.set_another_field(bar);
//     }, no_gc);
//
template <typename IsolateT, typename Callback>
StringMigrationResult MigrateStringMapUnderLockIfNeeded(
    IsolateT* isolate, String string, Map initial_map, Map target_map,
    Callback update_representation, const DisallowGarbageCollection& no_gc) {
  // The scope is only a static assertion that no GC can move the string
  // while its map word doubles as a lock; it has no runtime effect.
  USE(no_gc);

  InstanceType initial_type = initial_map.instance_type();
  StringShape initial_shape(initial_type);

  if (initial_shape.CanMigrateInParallel()) {
    // A string whose map is a sentinel map means that it is in the critical
    // section for being migrated to a different map. There are multiple
    // sentinel maps: one for each InstanceType that may be migrated from.
    Map sentinel_map =
        *isolate->factory()->GetStringMigrationSentinelMap(initial_type);

    // Try to acquire the migration lock by setting the string's map to the
    // sentinel map. Note that it's possible that we've already witnessed a
    // sentinel map.
    if (initial_map == sentinel_map ||
        !string.release_compare_and_swap_map_word(
            MapWord::FromMap(initial_map), MapWord::FromMap(sentinel_map))) {
      // If the lock couldn't be acquired, another thread must be migrating this
      // string. The string's map will be the sentinel map until the migration
      // is finished. Spin until the map is no longer the sentinel map.
      //
      // TODO(v8:12007): Replace this spin lock with a ParkingLot-like
      // primitive.
      Map reloaded_map = string.map(kAcquireLoad);
      while (reloaded_map == sentinel_map) {
        YIELD_PROCESSOR;
        reloaded_map = string.map(kAcquireLoad);
      }

      // Another thread must have migrated once the map is no longer the
      // sentinel map.
      //
      // TODO(v8:12007): At time of writing there is only a single kind of
      // migration that can happen in parallel: SeqString -> ThinString. If
      // other parallel migrations are added, this DCHECK will fail, and users
      // of MigrateStringMapUnderLockIfNeeded would need to restart if the
      // string was migrated to a different map than target_map.
      DCHECK_EQ(reloaded_map, target_map);
      return StringMigrationResult::kAnotherThreadMigrated;
    }
  }

  // With the lock held for cases where it's needed, do the work to update the
  // representation before storing the map word. In addition to parallel
  // migrations, this also ensures that the concurrent marker will read the
  // updated representation when visiting migrated strings.
  update_representation(isolate, string, initial_shape);

  // Do the store on the map word.
  //
  // In debug mode, do a compare-and-swap that is checked to succeed, to check
  // that all string map migrations are using this function, since to be in the
  // migration critical section, the string's current map must be the sentinel
  // map.
  //
  // Otherwise do a normal release store.
  if (DEBUG_BOOL && initial_shape.CanMigrateInParallel()) {
    DCHECK_NE(initial_map, target_map);
    Map sentinel_map =
        *isolate->factory()->GetStringMigrationSentinelMap(initial_type);
    CHECK(string.release_compare_and_swap_map_word(
        MapWord::FromMap(sentinel_map), MapWord::FromMap(target_map)));
  } else {
    string.set_map_safe_transition(target_map, kReleaseStore);
  }

  return StringMigrationResult::kThisThreadMigrated;
}
} // namespace } // namespace
template <typename IsolateT> template <typename IsolateT>
...@@ -302,10 +171,7 @@ void String::MakeThin(IsolateT* isolate, String internalized) { ...@@ -302,10 +171,7 @@ void String::MakeThin(IsolateT* isolate, String internalized) {
DCHECK_NE(*this, internalized); DCHECK_NE(*this, internalized);
DCHECK(internalized.IsInternalizedString()); DCHECK(internalized.IsInternalizedString());
// Load the map once at the beginning and use it to query for the shape of the Map initial_map = map(kAcquireLoad);
// string to avoid reloading the map in case of parallel migrations. See
// comment above for MigrateStringMapUnderLockIfNeeded.
Map initial_map = this->map(kAcquireLoad);
StringShape initial_shape(initial_map); StringShape initial_shape(initial_map);
DCHECK(!initial_shape.IsThin()); DCHECK(!initial_shape.IsThin());
...@@ -319,35 +185,23 @@ void String::MakeThin(IsolateT* isolate, String internalized) { ...@@ -319,35 +185,23 @@ void String::MakeThin(IsolateT* isolate, String internalized) {
HasForwardingIndex()); HasForwardingIndex());
bool has_pointers = initial_shape.IsIndirect(); bool has_pointers = initial_shape.IsIndirect();
int old_size = this->SizeFromMap(initial_map); int old_size = SizeFromMap(initial_map);
Map target_map = ComputeThinStringMap(isolate, initial_shape, Map target_map = ComputeThinStringMap(isolate, initial_shape,
internalized.IsOneByteRepresentation()); internalized.IsOneByteRepresentation());
// TODO(pthier): We don't need to migrate under lock anymore, as shared
// strings are only transitioned during stop-the-world GC.
switch (MigrateStringMapUnderLockIfNeeded(
isolate, *this, initial_map, target_map,
[=](IsolateT* isolate, String string, StringShape initial_shape) {
if (initial_shape.IsExternal()) { if (initial_shape.IsExternal()) {
// TODO(v8:12007): Support external strings. // TODO(v8:12007): Support external strings.
DCHECK(!initial_shape.IsShared()); DCHECK(!initial_shape.IsShared());
MigrateExternalString(isolate->AsIsolate(), string, internalized); MigrateExternalString(isolate->AsIsolate(), *this, internalized);
} }
ThinString::unchecked_cast(string).set_actual(internalized); // Update actual first and then do release store on the map word. This ensures
DCHECK_GE(old_size, ThinString::kSize); // that the concurrent marker will read the pointer when visiting a
}, // ThinString.
no_gc)) { ThinString thin = ThinString::unchecked_cast(*this);
case StringMigrationResult::kThisThreadMigrated: thin.set_actual(internalized);
// Overwrite character data with the filler below. set_map_safe_transition(target_map, kReleaseStore);
break;
case StringMigrationResult::kAnotherThreadMigrated:
// Nothing to do.
//
// TODO(v8:12007): Support shared ThinStrings.
UNREACHABLE();
}
ThinString thin = ThinString::cast(*this); DCHECK_GE(old_size, ThinString::kSize);
int size_delta = old_size - ThinString::kSize; int size_delta = old_size - ThinString::kSize;
if (size_delta != 0) { if (size_delta != 0) {
if (!Heap::IsLargeObject(thin)) { if (!Heap::IsLargeObject(thin)) {
......
...@@ -61,7 +61,6 @@ class StringShape { ...@@ -61,7 +61,6 @@ class StringShape {
V8_INLINE bool IsSequentialTwoByte() const; V8_INLINE bool IsSequentialTwoByte() const;
V8_INLINE bool IsInternalized() const; V8_INLINE bool IsInternalized() const;
V8_INLINE bool IsShared() const; V8_INLINE bool IsShared() const;
V8_INLINE bool CanMigrateInParallel() const;
V8_INLINE StringRepresentationTag representation_tag() const; V8_INLINE StringRepresentationTag representation_tag() const;
V8_INLINE uint32_t encoding_tag() const; V8_INLINE uint32_t encoding_tag() const;
V8_INLINE uint32_t representation_and_encoding_tag() const; V8_INLINE uint32_t representation_and_encoding_tag() const;
......
...@@ -150,10 +150,6 @@ class Symbol; ...@@ -150,10 +150,6 @@ class Symbol;
V(Map, shared_string_map, SharedStringMap) \ V(Map, shared_string_map, SharedStringMap) \
V(Map, shared_thin_one_byte_string_map, SharedThinOneByteStringMap) \ V(Map, shared_thin_one_byte_string_map, SharedThinOneByteStringMap) \
V(Map, shared_thin_string_map, SharedThinStringMap) \ V(Map, shared_thin_string_map, SharedThinStringMap) \
V(Map, seq_string_migration_sentinel_map, \
TwoByteSeqStringMigrationSentinelMap) \
V(Map, one_byte_seq_string_migration_sentinel_map, \
OneByteSeqStringMigrationSentinelMap) \
/* Oddball maps */ \ /* Oddball maps */ \
V(Map, undefined_map, UndefinedMap) \ V(Map, undefined_map, UndefinedMap) \
V(Map, the_hole_map, TheHoleMap) \ V(Map, the_hole_map, TheHoleMap) \
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment