Commit e1981ff5 authored by Shu-yu Guo, committed by V8 LUCI CQ

[strings] Share internalized and in-place internalizable strings

To prepare for prototyping shared memory features, all internalized and
in-place internalizable (1- and 2-byte seq strings and external strings)
will always be allocated in the shared old space.

Cons strings, thin strings, and sliced strings remain allocated in the
thread-local space. They are copied over to the shared space when
internalized, as internalization implies flattening, which for these
strings requires a copy already.

To make the in-place internalization threadsafe, updating the map of
such strings is now done with a release store.

This CL does not yet support external strings.

Bug: v8:12007
Change-Id: I982c35c5120bf4c0c70c5294ce011b47430414c8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3140784
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77308}
parent 98d7bbb4
...@@ -1875,6 +1875,15 @@ enum PropertiesEnumerationMode { ...@@ -1875,6 +1875,15 @@ enum PropertiesEnumerationMode {
kPropertyAdditionOrder, kPropertyAdditionOrder,
}; };
// How a given string can be turned into an internalized string; computed by
// Factory::ComputeInternalizationStrategyForString.
enum class StringInternalizationStrategy {
// The string must be internalized by first copying.
kCopy,
// The string can be internalized in-place by changing its map.
kInPlace,
// The string is already internalized.
kAlreadyInternalized
};
} // namespace internal } // namespace internal
// Tag dispatching support for acquire loads and release stores. // Tag dispatching support for acquire loads and release stores.
......
...@@ -3008,6 +3008,7 @@ v8::PageAllocator* Isolate::page_allocator() const { ...@@ -3008,6 +3008,7 @@ v8::PageAllocator* Isolate::page_allocator() const {
Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator, Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator,
bool is_shared) bool is_shared)
: isolate_data_(this, isolate_allocator->GetPtrComprCageBase()), : isolate_data_(this, isolate_allocator->GetPtrComprCageBase()),
is_shared_(is_shared),
isolate_allocator_(std::move(isolate_allocator)), isolate_allocator_(std::move(isolate_allocator)),
id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)), id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
allocator_(new TracingAccountingAllocator(this)), allocator_(new TracingAccountingAllocator(this)),
...@@ -3025,8 +3026,7 @@ Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator, ...@@ -3025,8 +3026,7 @@ Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator,
#endif #endif
next_module_async_evaluating_ordinal_( next_module_async_evaluating_ordinal_(
SourceTextModule::kFirstAsyncEvaluatingOrdinal), SourceTextModule::kFirstAsyncEvaluatingOrdinal),
cancelable_task_manager_(new CancelableTaskManager()), cancelable_task_manager_(new CancelableTaskManager()) {
is_shared_(is_shared) {
TRACE_ISOLATE(constructor); TRACE_ISOLATE(constructor);
CheckIsolateLayout(); CheckIsolateLayout();
...@@ -3644,7 +3644,12 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, ...@@ -3644,7 +3644,12 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
date_cache_ = new DateCache(); date_cache_ = new DateCache();
heap_profiler_ = new HeapProfiler(heap()); heap_profiler_ = new HeapProfiler(heap());
interpreter_ = new interpreter::Interpreter(this); interpreter_ = new interpreter::Interpreter(this);
string_table_.reset(new StringTable(this)); if (OwnsStringTable()) {
string_table_ = std::make_shared<StringTable>(this);
} else {
DCHECK_NOT_NULL(shared_isolate_);
string_table_ = shared_isolate_->string_table_;
}
bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this)); bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this));
compiler_dispatcher_ = new LazyCompileDispatcher( compiler_dispatcher_ = new LazyCompileDispatcher(
......
...@@ -1829,6 +1829,8 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { ...@@ -1829,6 +1829,8 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
base::Mutex* client_isolate_mutex() { return &client_isolate_mutex_; } base::Mutex* client_isolate_mutex() { return &client_isolate_mutex_; }
bool OwnsStringTable() { return !FLAG_shared_string_table || is_shared(); }
private: private:
explicit Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator, explicit Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator,
bool is_shared); bool is_shared);
...@@ -1957,11 +1959,15 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { ...@@ -1957,11 +1959,15 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// handlers and optimized code). // handlers and optimized code).
IsolateData isolate_data_; IsolateData isolate_data_;
// Set to true if this isolate is used as shared heap. This field must be set
// before Heap is constructed, as Heap's constructor consults it.
const bool is_shared_;
std::unique_ptr<IsolateAllocator> isolate_allocator_; std::unique_ptr<IsolateAllocator> isolate_allocator_;
Heap heap_; Heap heap_;
ReadOnlyHeap* read_only_heap_ = nullptr; ReadOnlyHeap* read_only_heap_ = nullptr;
std::shared_ptr<ReadOnlyArtifacts> artifacts_; std::shared_ptr<ReadOnlyArtifacts> artifacts_;
std::unique_ptr<StringTable> string_table_; std::shared_ptr<StringTable> string_table_;
const int id_; const int id_;
EntryStackItem* entry_stack_ = nullptr; EntryStackItem* entry_stack_ = nullptr;
...@@ -2229,9 +2235,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { ...@@ -2229,9 +2235,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
base::Mutex thread_data_table_mutex_; base::Mutex thread_data_table_mutex_;
ThreadDataTable thread_data_table_; ThreadDataTable thread_data_table_;
// Set to true if this isolate is used as shared heap.
const bool is_shared_;
// Stores the shared isolate for this client isolate. nullptr for shared // Stores the shared isolate for this client isolate. nullptr for shared
// isolates or when no shared isolate is used. // isolates or when no shared isolate is used.
Isolate* shared_isolate_ = nullptr; Isolate* shared_isolate_ = nullptr;
......
...@@ -46,6 +46,9 @@ bool HandleBase::IsDereferenceAllowed() const { ...@@ -46,6 +46,9 @@ bool HandleBase::IsDereferenceAllowed() const {
if (isolate->IsBuiltinTableHandleLocation(location_)) return true; if (isolate->IsBuiltinTableHandleLocation(location_)) return true;
if (!AllowHandleDereference::IsAllowed()) return false; if (!AllowHandleDereference::IsAllowed()) return false;
// Allocations in the shared heap may be dereferenced by multiple threads.
if (isolate->is_shared()) return true;
LocalHeap* local_heap = isolate->CurrentLocalHeap(); LocalHeap* local_heap = isolate->CurrentLocalHeap();
// Local heap can't access handles when parked // Local heap can't access handles when parked
......
...@@ -265,6 +265,9 @@ class ConcurrentMarkingVisitor final ...@@ -265,6 +265,9 @@ class ConcurrentMarkingVisitor final
DCHECK(!HasWeakHeapObjectTag(object)); DCHECK(!HasWeakHeapObjectTag(object));
if (!object.IsHeapObject()) continue; if (!object.IsHeapObject()) continue;
HeapObject heap_object = HeapObject::cast(object); HeapObject heap_object = HeapObject::cast(object);
BasicMemoryChunk* target_page =
BasicMemoryChunk::FromHeapObject(heap_object);
if (!is_shared_heap_ && target_page->InSharedHeap()) continue;
MarkObject(host, heap_object); MarkObject(host, heap_object);
RecordSlot(host, slot, heap_object); RecordSlot(host, slot, heap_object);
} }
......
...@@ -523,6 +523,8 @@ Handle<SeqOneByteString> FactoryBase<Impl>::NewOneByteInternalizedString( ...@@ -523,6 +523,8 @@ Handle<SeqOneByteString> FactoryBase<Impl>::NewOneByteInternalizedString(
const base::Vector<const uint8_t>& str, uint32_t raw_hash_field) { const base::Vector<const uint8_t>& str, uint32_t raw_hash_field) {
Handle<SeqOneByteString> result = Handle<SeqOneByteString> result =
AllocateRawOneByteInternalizedString(str.length(), raw_hash_field); AllocateRawOneByteInternalizedString(str.length(), raw_hash_field);
// No synchronization is needed since the shared string hasn't yet escaped to
// script.
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
MemCopy(result->GetChars(no_gc, SharedStringAccessGuardIfNeeded::NotNeeded()), MemCopy(result->GetChars(no_gc, SharedStringAccessGuardIfNeeded::NotNeeded()),
str.begin(), str.length()); str.begin(), str.length());
...@@ -534,6 +536,8 @@ Handle<SeqTwoByteString> FactoryBase<Impl>::NewTwoByteInternalizedString( ...@@ -534,6 +536,8 @@ Handle<SeqTwoByteString> FactoryBase<Impl>::NewTwoByteInternalizedString(
const base::Vector<const base::uc16>& str, uint32_t raw_hash_field) { const base::Vector<const base::uc16>& str, uint32_t raw_hash_field) {
Handle<SeqTwoByteString> result = Handle<SeqTwoByteString> result =
AllocateRawTwoByteInternalizedString(str.length(), raw_hash_field); AllocateRawTwoByteInternalizedString(str.length(), raw_hash_field);
// No synchronization is needed since the shared string hasn't yet escaped to
// script.
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
MemCopy(result->GetChars(no_gc, SharedStringAccessGuardIfNeeded::NotNeeded()), MemCopy(result->GetChars(no_gc, SharedStringAccessGuardIfNeeded::NotNeeded()),
str.begin(), str.length() * base::kUC16Size); str.begin(), str.length() * base::kUC16Size);
...@@ -550,8 +554,10 @@ MaybeHandle<SeqOneByteString> FactoryBase<Impl>::NewRawOneByteString( ...@@ -550,8 +554,10 @@ MaybeHandle<SeqOneByteString> FactoryBase<Impl>::NewRawOneByteString(
int size = SeqOneByteString::SizeFor(length); int size = SeqOneByteString::SizeFor(length);
DCHECK_GE(SeqOneByteString::kMaxSize, size); DCHECK_GE(SeqOneByteString::kMaxSize, size);
Map map = read_only_roots().one_byte_string_map();
SeqOneByteString string = SeqOneByteString::cast(AllocateRawWithImmortalMap( SeqOneByteString string = SeqOneByteString::cast(AllocateRawWithImmortalMap(
size, allocation, read_only_roots().one_byte_string_map())); size, RefineAllocationTypeForInPlaceInternalizableString(allocation, map),
map));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
string.set_length(length); string.set_length(length);
string.set_raw_hash_field(String::kEmptyHashField); string.set_raw_hash_field(String::kEmptyHashField);
...@@ -569,8 +575,10 @@ MaybeHandle<SeqTwoByteString> FactoryBase<Impl>::NewRawTwoByteString( ...@@ -569,8 +575,10 @@ MaybeHandle<SeqTwoByteString> FactoryBase<Impl>::NewRawTwoByteString(
int size = SeqTwoByteString::SizeFor(length); int size = SeqTwoByteString::SizeFor(length);
DCHECK_GE(SeqTwoByteString::kMaxSize, size); DCHECK_GE(SeqTwoByteString::kMaxSize, size);
Map map = read_only_roots().string_map();
SeqTwoByteString string = SeqTwoByteString::cast(AllocateRawWithImmortalMap( SeqTwoByteString string = SeqTwoByteString::cast(AllocateRawWithImmortalMap(
size, allocation, read_only_roots().string_map())); size, RefineAllocationTypeForInPlaceInternalizableString(allocation, map),
map));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
string.set_length(length); string.set_length(length);
string.set_raw_hash_field(String::kEmptyHashField); string.set_raw_hash_field(String::kEmptyHashField);
...@@ -773,8 +781,10 @@ FactoryBase<Impl>::AllocateRawOneByteInternalizedString( ...@@ -773,8 +781,10 @@ FactoryBase<Impl>::AllocateRawOneByteInternalizedString(
int size = SeqOneByteString::SizeFor(length); int size = SeqOneByteString::SizeFor(length);
HeapObject result = AllocateRawWithImmortalMap( HeapObject result = AllocateRawWithImmortalMap(
size, size,
impl()->CanAllocateInReadOnlySpace() ? AllocationType::kReadOnly RefineAllocationTypeForInPlaceInternalizableString(
: AllocationType::kOld, impl()->CanAllocateInReadOnlySpace() ? AllocationType::kReadOnly
: AllocationType::kOld,
map),
map); map);
SeqOneByteString answer = SeqOneByteString::cast(result); SeqOneByteString answer = SeqOneByteString::cast(result);
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
...@@ -793,8 +803,11 @@ FactoryBase<Impl>::AllocateRawTwoByteInternalizedString( ...@@ -793,8 +803,11 @@ FactoryBase<Impl>::AllocateRawTwoByteInternalizedString(
Map map = read_only_roots().internalized_string_map(); Map map = read_only_roots().internalized_string_map();
int size = SeqTwoByteString::SizeFor(length); int size = SeqTwoByteString::SizeFor(length);
SeqTwoByteString answer = SeqTwoByteString::cast( SeqTwoByteString answer = SeqTwoByteString::cast(AllocateRawWithImmortalMap(
AllocateRawWithImmortalMap(size, AllocationType::kOld, map)); size,
RefineAllocationTypeForInPlaceInternalizableString(AllocationType::kOld,
map),
map));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
answer.set_length(length); answer.set_length(length);
answer.set_raw_hash_field(raw_hash_field); answer.set_raw_hash_field(raw_hash_field);
...@@ -911,6 +924,35 @@ FactoryBase<Impl>::NewFunctionTemplateRareData() { ...@@ -911,6 +924,35 @@ FactoryBase<Impl>::NewFunctionTemplateRareData() {
return handle(function_template_rare_data, isolate()); return handle(function_template_rare_data, isolate());
} }
// Returns the internalized-string map matching |from_string_map| for string
// kinds that can be internalized in place (sequential one-/two-byte and
// external strings). Returns an empty MaybeHandle for all other string kinds
// (cons, thin, sliced, already-internalized), which must be internalized by
// copying instead.
template <typename Impl>
MaybeHandle<Map> FactoryBase<Impl>::GetInPlaceInternalizedStringMap(
Map from_string_map) {
switch (from_string_map.instance_type()) {
case STRING_TYPE:
return read_only_roots().internalized_string_map_handle();
case ONE_BYTE_STRING_TYPE:
return read_only_roots().one_byte_internalized_string_map_handle();
case EXTERNAL_STRING_TYPE:
return read_only_roots().external_internalized_string_map_handle();
case EXTERNAL_ONE_BYTE_STRING_TYPE:
return read_only_roots()
.external_one_byte_internalized_string_map_handle();
default:
return MaybeHandle<Map>(); // No match found.
}
}
// For an in-place internalizable (or already internalized) string map,
// possibly redirects an old-space allocation to the space the Impl chooses
// for such strings (e.g. the shared old space when the string table is
// shared). Non-kOld allocations (young, read-only) pass through unchanged.
template <typename Impl>
AllocationType
FactoryBase<Impl>::RefineAllocationTypeForInPlaceInternalizableString(
AllocationType allocation, Map string_map) {
// Only maps that are internalized or internalizable in place may be refined;
// other string kinds never end up here.
DCHECK(
InstanceTypeChecker::IsInternalizedString(string_map.instance_type()) ||
!GetInPlaceInternalizedStringMap(string_map).is_null());
if (allocation != AllocationType::kOld) return allocation;
return impl()->AllocationTypeForInPlaceInternalizableString();
}
// Instantiate FactoryBase for the two variants we want. // Instantiate FactoryBase for the two variants we want.
template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) FactoryBase<Factory>; template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) FactoryBase<Factory>;
template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
......
...@@ -234,6 +234,11 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) FactoryBase ...@@ -234,6 +234,11 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) FactoryBase
Handle<FunctionTemplateRareData> NewFunctionTemplateRareData(); Handle<FunctionTemplateRareData> NewFunctionTemplateRareData();
MaybeHandle<Map> GetInPlaceInternalizedStringMap(Map from_string_map);
AllocationType RefineAllocationTypeForInPlaceInternalizableString(
AllocationType allocation, Map string_map);
protected: protected:
// Allocate memory for an uninitialized array (e.g., a FixedArray or similar). // Allocate memory for an uninitialized array (e.g., a FixedArray or similar).
HeapObject AllocateRawArray(int size, AllocationType allocation); HeapObject AllocateRawArray(int size, AllocationType allocation);
......
...@@ -883,12 +883,14 @@ Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars, ...@@ -883,12 +883,14 @@ Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
size = SeqTwoByteString::SizeFor(chars); size = SeqTwoByteString::SizeFor(chars);
} }
String result = String::cast( String result = String::cast(AllocateRawWithImmortalMap(
AllocateRawWithImmortalMap(size, size,
isolate()->heap()->CanAllocateInReadOnlySpace() RefineAllocationTypeForInPlaceInternalizableString(
? AllocationType::kReadOnly isolate()->heap()->CanAllocateInReadOnlySpace()
: AllocationType::kOld, ? AllocationType::kReadOnly
map)); : AllocationType::kOld,
map),
map));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
result.set_length(chars); result.set_length(chars);
result.set_raw_hash_field(hash_field); result.set_raw_hash_field(hash_field);
...@@ -913,34 +915,35 @@ Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string, ...@@ -913,34 +915,35 @@ Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
namespace { namespace {
MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
switch (string->map().instance_type()) {
case STRING_TYPE:
return f->internalized_string_map();
case ONE_BYTE_STRING_TYPE:
return f->one_byte_internalized_string_map();
case EXTERNAL_STRING_TYPE:
return f->external_internalized_string_map();
case EXTERNAL_ONE_BYTE_STRING_TYPE:
return f->external_one_byte_internalized_string_map();
default:
return MaybeHandle<Map>(); // No match found.
}
}
} // namespace } // namespace
MaybeHandle<Map> Factory::InternalizedStringMapForString( StringInternalizationStrategy Factory::ComputeInternalizationStrategyForString(
Handle<String> string) { Handle<String> string, MaybeHandle<Map>* internalized_map) {
// Do not internalize young strings: This allows us to ignore both string // Do not internalize young strings in-place: This allows us to ignore both
// table and stub cache on scavenges. // string table and stub cache on scavenges.
if (Heap::InYoungGeneration(*string)) return MaybeHandle<Map>(); if (Heap::InYoungGeneration(*string)) {
return GetInternalizedStringMap(this, string); return StringInternalizationStrategy::kCopy;
}
DCHECK_NOT_NULL(internalized_map);
DisallowGarbageCollection no_gc;
// This method may be called concurrently, so snapshot the map from the input
// string instead of the calling IsType methods on HeapObject, which would
// reload the map each time.
Map map = string->map();
*internalized_map = GetInPlaceInternalizedStringMap(map);
if (!internalized_map->is_null()) {
return StringInternalizationStrategy::kInPlace;
}
if (InstanceTypeChecker::IsInternalizedString(map.instance_type())) {
return StringInternalizationStrategy::kAlreadyInternalized;
}
return StringInternalizationStrategy::kCopy;
} }
template <class StringClass> template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) { Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
Handle<Map> map = GetInternalizedStringMap(this, string).ToHandleChecked(); Handle<Map> map =
GetInPlaceInternalizedStringMap(string->map()).ToHandleChecked();
StringClass external_string = StringClass external_string =
StringClass::cast(New(map, AllocationType::kOld)); StringClass::cast(New(map, AllocationType::kOld));
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
...@@ -3738,6 +3741,12 @@ bool Factory::EmptyStringRootIsInitialized() { ...@@ -3738,6 +3741,12 @@ bool Factory::EmptyStringRootIsInitialized() {
return isolate()->roots_table()[RootIndex::kempty_string] != kNullAddress; return isolate()->roots_table()[RootIndex::kempty_string] != kNullAddress;
} }
// Forwards to the heap's choice of space for in-place internalizable
// strings (old space, or shared old space when the string table is shared).
AllocationType Factory::AllocationTypeForInPlaceInternalizableString() {
  Heap* const heap = isolate()->heap();
  return heap->allocation_type_for_in_place_internalizable_strings();
}
Handle<JSFunction> Factory::NewFunctionForTesting(Handle<String> name) { Handle<JSFunction> Factory::NewFunctionForTesting(Handle<String> name) {
Handle<SharedFunctionInfo> info = Handle<SharedFunctionInfo> info =
NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal); NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal);
......
...@@ -277,10 +277,19 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> { ...@@ -277,10 +277,19 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
Handle<String> NewInternalizedStringImpl(Handle<String> string, int chars, Handle<String> NewInternalizedStringImpl(Handle<String> string, int chars,
uint32_t hash_field); uint32_t hash_field);
// Compute the matching internalized string map for a string if possible. // Compute the internalization strategy for the input string.
// Empty handle is returned if string is in new space or not flattened. //
V8_WARN_UNUSED_RESULT MaybeHandle<Map> InternalizedStringMapForString( // Old-generation flat strings can be internalized by mutating their map
Handle<String> string); // return kInPlace, along with the matching internalized string map for string
// is stored in internalized_map.
//
// Internalized strings return kAlreadyInternalized.
//
// All other strings are internalized by flattening and copying and return
// kCopy.
V8_WARN_UNUSED_RESULT StringInternalizationStrategy
ComputeInternalizationStrategyForString(Handle<String> string,
MaybeHandle<Map>* internalized_map);
// Creates an internalized copy of an external string. |string| must be // Creates an internalized copy of an external string. |string| must be
// of type StringClass. // of type StringClass.
...@@ -969,6 +978,7 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> { ...@@ -969,6 +978,7 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
} }
bool CanAllocateInReadOnlySpace(); bool CanAllocateInReadOnlySpace();
bool EmptyStringRootIsInitialized(); bool EmptyStringRootIsInitialized();
AllocationType AllocationTypeForInPlaceInternalizableString();
void AddToScriptList(Handle<Script> shared); void AddToScriptList(Handle<Script> shared);
// ------ // ------
......
...@@ -215,6 +215,9 @@ Heap::Heap() ...@@ -215,6 +215,9 @@ Heap::Heap()
global_pretenuring_feedback_(kInitialFeedbackCapacity), global_pretenuring_feedback_(kInitialFeedbackCapacity),
safepoint_(std::make_unique<IsolateSafepoint>(this)), safepoint_(std::make_unique<IsolateSafepoint>(this)),
external_string_table_(this), external_string_table_(this),
allocation_type_for_in_place_internalizable_strings_(
isolate()->OwnsStringTable() ? AllocationType::kOld
: AllocationType::kSharedOld),
collection_barrier_(new CollectionBarrier(this)) { collection_barrier_(new CollectionBarrier(this)) {
// Ensure old_generation_size_ is a multiple of kPageSize. // Ensure old_generation_size_ is a multiple of kPageSize.
DCHECK_EQ(0, max_old_generation_size() & (Page::kPageSize - 1)); DCHECK_EQ(0, max_old_generation_size() & (Page::kPageSize - 1));
...@@ -225,6 +228,7 @@ Heap::Heap() ...@@ -225,6 +228,7 @@ Heap::Heap()
set_allocation_sites_list(Smi::zero()); set_allocation_sites_list(Smi::zero());
set_dirty_js_finalization_registries_list(Smi::zero()); set_dirty_js_finalization_registries_list(Smi::zero());
set_dirty_js_finalization_registries_list_tail(Smi::zero()); set_dirty_js_finalization_registries_list_tail(Smi::zero());
// Put a dummy entry in the remembered pages so we can find the list the // Put a dummy entry in the remembered pages so we can find the list the
// minidump even if there are no real unmapped pages. // minidump even if there are no real unmapped pages.
RememberUnmappedPage(kNullAddress, false); RememberUnmappedPage(kNullAddress, false);
...@@ -3882,6 +3886,15 @@ void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) { ...@@ -3882,6 +3886,15 @@ void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
// tagged fields are introduced. // tagged fields are introduced.
return; return;
} }
if (FLAG_shared_string_table && object.IsString() &&
InstanceTypeChecker::IsInternalizedString(new_map.instance_type())) {
// In-place internalization does not change a string's fields.
//
// When sharing the string table, the setting and re-setting of maps below
// can race when there are parallel internalization operations, causing
// DCHECKs to fail.
return;
}
// Check that the set of slots before and after the transition match. // Check that the set of slots before and after the transition match.
SlotCollectingVisitor old_visitor; SlotCollectingVisitor old_visitor;
object.IterateFast(&old_visitor); object.IterateFast(&old_visitor);
...@@ -4437,7 +4450,7 @@ void Heap::Verify() { ...@@ -4437,7 +4450,7 @@ void Heap::Verify() {
lo_space_->Verify(isolate()); lo_space_->Verify(isolate());
code_lo_space_->Verify(isolate()); code_lo_space_->Verify(isolate());
if (new_lo_space_) new_lo_space_->Verify(isolate()); if (new_lo_space_) new_lo_space_->Verify(isolate());
VerifyStringTable(isolate()); if (isolate()->OwnsStringTable()) VerifyStringTable(isolate());
} }
void Heap::VerifyReadOnlyHeap() { void Heap::VerifyReadOnlyHeap() {
...@@ -4685,9 +4698,14 @@ void Heap::IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) { ...@@ -4685,9 +4698,14 @@ void Heap::IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
DCHECK(!options.contains(SkipRoot::kWeak)); DCHECK(!options.contains(SkipRoot::kWeak));
if (!options.contains(SkipRoot::kOldGeneration) && if (!options.contains(SkipRoot::kOldGeneration) &&
!options.contains(SkipRoot::kUnserializable)) { !options.contains(SkipRoot::kUnserializable) &&
// Do not visit for serialization, since the string table is custom isolate()->OwnsStringTable()) {
// serialized. Also do not visit if we are skipping old generation. // Do not visit for the following reasons.
// - Serialization, since the string table is custom serialized.
// - If we are skipping old generation, since all internalized strings
// are in old space.
// - If the string table is shared and this is not the shared heap,
// since all internalized strings are in the shared heap.
isolate()->string_table()->IterateElements(v); isolate()->string_table()->IterateElements(v);
} }
v->Synchronize(VisitorSynchronization::kStringTable); v->Synchronize(VisitorSynchronization::kStringTable);
......
...@@ -927,6 +927,7 @@ class Heap { ...@@ -927,6 +927,7 @@ class Heap {
CodeRange* code_range() { return code_range_.get(); } CodeRange* code_range() { return code_range_.get(); }
LocalHeap* main_thread_local_heap() { return main_thread_local_heap_; } LocalHeap* main_thread_local_heap() { return main_thread_local_heap_; }
Heap* AsHeap() { return this; } Heap* AsHeap() { return this; }
// =========================================================================== // ===========================================================================
...@@ -2181,6 +2182,10 @@ class Heap { ...@@ -2181,6 +2182,10 @@ class Heap {
std::vector<WeakArrayList> FindAllRetainedMaps(); std::vector<WeakArrayList> FindAllRetainedMaps();
MemoryMeasurement* memory_measurement() { return memory_measurement_.get(); } MemoryMeasurement* memory_measurement() { return memory_measurement_.get(); }
AllocationType allocation_type_for_in_place_internalizable_strings() const {
return allocation_type_for_in_place_internalizable_strings_;
}
ExternalMemoryAccounting external_memory_; ExternalMemoryAccounting external_memory_;
// This can be calculated directly from a pointer to the heap; however, it is // This can be calculated directly from a pointer to the heap; however, it is
...@@ -2461,6 +2466,8 @@ class Heap { ...@@ -2461,6 +2466,8 @@ class Heap {
ExternalStringTable external_string_table_; ExternalStringTable external_string_table_;
const AllocationType allocation_type_for_in_place_internalizable_strings_;
base::Mutex relocation_mutex_; base::Mutex relocation_mutex_;
std::unique_ptr<CollectionBarrier> collection_barrier_; std::unique_ptr<CollectionBarrier> collection_barrier_;
...@@ -2549,6 +2556,7 @@ class Heap { ...@@ -2549,6 +2556,7 @@ class Heap {
// The allocator interface. // The allocator interface.
friend class Factory; friend class Factory;
friend class LocalFactory;
template <typename IsolateT> template <typename IsolateT>
friend class Deserializer; friend class Deserializer;
......
...@@ -29,6 +29,13 @@ READ_ONLY_ROOT_LIST(ROOT_ACCESSOR) ...@@ -29,6 +29,13 @@ READ_ONLY_ROOT_LIST(ROOT_ACCESSOR)
ACCESSOR_INFO_ROOT_LIST(ACCESSOR_INFO_ACCESSOR) ACCESSOR_INFO_ROOT_LIST(ACCESSOR_INFO_ACCESSOR)
#undef ACCESSOR_INFO_ACCESSOR #undef ACCESSOR_INFO_ACCESSOR
// Forwards to the main heap's choice of space for in-place internalizable
// strings. AsHeap() bridges from the local factory's heap handle to the
// full Heap, which owns the cached allocation type.
AllocationType LocalFactory::AllocationTypeForInPlaceInternalizableString() {
  Heap* const main_heap = isolate()->heap()->AsHeap();
  return main_heap->allocation_type_for_in_place_internalizable_strings();
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8
......
...@@ -68,6 +68,7 @@ class V8_EXPORT_PRIVATE LocalFactory : public FactoryBase<LocalFactory> { ...@@ -68,6 +68,7 @@ class V8_EXPORT_PRIVATE LocalFactory : public FactoryBase<LocalFactory> {
} }
inline bool CanAllocateInReadOnlySpace() { return false; } inline bool CanAllocateInReadOnlySpace() { return false; }
inline bool EmptyStringRootIsInitialized() { return true; } inline bool EmptyStringRootIsInitialized() { return true; }
inline AllocationType AllocationTypeForInPlaceInternalizableString();
// ------ // ------
void AddToScriptList(Handle<Script> shared); void AddToScriptList(Handle<Script> shared);
......
...@@ -267,6 +267,11 @@ class FullMarkingVerifier : public MarkingVerifier { ...@@ -267,6 +267,11 @@ class FullMarkingVerifier : public MarkingVerifier {
BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap()) BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap())
return; return;
if (!heap_->isolate()->OwnsStringTable() && heap_object.IsString() &&
!Heap::InYoungGeneration(heap_object)) {
CHECK(BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap());
}
CHECK(marking_state_->IsBlackOrGrey(heap_object)); CHECK(marking_state_->IsBlackOrGrey(heap_object));
} }
...@@ -2183,7 +2188,7 @@ void MarkCompactCollector::MarkLiveObjects() { ...@@ -2183,7 +2188,7 @@ void MarkCompactCollector::MarkLiveObjects() {
void MarkCompactCollector::ClearNonLiveReferences() { void MarkCompactCollector::ClearNonLiveReferences() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
{ if (isolate()->OwnsStringTable()) {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_STRING_TABLE);
// Prune the string table removing all strings only pointed to by the // Prune the string table removing all strings only pointed to by the
......
...@@ -40,7 +40,9 @@ class HeapObject : public Object { ...@@ -40,7 +40,9 @@ class HeapObject : public Object {
// The no-write-barrier version. This is OK if the object is white and in // The no-write-barrier version. This is OK if the object is white and in
// new space, or if the value is an immortal immutable object, like the maps // new space, or if the value is an immortal immutable object, like the maps
// of primitive (non-JS) objects like strings, heap numbers etc. // of primitive (non-JS) objects like strings, heap numbers etc.
inline void set_map_no_write_barrier(Map value); inline void set_map_no_write_barrier(Map value,
RelaxedStoreTag = kRelaxedStore);
inline void set_map_no_write_barrier(Map value, ReleaseStoreTag);
// Access the map using acquire load and release store. // Access the map using acquire load and release store.
DECL_ACQUIRE_GETTER(map, Map) DECL_ACQUIRE_GETTER(map, Map)
...@@ -73,6 +75,12 @@ class HeapObject : public Object { ...@@ -73,6 +75,12 @@ class HeapObject : public Object {
// i::GetPtrComprCageBase(HeapObject) function which may return nullptr. // i::GetPtrComprCageBase(HeapObject) function which may return nullptr.
inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const; inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const;
// Whether the object is in the RO heap and the RO heap is shared, or in the
// writable shared heap.
V8_INLINE bool InSharedHeap() const;
V8_INLINE bool InSharedWritableHeap() const;
#define IS_TYPE_FUNCTION_DECL(Type) \ #define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \ V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const; V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
......
...@@ -35,6 +35,14 @@ class Name : public TorqueGeneratedName<Name, PrimitiveHeapObject> { ...@@ -35,6 +35,14 @@ class Name : public TorqueGeneratedName<Name, PrimitiveHeapObject> {
inline uint32_t EnsureHash(); inline uint32_t EnsureHash();
inline uint32_t EnsureHash(const SharedStringAccessGuardIfNeeded&); inline uint32_t EnsureHash(const SharedStringAccessGuardIfNeeded&);
inline uint32_t raw_hash_field() const {
return RELAXED_READ_UINT32_FIELD(*this, kRawHashFieldOffset);
}
inline void set_raw_hash_field(uint32_t hash) {
RELAXED_WRITE_UINT32_FIELD(*this, kRawHashFieldOffset, hash);
}
// Returns a hash value used for the property table (same as Hash()), assumes // Returns a hash value used for the property table (same as Hash()), assumes
// the hash is already computed. // the hash is already computed.
inline uint32_t hash() const; inline uint32_t hash() const;
......
...@@ -72,6 +72,14 @@ bool Object::IsTaggedIndex() const { ...@@ -72,6 +72,14 @@ bool Object::IsTaggedIndex() const {
return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value()); return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
} }
bool Object::InSharedHeap() const {
return IsHeapObject() && HeapObject::cast(*this).InSharedHeap();
}
bool Object::InSharedWritableHeap() const {
return IsHeapObject() && HeapObject::cast(*this).InSharedWritableHeap();
}
#define IS_TYPE_FUNCTION_DEF(type_) \ #define IS_TYPE_FUNCTION_DEF(type_) \
bool Object::Is##type_() const { \ bool Object::Is##type_() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \ return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
...@@ -136,6 +144,15 @@ bool Object::IsNoSharedNameSentinel() const { ...@@ -136,6 +144,15 @@ bool Object::IsNoSharedNameSentinel() const {
return *this == SharedFunctionInfo::kNoSharedNameSentinel; return *this == SharedFunctionInfo::kNoSharedNameSentinel;
} }
bool HeapObject::InSharedHeap() const {
if (IsReadOnlyHeapObject(*this)) return V8_SHARED_RO_HEAP_BOOL;
return InSharedWritableHeap();
}
bool HeapObject::InSharedWritableHeap() const {
return BasicMemoryChunk::FromHeapObject(*this)->InSharedHeap();
}
bool HeapObject::IsNullOrUndefined(Isolate* isolate) const { bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
return IsNullOrUndefined(ReadOnlyRoots(isolate)); return IsNullOrUndefined(ReadOnlyRoots(isolate));
} }
...@@ -772,13 +789,22 @@ void HeapObject::set_map(Map value, ReleaseStoreTag tag) { ...@@ -772,13 +789,22 @@ void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
} }
// Unsafe accessor omitting write barrier. // Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map value) { void HeapObject::set_map_no_write_barrier(Map value, RelaxedStoreTag tag) {
#ifdef VERIFY_HEAP #ifdef VERIFY_HEAP
if (FLAG_verify_heap && !value.is_null()) { if (FLAG_verify_heap && !value.is_null()) {
GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value); GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
} }
#endif #endif
set_map_word(MapWord::FromMap(value), kRelaxedStore); set_map_word(MapWord::FromMap(value), tag);
}
void HeapObject::set_map_no_write_barrier(Map value, ReleaseStoreTag tag) {
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !value.is_null()) {
GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
}
#endif
set_map_word(MapWord::FromMap(value), tag);
} }
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) { void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
......
...@@ -286,6 +286,12 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> { ...@@ -286,6 +286,12 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
V8_INLINE bool IsTaggedIndex() const; V8_INLINE bool IsTaggedIndex() const;
// Whether the object is in the RO heap and the RO heap is shared, or in the
// writable shared heap.
V8_INLINE bool InSharedHeap() const;
V8_INLINE bool InSharedWritableHeap() const;
#define IS_TYPE_FUNCTION_DECL(Type) \ #define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \ V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const; V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
......
...@@ -605,6 +605,7 @@ const Char* String::GetChars( ...@@ -605,6 +605,7 @@ const Char* String::GetChars(
Handle<String> String::Flatten(Isolate* isolate, Handle<String> string, Handle<String> String::Flatten(Isolate* isolate, Handle<String> string,
AllocationType allocation) { AllocationType allocation) {
if (string->IsConsString()) { if (string->IsConsString()) {
DCHECK(!string->InSharedHeap());
Handle<ConsString> cons = Handle<ConsString>::cast(string); Handle<ConsString> cons = Handle<ConsString>::cast(string);
if (cons->IsFlat()) { if (cons->IsFlat()) {
string = handle(cons->first(), isolate); string = handle(cons->first(), isolate);
......
...@@ -346,7 +346,10 @@ class InternalizedStringKey final : public StringTableKey { ...@@ -346,7 +346,10 @@ class InternalizedStringKey final : public StringTableKey {
public: public:
explicit InternalizedStringKey(Handle<String> string) explicit InternalizedStringKey(Handle<String> string)
: StringTableKey(0, string->length()), string_(string) { : StringTableKey(0, string->length()), string_(string) {
DCHECK(!string->IsInternalizedString()); // When sharing the string table, it's possible that another thread already
// internalized the key, in which case StringTable::LookupKey will perform a
// redundant lookup and return the already internalized copy.
DCHECK_IMPLIES(!FLAG_shared_string_table, !string->IsInternalizedString());
DCHECK(string->IsFlat()); DCHECK(string->IsFlat());
// Make sure hash_field is computed. // Make sure hash_field is computed.
string->EnsureHash(); string->EnsureHash();
...@@ -359,22 +362,43 @@ class InternalizedStringKey final : public StringTableKey { ...@@ -359,22 +362,43 @@ class InternalizedStringKey final : public StringTableKey {
} }
Handle<String> AsHandle(Isolate* isolate) { Handle<String> AsHandle(Isolate* isolate) {
// Internalize the string if possible. // Internalize the string in-place if possible.
MaybeHandle<Map> maybe_map = MaybeHandle<Map> maybe_internalized_map;
isolate->factory()->InternalizedStringMapForString(string_); StringInternalizationStrategy strategy =
Handle<Map> map; isolate->factory()->ComputeInternalizationStrategyForString(
if (maybe_map.ToHandle(&map)) { string_, &maybe_internalized_map);
string_->set_map_no_write_barrier(*map); switch (strategy) {
DCHECK(string_->IsInternalizedString()); case StringInternalizationStrategy::kCopy:
return string_; break;
case StringInternalizationStrategy::kInPlace:
// A relaxed write is sufficient here even with concurrent
// internalization. Though it is not synchronizing, a thread that does
// not see the relaxed write will wait on the string table write
// mutex. When that thread acquires that mutex, the ordering of the
// mutex's underlying memory access will force this map update to become
// visible to it.
string_->set_map_no_write_barrier(
*maybe_internalized_map.ToHandleChecked());
DCHECK(string_->IsInternalizedString());
return string_;
case StringInternalizationStrategy::kAlreadyInternalized:
// We can see already internalized strings here only when sharing the
// string table and allowing concurrent internalization.
DCHECK(FLAG_shared_string_table);
return string_;
} }
// External strings get special treatment, to avoid copying their // External strings get special treatment, to avoid copying their
// contents as long as they are not uncached. // contents as long as they are not uncached.
StringShape shape(*string_); StringShape shape(*string_);
if (shape.IsExternalOneByte() && !shape.IsUncachedExternal()) { if (shape.IsExternalOneByte() && !shape.IsUncachedExternal()) {
// TODO(syg): External strings not yet supported.
DCHECK(!FLAG_shared_string_table);
return isolate->factory() return isolate->factory()
->InternalizeExternalString<ExternalOneByteString>(string_); ->InternalizeExternalString<ExternalOneByteString>(string_);
} else if (shape.IsExternalTwoByte() && !shape.IsUncachedExternal()) { } else if (shape.IsExternalTwoByte() && !shape.IsUncachedExternal()) {
// TODO(syg): External strings not yet supported.
DCHECK(!FLAG_shared_string_table);
return isolate->factory() return isolate->factory()
->InternalizeExternalString<ExternalTwoByteString>(string_); ->InternalizeExternalString<ExternalTwoByteString>(string_);
} else { } else {
...@@ -390,6 +414,37 @@ class InternalizedStringKey final : public StringTableKey { ...@@ -390,6 +414,37 @@ class InternalizedStringKey final : public StringTableKey {
Handle<String> StringTable::LookupString(Isolate* isolate, Handle<String> StringTable::LookupString(Isolate* isolate,
Handle<String> string) { Handle<String> string) {
// When sharing the string table, internalization is allowed to be concurrent
// from multiple Isolates, assuming that:
//
// - All in-place internalizable strings (i.e. old-generation flat strings)
// and internalized strings are in the shared heap.
// - LookupKey supports concurrent access (see comment below).
//
// These assumptions guarantee the following properties:
//
// - String::Flatten is not threadsafe but is only called on non-shared
// strings, since non-flat strings are not shared.
//
// - String::ComputeAndSetHash is threadsafe on flat strings. This is safe
// because the characters are immutable and the same hash will be
// computed. The hash field is set with relaxed memory order. A thread that
// doesn't see the hash may do redundant work but will not be incorrect.
//
// - In-place internalizable strings do not incur a copy regardless of string
// table sharing. The map mutation is threadsafe even with relaxed memory
// order, because for concurrent table lookups, the "losing" thread will be
// correctly ordered by LookupKey's write mutex and see the updated map
// during the re-lookup.
//
// For lookup misses, the internalized string map is the same map in RO
// space regardless of which thread is doing the lookup.
//
// For lookup hits, String::MakeThin is not threadsafe but is currently
// only called on strings that are not accessible from multiple threads,
// even if in the shared heap. TODO(v8:12007) Make String::MakeThin
// threadsafe so old- generation flat strings can be shared across threads.
string = String::Flatten(isolate, string); string = String::Flatten(isolate, string);
if (string->IsInternalizedString()) return string; if (string->IsInternalizedString()) return string;
...@@ -449,7 +504,9 @@ Handle<String> StringTable::LookupKey(IsolateT* isolate, StringTableKey* key) { ...@@ -449,7 +504,9 @@ Handle<String> StringTable::LookupKey(IsolateT* isolate, StringTableKey* key) {
// case we'll have a false miss. // case we'll have a false miss.
InternalIndex entry = data->FindEntry(isolate, key, key->hash()); InternalIndex entry = data->FindEntry(isolate, key, key->hash());
if (entry.is_found()) { if (entry.is_found()) {
return handle(String::cast(data->Get(isolate, entry)), isolate); Handle<String> result(String::cast(data->Get(isolate, entry)), isolate);
DCHECK_IMPLIES(FLAG_shared_string_table, result->InSharedHeap());
return result;
} }
// No entry found, so adding new string. // No entry found, so adding new string.
...@@ -459,6 +516,7 @@ Handle<String> StringTable::LookupKey(IsolateT* isolate, StringTableKey* key) { ...@@ -459,6 +516,7 @@ Handle<String> StringTable::LookupKey(IsolateT* isolate, StringTableKey* key) {
// allocates the same string, the insert will fail, the lookup above will // allocates the same string, the insert will fail, the lookup above will
// succeed, and this string will be discarded. // succeed, and this string will be discarded.
Handle<String> new_string = key->AsHandle(isolate); Handle<String> new_string = key->AsHandle(isolate);
DCHECK_IMPLIES(FLAG_shared_string_table, new_string->InSharedHeap());
{ {
base::MutexGuard table_write_guard(&write_mutex_); base::MutexGuard table_write_guard(&write_mutex_);
......
...@@ -34,6 +34,7 @@ namespace internal { ...@@ -34,6 +34,7 @@ namespace internal {
Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons, Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
AllocationType allocation) { AllocationType allocation) {
DCHECK_NE(cons->second().length(), 0); DCHECK_NE(cons->second().length(), 0);
DCHECK(!cons->InSharedHeap());
// TurboFan can create cons strings with empty first parts. // TurboFan can create cons strings with empty first parts.
while (cons->first().length() == 0) { while (cons->first().length() == 0) {
...@@ -43,14 +44,17 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons, ...@@ -43,14 +44,17 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
if (cons->second().IsConsString() && !cons->second().IsFlat()) { if (cons->second().IsConsString() && !cons->second().IsFlat()) {
cons = handle(ConsString::cast(cons->second()), isolate); cons = handle(ConsString::cast(cons->second()), isolate);
} else { } else {
return String::Flatten(isolate, handle(cons->second(), isolate)); return String::Flatten(isolate, handle(cons->second(), isolate),
allocation);
} }
} }
DCHECK(AllowGarbageCollection::IsAllowed()); DCHECK(AllowGarbageCollection::IsAllowed());
int length = cons->length(); int length = cons->length();
allocation = if (allocation != AllocationType::kSharedOld) {
ObjectInYoungGeneration(*cons) ? allocation : AllocationType::kOld; allocation =
ObjectInYoungGeneration(*cons) ? allocation : AllocationType::kOld;
}
Handle<SeqString> result; Handle<SeqString> result;
if (cons->IsOneByteRepresentation()) { if (cons->IsOneByteRepresentation()) {
Handle<SeqOneByteString> flat = Handle<SeqOneByteString> flat =
...@@ -75,6 +79,31 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons, ...@@ -75,6 +79,31 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,
return result; return result;
} }
Handle<String> String::SlowCopy(Isolate* isolate, Handle<SeqString> source,
AllocationType allocation) {
int length = source->length();
Handle<String> copy;
if (source->IsOneByteRepresentation()) {
copy = isolate->factory()
->NewRawOneByteString(length, allocation)
.ToHandleChecked();
DisallowGarbageCollection no_gc;
String::FlatContent content = source->GetFlatContent(no_gc);
CopyChars(SeqOneByteString::cast(*copy).GetChars(no_gc),
content.ToOneByteVector().begin(), length);
return copy;
} else {
copy = isolate->factory()
->NewRawTwoByteString(length, allocation)
.ToHandleChecked();
DisallowGarbageCollection no_gc;
String::FlatContent content = source->GetFlatContent(no_gc);
CopyChars(SeqTwoByteString::cast(*copy).GetChars(no_gc),
content.ToUC16Vector().begin(), length);
}
return copy;
}
namespace { namespace {
template <class StringClass> template <class StringClass>
...@@ -119,6 +148,10 @@ void String::MakeThin(IsolateT* isolate, String internalized) { ...@@ -119,6 +148,10 @@ void String::MakeThin(IsolateT* isolate, String internalized) {
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
DCHECK_NE(*this, internalized); DCHECK_NE(*this, internalized);
DCHECK(internalized.IsInternalizedString()); DCHECK(internalized.IsInternalizedString());
// TODO(v8:12007): Make this method threadsafe.
DCHECK_IMPLIES(
InSharedWritableHeap(),
ThreadId::Current() == GetIsolateFromWritableObject(*this)->thread_id());
if (this->IsExternalString()) { if (this->IsExternalString()) {
MigrateExternalString(isolate->AsIsolate(), *this, internalized); MigrateExternalString(isolate->AsIsolate(), *this, internalized);
...@@ -1399,7 +1432,12 @@ uint32_t String::ComputeAndSetHash( ...@@ -1399,7 +1432,12 @@ uint32_t String::ComputeAndSetHash(
const SharedStringAccessGuardIfNeeded& access_guard) { const SharedStringAccessGuardIfNeeded& access_guard) {
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
// Should only be called if hash code has not yet been computed. // Should only be called if hash code has not yet been computed.
DCHECK(!HasHashCode()); //
// If in-place internalizable strings are shared, there may be calls to
// ComputeAndSetHash in parallel. Since only flat strings are in-place
// internalizable and their contents do not change, the result hash is the
// same. The raw hash field is stored with relaxed ordering.
DCHECK_IMPLIES(!FLAG_shared_string_table, !HasHashCode());
// Store the hash code in the object. // Store the hash code in the object.
uint64_t seed = HashSeed(GetReadOnlyRoots()); uint64_t seed = HashSeed(GetReadOnlyRoots());
......
...@@ -555,6 +555,7 @@ class String : public TorqueGeneratedString<String, Name> { ...@@ -555,6 +555,7 @@ class String : public TorqueGeneratedString<String, Name> {
private: private:
friend class Name; friend class Name;
friend class StringTableInsertionKey; friend class StringTableInsertionKey;
friend class SharedStringTableInsertionKey;
friend class InternalizedStringKey; friend class InternalizedStringKey;
// Implementation of the Get() public methods. Do not use directly. // Implementation of the Get() public methods. Do not use directly.
...@@ -578,6 +579,9 @@ class String : public TorqueGeneratedString<String, Name> { ...@@ -578,6 +579,9 @@ class String : public TorqueGeneratedString<String, Name> {
V8_EXPORT_PRIVATE static Handle<String> SlowFlatten( V8_EXPORT_PRIVATE static Handle<String> SlowFlatten(
Isolate* isolate, Handle<ConsString> cons, AllocationType allocation); Isolate* isolate, Handle<ConsString> cons, AllocationType allocation);
static Handle<String> SlowCopy(Isolate* isolate, Handle<SeqString> source,
AllocationType allocation);
// Slow case of String::Equals. This implementation works on any strings // Slow case of String::Equals. This implementation works on any strings
// but it is most efficient on strings that are almost flat. // but it is most efficient on strings that are almost flat.
V8_EXPORT_PRIVATE bool SlowEquals(String other) const; V8_EXPORT_PRIVATE bool SlowEquals(String other) const;
......
...@@ -343,17 +343,25 @@ uint32_t ComputeRawHashField(IsolateT* isolate, String string) { ...@@ -343,17 +343,25 @@ uint32_t ComputeRawHashField(IsolateT* isolate, String string) {
} }
} // namespace } // namespace
StringTableInsertionKey::StringTableInsertionKey(Isolate* isolate, StringTableInsertionKey::StringTableInsertionKey(
Handle<String> string) Isolate* isolate, Handle<String> string,
DeserializingUserCodeOption deserializing_user_code)
: StringTableKey(ComputeRawHashField(isolate, *string), string->length()), : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
string_(string) { string_(string) {
#ifdef DEBUG
deserializing_user_code_ = deserializing_user_code;
#endif
DCHECK(string->IsInternalizedString()); DCHECK(string->IsInternalizedString());
} }
StringTableInsertionKey::StringTableInsertionKey(LocalIsolate* isolate, StringTableInsertionKey::StringTableInsertionKey(
Handle<String> string) LocalIsolate* isolate, Handle<String> string,
DeserializingUserCodeOption deserializing_user_code)
: StringTableKey(ComputeRawHashField(isolate, *string), string->length()), : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
string_(string) { string_(string) {
#ifdef DEBUG
deserializing_user_code_ = deserializing_user_code;
#endif
DCHECK(string->IsInternalizedString()); DCHECK(string->IsInternalizedString());
} }
...@@ -415,7 +423,9 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map, ...@@ -415,7 +423,9 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
// be dead, try immediately freeing it. // be dead, try immediately freeing it.
Handle<String> string = Handle<String>::cast(obj); Handle<String> string = Handle<String>::cast(obj);
StringTableInsertionKey key(isolate(), string); StringTableInsertionKey key(
isolate(), string,
DeserializingUserCodeOption::kIsDeserializingUserCode);
Handle<String> result = Handle<String> result =
isolate()->string_table()->LookupKey(isolate(), &key); isolate()->string_table()->LookupKey(isolate(), &key);
...@@ -562,6 +572,21 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject() { ...@@ -562,6 +572,21 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject() {
return ret; return ret;
} }
namespace {
AllocationType SpaceToAllocation(SnapshotSpace space) {
switch (space) {
case SnapshotSpace::kCode:
return AllocationType::kCode;
case SnapshotSpace::kMap:
return AllocationType::kMap;
case SnapshotSpace::kOld:
return AllocationType::kOld;
case SnapshotSpace::kReadOnlyHeap:
return AllocationType::kReadOnly;
}
}
} // namespace
template <typename IsolateT> template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) { Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
const int size_in_tagged = source_.GetInt(); const int size_in_tagged = source_.GetInt();
...@@ -573,6 +598,24 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) { ...@@ -573,6 +598,24 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
DCHECK_NE(source()->Peek(), kRegisterPendingForwardRef); DCHECK_NE(source()->Peek(), kRegisterPendingForwardRef);
Handle<Map> map = Handle<Map>::cast(ReadObject()); Handle<Map> map = Handle<Map>::cast(ReadObject());
AllocationType allocation = SpaceToAllocation(space);
// When sharing a string table, all in-place internalizable strings except
// internalized strings are allocated in the shared heap. Internalized strings
// are allocated in the local heap as an optimization, because they need to be
// looked up in the shared string table to get the canonical copy anyway, and
// the shared allocation is needless synchronization on the concurrent
// allocator.
if (FLAG_shared_string_table &&
!isolate()->factory()->GetInPlaceInternalizedStringMap(*map).is_null() &&
(!InstanceTypeChecker::IsInternalizedString(map->instance_type()) ||
deserializing_user_code())) {
allocation = isolate()
->factory()
->RefineAllocationTypeForInPlaceInternalizableString(
allocation, *map);
}
// Filling an object's fields can cause GCs and heap walks, so this object has // Filling an object's fields can cause GCs and heap walks, so this object has
// to be in a 'sufficiently initialised' state by the time the next allocation // to be in a 'sufficiently initialised' state by the time the next allocation
// can happen. For this to be the case, the object is carefully deserialized // can happen. For this to be the case, the object is carefully deserialized
...@@ -593,7 +636,7 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) { ...@@ -593,7 +636,7 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
// - We ensure this is the case by DCHECKing on object allocation that the // - We ensure this is the case by DCHECKing on object allocation that the
// previously allocated object has a valid size (see `Allocate`). // previously allocated object has a valid size (see `Allocate`).
HeapObject raw_obj = HeapObject raw_obj =
Allocate(space, size_in_bytes, HeapObject::RequiredAlignment(*map)); Allocate(allocation, size_in_bytes, HeapObject::RequiredAlignment(*map));
raw_obj.set_map_after_allocation(*map); raw_obj.set_map_after_allocation(*map);
MemsetTagged(raw_obj.RawField(kTaggedSize), MemsetTagged(raw_obj.RawField(kTaggedSize),
Smi::uninitialized_deserialization_value(), size_in_tagged - 1); Smi::uninitialized_deserialization_value(), size_in_tagged - 1);
...@@ -651,7 +694,8 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadMetaMap() { ...@@ -651,7 +694,8 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadMetaMap() {
const int size_in_bytes = Map::kSize; const int size_in_bytes = Map::kSize;
const int size_in_tagged = size_in_bytes / kTaggedSize; const int size_in_tagged = size_in_bytes / kTaggedSize;
HeapObject raw_obj = Allocate(space, size_in_bytes, kWordAligned); HeapObject raw_obj =
Allocate(SpaceToAllocation(space), size_in_bytes, kWordAligned);
raw_obj.set_map_after_allocation(Map::unchecked_cast(raw_obj)); raw_obj.set_map_after_allocation(Map::unchecked_cast(raw_obj));
MemsetTagged(raw_obj.RawField(kTaggedSize), MemsetTagged(raw_obj.RawField(kTaggedSize),
Smi::uninitialized_deserialization_value(), size_in_tagged - 1); Smi::uninitialized_deserialization_value(), size_in_tagged - 1);
...@@ -1191,23 +1235,8 @@ Address Deserializer<IsolateT>::ReadExternalReferenceCase() { ...@@ -1191,23 +1235,8 @@ Address Deserializer<IsolateT>::ReadExternalReferenceCase() {
reference_id); reference_id);
} }
namespace {
AllocationType SpaceToType(SnapshotSpace space) {
switch (space) {
case SnapshotSpace::kCode:
return AllocationType::kCode;
case SnapshotSpace::kMap:
return AllocationType::kMap;
case SnapshotSpace::kOld:
return AllocationType::kOld;
case SnapshotSpace::kReadOnlyHeap:
return AllocationType::kReadOnly;
}
}
} // namespace
template <typename IsolateT> template <typename IsolateT>
HeapObject Deserializer<IsolateT>::Allocate(SnapshotSpace space, int size, HeapObject Deserializer<IsolateT>::Allocate(AllocationType allocation, int size,
AllocationAlignment alignment) { AllocationAlignment alignment) {
#ifdef DEBUG #ifdef DEBUG
if (!previous_allocation_obj_.is_null()) { if (!previous_allocation_obj_.is_null()) {
...@@ -1219,7 +1248,7 @@ HeapObject Deserializer<IsolateT>::Allocate(SnapshotSpace space, int size, ...@@ -1219,7 +1248,7 @@ HeapObject Deserializer<IsolateT>::Allocate(SnapshotSpace space, int size,
#endif #endif
HeapObject obj = HeapObject::FromAddress(isolate()->heap()->AllocateRawOrFail( HeapObject obj = HeapObject::FromAddress(isolate()->heap()->AllocateRawOrFail(
size, SpaceToType(space), AllocationOrigin::kRuntime, alignment)); size, allocation, AllocationOrigin::kRuntime, alignment));
#ifdef DEBUG #ifdef DEBUG
previous_allocation_obj_ = handle(obj, isolate()); previous_allocation_obj_ = handle(obj, isolate());
......
...@@ -198,7 +198,7 @@ class Deserializer : public SerializerDeserializer { ...@@ -198,7 +198,7 @@ class Deserializer : public SerializerDeserializer {
void PostProcessNewObject(Handle<Map> map, Handle<HeapObject> obj, void PostProcessNewObject(Handle<Map> map, Handle<HeapObject> obj,
SnapshotSpace space); SnapshotSpace space);
HeapObject Allocate(SnapshotSpace space, int size, HeapObject Allocate(AllocationType allocation, int size,
AllocationAlignment alignment); AllocationAlignment alignment);
// Cached current isolate. // Cached current isolate.
...@@ -264,23 +264,41 @@ extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ...@@ -264,23 +264,41 @@ extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) extern template class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
Deserializer<LocalIsolate>; Deserializer<LocalIsolate>;
enum class DeserializingUserCodeOption {
kNotDeserializingUserCode,
kIsDeserializingUserCode
};
// Used to insert a deserialized internalized string into the string table. // Used to insert a deserialized internalized string into the string table.
class StringTableInsertionKey final : public StringTableKey { class StringTableInsertionKey final : public StringTableKey {
public: public:
explicit StringTableInsertionKey(Isolate* isolate, Handle<String> string); explicit StringTableInsertionKey(
explicit StringTableInsertionKey(LocalIsolate* isolate, Isolate* isolate, Handle<String> string,
Handle<String> string); DeserializingUserCodeOption deserializing_user_code);
explicit StringTableInsertionKey(
LocalIsolate* isolate, Handle<String> string,
DeserializingUserCodeOption deserializing_user_code);
template <typename IsolateT> template <typename IsolateT>
bool IsMatch(IsolateT* isolate, String string); bool IsMatch(IsolateT* isolate, String string);
template <typename IsolateT> V8_WARN_UNUSED_RESULT Handle<String> AsHandle(Isolate* isolate) {
V8_WARN_UNUSED_RESULT Handle<String> AsHandle(IsolateT* isolate) { // When sharing the string table, all string table lookups during snapshot
// deserialization are hits.
DCHECK(isolate->OwnsStringTable() ||
deserializing_user_code_ ==
DeserializingUserCodeOption::kIsDeserializingUserCode);
return string_;
}
V8_WARN_UNUSED_RESULT Handle<String> AsHandle(LocalIsolate* isolate) {
return string_; return string_;
} }
private: private:
Handle<String> string_; Handle<String> string_;
#ifdef DEBUG
DeserializingUserCodeOption deserializing_user_code_;
#endif
DISALLOW_GARBAGE_COLLECTION(no_gc) DISALLOW_GARBAGE_COLLECTION(no_gc)
}; };
......
...@@ -85,14 +85,24 @@ void StartupDeserializer::DeserializeStringTable() { ...@@ -85,14 +85,24 @@ void StartupDeserializer::DeserializeStringTable() {
// TODO(leszeks): Consider pre-sizing the string table. // TODO(leszeks): Consider pre-sizing the string table.
for (int i = 0; i < string_table_size; ++i) { for (int i = 0; i < string_table_size; ++i) {
Handle<String> string = Handle<String>::cast(ReadObject()); Handle<String> string = Handle<String>::cast(ReadObject());
StringTableInsertionKey key(isolate(), string); StringTableInsertionKey key(
isolate(), string,
DeserializingUserCodeOption::kNotDeserializingUserCode);
Handle<String> result = Handle<String> result =
isolate()->string_table()->LookupKey(isolate(), &key); isolate()->string_table()->LookupKey(isolate(), &key);
USE(result);
// This is startup, so there should be no duplicate entries in the string if (isolate()->OwnsStringTable()) {
// table, and the lookup should unconditionally add the given string. // When not sharing the string table, since this is startup, there should
DCHECK_EQ(*result, *string); // be no duplicate entries in the string table, and the lookup should
// unconditionally add the given string.
DCHECK_EQ(*result, *string);
USE(result);
} else if (*result != *string) {
DCHECK(!string->InSharedHeap());
DCHECK(result->InSharedHeap());
string->MakeThin(isolate(), *result);
string.PatchValue(*result);
}
} }
DCHECK_EQ(string_table_size, isolate()->string_table()->NumberOfElements()); DCHECK_EQ(string_table_size, isolate()->string_table()->NumberOfElements());
......
...@@ -269,6 +269,7 @@ v8_source_set("cctest_sources") { ...@@ -269,6 +269,7 @@ v8_source_set("cctest_sources") {
"test-roots.cc", "test-roots.cc",
"test-sampler-api.cc", "test-sampler-api.cc",
"test-serialize.cc", "test-serialize.cc",
"test-shared-strings.cc",
"test-smi-lexicographic-compare.cc", "test-smi-lexicographic-compare.cc",
"test-strings.cc", "test-strings.cc",
"test-strtod.cc", "test-strtod.cc",
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment