Commit a57d9aab authored by Shu-yu Guo, committed by V8 LUCI CQ

[heap] Promote shareable strings into shared heap

When --shared-string-table is passed, in-place-internalizable strings
are promoted into the shared old space to maintain the invariant that
in-place internalization can be done without copying.

Also some drive-by comment fixes and removal of unnecessary 'explicit'
on multi-parameter constructors.

Bug: v8:12007
Change-Id: I467d865e41934b1d5cdf85cbecc85c4befbfeb21
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3193591
Auto-Submit: Shu-yu Guo <syg@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77326}
parent c07c0e76
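
Note on the invariant this change preserves: internalizing an in-place-internalizable string only swaps the string's map for its internalized variant and inserts the very same object into the string table; the character payload is never copied. With a shared string table the table entry must therefore be an object every client isolate can reference, which is why shareable strings are promoted into the shared old space. A minimal sketch of the map swap, illustrative only and omitting the actual string-table insertion (it reuses the GetInPlaceInternalizedStringMap helper touched below):

    // Illustrative sketch, not part of this CL.
    void InternalizeInPlaceSketch(Isolate* isolate, Handle<String> string) {
      Handle<Map> internalized_map;
      if (isolate->factory()
              ->GetInPlaceInternalizedStringMap(string->map())
              .ToHandle(&internalized_map)) {
        // Only the map changes; the characters stay where they are, so the
        // object must already live in a space the string table may point to.
        string->set_map(*internalized_map);
      }
    }
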
@@ -833,9 +833,8 @@ enum class AllocationType : uint8_t {
   kCode,       // Code object allocated in CODE_SPACE or CODE_LO_SPACE
   kMap,        // Map object allocated in MAP_SPACE
   kReadOnly,   // Object allocated in RO_SPACE
-  kSharedOld,  // Regular object allocated in SHARED_OLD_SPACE or
-               // SHARED_LO_SPACE
-  kSharedMap,  // Map object in SHARED_MAP_SPACE
+  kSharedOld,  // Regular object allocated in OLD_SPACE in the shared heap
+  kSharedMap,  // Map object in MAP_SPACE in the shared heap
 };

 inline size_t hash_value(AllocationType kind) {
...
@@ -1789,6 +1789,41 @@ void FunctionTemplateRareData::FunctionTemplateRareDataVerify(
       c_function_overloads().IsUndefined(isolate));
 }

+// Helper class for verifying the string table.
+class StringTableVerifier : public RootVisitor {
+ public:
+  explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
+
+  void VisitRootPointers(Root root, const char* description,
+                         FullObjectSlot start, FullObjectSlot end) override {
+    UNREACHABLE();
+  }
+  void VisitRootPointers(Root root, const char* description,
+                         OffHeapObjectSlot start,
+                         OffHeapObjectSlot end) override {
+    // Visit all HeapObject pointers in [start, end).
+    for (OffHeapObjectSlot p = start; p < end; ++p) {
+      Object o = p.load(isolate_);
+      DCHECK(!HasWeakHeapObjectTag(o));
+      if (o.IsHeapObject()) {
+        HeapObject object = HeapObject::cast(o);
+        // Check that the string is actually internalized.
+        CHECK(object.IsInternalizedString());
+      }
+    }
+  }
+
+ private:
+  Isolate* isolate_;
+};
+
+void StringTable::VerifyIfOwnedBy(Isolate* isolate) {
+  DCHECK_EQ(isolate->string_table(), this);
+  if (!isolate->OwnsStringTable()) return;
+  StringTableVerifier verifier(isolate);
+  IterateElements(&verifier);
+}
+
 #endif  // VERIFY_HEAP

 #ifdef DEBUG
...
@@ -927,28 +927,38 @@ FactoryBase<Impl>::NewFunctionTemplateRareData() {
 template <typename Impl>
 MaybeHandle<Map> FactoryBase<Impl>::GetInPlaceInternalizedStringMap(
     Map from_string_map) {
-  switch (from_string_map.instance_type()) {
+  InstanceType instance_type = from_string_map.instance_type();
+  MaybeHandle<Map> map;
+  switch (instance_type) {
     case STRING_TYPE:
-      return read_only_roots().internalized_string_map_handle();
+      map = read_only_roots().internalized_string_map_handle();
+      break;
     case ONE_BYTE_STRING_TYPE:
-      return read_only_roots().one_byte_internalized_string_map_handle();
+      map = read_only_roots().one_byte_internalized_string_map_handle();
+      break;
     case EXTERNAL_STRING_TYPE:
-      return read_only_roots().external_internalized_string_map_handle();
+      map = read_only_roots().external_internalized_string_map_handle();
+      break;
     case EXTERNAL_ONE_BYTE_STRING_TYPE:
-      return read_only_roots()
-          .external_one_byte_internalized_string_map_handle();
+      map =
+          read_only_roots().external_one_byte_internalized_string_map_handle();
+      break;
     default:
-      return MaybeHandle<Map>();
+      break;
   }
+  DCHECK_EQ(!map.is_null(), String::IsInPlaceInternalizable(instance_type));
+  return map;
 }

 template <typename Impl>
 AllocationType
 FactoryBase<Impl>::RefineAllocationTypeForInPlaceInternalizableString(
     AllocationType allocation, Map string_map) {
-  DCHECK(
-      InstanceTypeChecker::IsInternalizedString(string_map.instance_type()) ||
-      !GetInPlaceInternalizedStringMap(string_map).is_null());
+#ifdef DEBUG
+  InstanceType instance_type = string_map.instance_type();
+  DCHECK(InstanceTypeChecker::IsInternalizedString(instance_type) ||
+         String::IsInPlaceInternalizable(instance_type));
+#endif
   if (allocation != AllocationType::kOld) return allocation;
   return impl()->AllocationTypeForInPlaceInternalizableString();
 }
...
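
The refinement hook above only redirects AllocationType::kOld requests; the per-factory AllocationTypeForInPlaceInternalizableString it defers to is not part of this excerpt. A hypothetical sketch of what such a hook could look like, assuming the shared old space is chosen exactly when a shared string table is in use (the real Factory/LocalFactory implementations may differ):

    // Hypothetical sketch; not taken from this CL.
    AllocationType Factory::AllocationTypeForInPlaceInternalizableString() {
      return FLAG_shared_string_table && isolate()->shared_isolate() != nullptr
                 ? AllocationType::kSharedOld
                 : AllocationType::kOld;
    }
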
@@ -781,6 +781,15 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }

+OptionalAlwaysAllocateScope::OptionalAlwaysAllocateScope(Heap* heap)
+    : heap_(heap) {
+  if (heap_) heap_->always_allocate_scope_count_++;
+}
+
+OptionalAlwaysAllocateScope::~OptionalAlwaysAllocateScope() {
+  if (heap_) heap_->always_allocate_scope_count_--;
+}
+
 AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
     : scope_(heap) {}
...
@@ -1342,10 +1342,7 @@ void Heap::GarbageCollectionEpilogue() {
   isolate_->counters()->alive_after_last_gc()->Set(
       static_cast<int>(SizeOfObjects()));

-  isolate_->counters()->string_table_capacity()->Set(
-      isolate()->string_table()->Capacity());
-  isolate_->counters()->number_of_symbols()->Set(
-      isolate()->string_table()->NumberOfElements());
+  isolate_->string_table()->UpdateCountersIfOwnedBy(isolate_);

   if (CommittedMemory() > 0) {
     isolate_->counters()->external_fragmentation_total()->AddSample(
@@ -2071,41 +2068,6 @@ void Heap::CopyRange(HeapObject dst_object, const TSlot dst_slot,
   WriteBarrierForRange(dst_object, dst_slot, dst_end);
 }

-#ifdef VERIFY_HEAP
-// Helper class for verifying the string table.
-class StringTableVerifier : public RootVisitor {
- public:
-  explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
-
-  void VisitRootPointers(Root root, const char* description,
-                         FullObjectSlot start, FullObjectSlot end) override {
-    UNREACHABLE();
-  }
-  void VisitRootPointers(Root root, const char* description,
-                         OffHeapObjectSlot start,
-                         OffHeapObjectSlot end) override {
-    // Visit all HeapObject pointers in [start, end).
-    for (OffHeapObjectSlot p = start; p < end; ++p) {
-      Object o = p.load(isolate_);
-      DCHECK(!HasWeakHeapObjectTag(o));
-      if (o.IsHeapObject()) {
-        HeapObject object = HeapObject::cast(o);
-        // Check that the string is actually internalized.
-        CHECK(object.IsInternalizedString());
-      }
-    }
-  }
-
- private:
-  Isolate* isolate_;
-};
-
-static void VerifyStringTable(Isolate* isolate) {
-  StringTableVerifier verifier(isolate);
-  isolate->string_table()->IterateElements(&verifier);
-}
-#endif  // VERIFY_HEAP
-
 void Heap::EnsureFromSpaceIsCommitted() {
   if (!new_space_) return;
   if (new_space_->CommitFromSpaceIfNeeded()) return;
@@ -2471,6 +2433,12 @@ void Heap::MarkCompact() {
   CodeSpaceMemoryModificationScope code_modifcation(this);

+  // Disable soft allocation limits in the shared heap, if one exists, as
+  // promotions into the shared heap should always succeed.
+  OptionalAlwaysAllocateScope always_allocate_shared_heap(
+      isolate()->shared_isolate() ? isolate()->shared_isolate()->heap()
+                                  : nullptr);
+
   UpdateOldGenerationAllocationCounter();
   uint64_t size_of_objects_before_gc = SizeOfObjects();

@@ -2511,6 +2479,11 @@ void Heap::MinorMarkCompact() {
   TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
   AlwaysAllocateScope always_allocate(this);
+  // Disable soft allocation limits in the shared heap, if one exists, as
+  // promotions into the shared heap should always succeed.
+  OptionalAlwaysAllocateScope always_allocate_shared_heap(
+      isolate()->shared_isolate() ? isolate()->shared_isolate()->heap()
+                                  : nullptr);
   IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
       incremental_marking());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());

@@ -2629,6 +2602,12 @@ void Heap::Scavenge() {
   // trigger one during scavenge: scavenges allocation should always succeed.
   AlwaysAllocateScope scope(this);

+  // Disable soft allocation limits in the shared heap, if one exists, as
+  // promotions into the shared heap should always succeed.
+  OptionalAlwaysAllocateScope always_allocate_shared_heap(
+      isolate()->shared_isolate() ? isolate()->shared_isolate()->heap()
+                                  : nullptr);
+
   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
   PauseAllocationObserversScope pause_observers(this);

@@ -4429,7 +4408,7 @@ void Heap::Verify() {
   lo_space_->Verify(isolate());
   code_lo_space_->Verify(isolate());
   if (new_lo_space_) new_lo_space_->Verify(isolate());
-  if (isolate()->OwnsStringTable()) VerifyStringTable(isolate());
+  isolate()->string_table()->VerifyIfOwnedBy(isolate());
 }

 void Heap::VerifyReadOnlyHeap() {
...
@@ -2521,6 +2521,7 @@ class Heap {
   friend class ArrayBufferCollector;
   friend class ArrayBufferSweeper;
   friend class ConcurrentMarking;
+  friend class EvacuateVisitorBase;
   friend class GCCallbacksScope;
   friend class GCTracer;
   friend class HeapObjectIterator;
@@ -2531,6 +2532,7 @@ class Heap {
   friend class LocalHeap;
   friend class MarkingBarrier;
   friend class OldLargeObjectSpace;
+  friend class OptionalAlwaysAllocateScope;
   template <typename ConcreteVisitor, typename MarkingState>
   friend class MarkingVisitorBase;
   friend class MarkCompactCollector;
@@ -2616,6 +2618,24 @@ class V8_NODISCARD AlwaysAllocateScope {
   Heap* heap_;
 };

+// Like AlwaysAllocateScope if the heap argument to the constructor is
+// non-null. No-op otherwise.
+//
+// This class exists because AlwaysAllocateScope doesn't compose with
+// base::Optional, since supporting that composition requires making
+// base::Optional a friend class, defeating the purpose of hiding its
+// constructor.
+class V8_NODISCARD OptionalAlwaysAllocateScope {
+ public:
+  inline ~OptionalAlwaysAllocateScope();
+
+ private:
+  friend class Heap;
+
+  explicit inline OptionalAlwaysAllocateScope(Heap* heap);
+
+  Heap* heap_;
+};
+
 class V8_NODISCARD AlwaysAllocateScopeForTesting {
  public:
   explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
...
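
For the comment on OptionalAlwaysAllocateScope above: the composition it rules out would look roughly like the sketch below, which only compiles if base::Optional is befriended so that emplace() can reach AlwaysAllocateScope's private constructor (GetSharedHeapOrNull is a hypothetical stand-in for the isolate()->shared_isolate() checks used at the call sites):

    // Rejected alternative, sketched for illustration only.
    base::Optional<AlwaysAllocateScope> maybe_always_allocate;
    if (Heap* shared_heap = GetSharedHeapOrNull()) {
      maybe_always_allocate.emplace(shared_heap);
    }
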
@@ -53,6 +53,7 @@
 #include "src/objects/smi.h"
 #include "src/objects/transitions-inl.h"
 #include "src/objects/visitors.h"
+#include "src/snapshot/shared-heap-serializer.h"
 #include "src/tasks/cancelable-task.h"
 #include "src/tracing/tracing-category-observer.h"
 #include "src/utils/utils-inl.h"
@@ -267,9 +268,11 @@ class FullMarkingVerifier : public MarkingVerifier {
         BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap())
       return;

-    if (!heap_->isolate()->OwnsStringTable() && heap_object.IsString() &&
-        !Heap::InYoungGeneration(heap_object)) {
-      CHECK(BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap());
+    if (!heap_->isolate()->OwnsStringTable() &&
+        !Heap::InYoungGeneration(heap_object) &&
+        SharedHeapSerializer::ShouldBeInSharedOldSpace(heap_->isolate(),
+                                                       heap_object)) {
+      CHECK(heap_->SharedHeapContains(heap_object));
     }

     CHECK(marking_state_->IsBlackOrGrey(heap_object));
@@ -1435,6 +1438,12 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
       : heap_(heap),
         local_allocator_(local_allocator),
         record_visitor_(record_visitor) {
+    if (FLAG_shared_string_table) {
+      if (Isolate* shared_isolate = heap->isolate()->shared_isolate()) {
+        shared_string_table_ = true;
+        shared_old_allocator_ = heap_->shared_old_allocator_.get();
+      }
+    }
     migration_function_ = RawMigrateObject<MigrationMode::kFast>;
   }

@@ -1444,9 +1453,19 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     if (FLAG_stress_compaction && AbortCompactionForTesting(object))
       return false;
 #endif  // DEBUG
-    AllocationAlignment alignment = HeapObject::RequiredAlignment(object.map());
-    AllocationResult allocation = local_allocator_->Allocate(
-        target_space, size, AllocationOrigin::kGC, alignment);
+    Map map = object.map();
+    AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
+    AllocationResult allocation;
+    if (ShouldPromoteIntoSharedHeap(map)) {
+      DCHECK_EQ(target_space, OLD_SPACE);
+      DCHECK(Heap::InYoungGeneration(object));
+      DCHECK_NOT_NULL(shared_old_allocator_);
+      allocation = shared_old_allocator_->AllocateRaw(size, alignment,
+                                                      AllocationOrigin::kGC);
+    } else {
+      allocation = local_allocator_->Allocate(target_space, size,
+                                              AllocationOrigin::kGC, alignment);
+    }
     if (allocation.To(target_object)) {
       MigrateObject(*target_object, object, size, target_space);
       if (target_space == CODE_SPACE)
@@ -1458,6 +1477,13 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
     return false;
   }

+  inline bool ShouldPromoteIntoSharedHeap(Map map) {
+    if (shared_string_table_) {
+      return String::IsInPlaceInternalizable(map.instance_type());
+    }
+    return false;
+  }
+
   inline void ExecuteMigrationObservers(AllocationSpace dest, HeapObject src,
                                         HeapObject dst, int size) {
     for (MigrationObserver* obs : observers_) {
@@ -1491,9 +1517,11 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
   Heap* heap_;
   EvacuationAllocator* local_allocator_;
+  ConcurrentAllocator* shared_old_allocator_ = nullptr;
   RecordMigratedSlotVisitor* record_visitor_;
   std::vector<MigrationObserver*> observers_;
   MigrateFunction migration_function_;
+  bool shared_string_table_ = false;
 };

 class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
...
@@ -569,7 +569,8 @@ base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
     AllocationAlignment alignment, AllocationOrigin origin) {
   DCHECK(!is_compaction_space());
   DCHECK(identity() == OLD_SPACE || identity() == MAP_SPACE);
-  DCHECK_EQ(origin, AllocationOrigin::kRuntime);
+  DCHECK(origin == AllocationOrigin::kRuntime ||
+         origin == AllocationOrigin::kGC);

   base::Optional<std::pair<Address, size_t>> result =
       TryAllocationFromFreeListBackground(local_heap, min_size_in_bytes,
...
@@ -177,7 +177,8 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(
   return CopyAndForwardResult::FAILURE;
 }

-template <typename THeapObjectSlot>
+template <typename THeapObjectSlot,
+          Scavenger::PromotionHeapChoice promotion_heap_choice>
 CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                                               HeapObject object,
                                               int object_size,
@@ -187,8 +188,18 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                 "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
   DCHECK_GE(object_size, Heap::kMinObjectSizeInTaggedWords * kTaggedSize);
   AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
-  AllocationResult allocation = allocator_.Allocate(
-      OLD_SPACE, object_size, AllocationOrigin::kGC, alignment);
+  AllocationResult allocation;
+  switch (promotion_heap_choice) {
+    case kPromoteIntoLocalHeap:
+      allocation = allocator_.Allocate(OLD_SPACE, object_size,
+                                       AllocationOrigin::kGC, alignment);
+      break;
+    case kPromoteIntoSharedHeap:
+      DCHECK_NOT_NULL(shared_old_allocator_);
+      allocation = shared_old_allocator_->AllocateRaw(object_size, alignment,
+                                                      AllocationOrigin::kGC);
+      break;
+  }
   HeapObject target;
   if (allocation.To(&target)) {
@@ -243,7 +254,8 @@ bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size,
   return false;
 }

-template <typename THeapObjectSlot>
+template <typename THeapObjectSlot,
+          Scavenger::PromotionHeapChoice promotion_heap_choice>
 SlotCallbackResult Scavenger::EvacuateObjectDefault(
     Map map, THeapObjectSlot slot, HeapObject object, int object_size,
     ObjectFields object_fields) {
@@ -270,9 +282,10 @@ SlotCallbackResult Scavenger::EvacuateObjectDefault(
   }

   // We may want to promote this object if the object was already semi-space
-  // copied in a previes young generation GC or if the semi-space copy above
+  // copied in a previous young generation GC or if the semi-space copy above
   // failed.
-  result = PromoteObject(map, slot, object, object_size, object_fields);
+  result = PromoteObject<THeapObjectSlot, promotion_heap_choice>(
+      map, slot, object, object_size, object_fields);
   if (result != CopyAndForwardResult::FAILURE) {
     return RememberedSetEntryNeeded(result);
   }
@@ -355,6 +368,19 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
                                 ObjectFields::kMaybePointers);
 }

+template <typename THeapObjectSlot>
+SlotCallbackResult Scavenger::EvacuateInPlaceInternalizableString(
+    Map map, THeapObjectSlot slot, String object, int object_size,
+    ObjectFields object_fields) {
+  DCHECK(String::IsInPlaceInternalizable(map.instance_type()));
+  DCHECK_EQ(object_fields, Map::ObjectFieldsFrom(map.visitor_id()));
+  if (shared_string_table_) {
+    return EvacuateObjectDefault<THeapObjectSlot, kPromoteIntoSharedHeap>(
+        map, slot, object, object_size, object_fields);
+  }
+  return EvacuateObjectDefault(map, slot, object, object_size, object_fields);
+}
+
 template <typename THeapObjectSlot>
 SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
                                              HeapObject source) {
@@ -378,6 +404,19 @@ SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
       // At the moment we don't allow weak pointers to cons strings.
       return EvacuateShortcutCandidate(
           map, slot, ConsString::unchecked_cast(source), size);
+    case kVisitSeqOneByteString:
+    case kVisitSeqTwoByteString:
+      DCHECK(String::IsInPlaceInternalizable(map.instance_type()));
+      return EvacuateInPlaceInternalizableString(
+          map, slot, String::unchecked_cast(source), size,
+          ObjectFields::kMaybePointers);
+    case kVisitDataObject:  // External strings have kVisitDataObject.
+      if (String::IsInPlaceInternalizable(map.instance_type())) {
+        return EvacuateInPlaceInternalizableString(
+            map, slot, String::unchecked_cast(source), size,
+            ObjectFields::kDataOnly);
+      }
+      V8_FALLTHROUGH;
     default:
       return EvacuateObjectDefault(map, slot, source, size,
                                    Map::ObjectFieldsFrom(visitor_id));
...
@@ -546,9 +546,12 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       copied_size_(0),
       promoted_size_(0),
       allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge),
+      shared_old_allocator_(heap_->shared_old_allocator_.get()),
       is_logging_(is_logging),
       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
-      is_compacting_(heap->incremental_marking()->IsCompacting()) {}
+      is_compacting_(heap->incremental_marking()->IsCompacting()),
+      shared_string_table_(FLAG_shared_string_table &&
+                           (heap->isolate()->shared_isolate() != nullptr)) {}

 void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
                                                  int size) {
...
@@ -154,7 +154,10 @@ class Scavenger {
   SemiSpaceCopyObject(Map map, THeapObjectSlot slot, HeapObject object,
                       int object_size, ObjectFields object_fields);

-  template <typename THeapObjectSlot>
+  enum PromotionHeapChoice { kPromoteIntoLocalHeap, kPromoteIntoSharedHeap };
+
+  template <typename THeapObjectSlot,
+            PromotionHeapChoice promotion_heap_choice = kPromoteIntoLocalHeap>
   V8_INLINE CopyAndForwardResult PromoteObject(Map map, THeapObjectSlot slot,
                                                HeapObject object,
                                                int object_size,
@@ -168,7 +171,8 @@ class Scavenger {
                                    ObjectFields object_fields);

   // Different cases for object evacuation.
-  template <typename THeapObjectSlot>
+  template <typename THeapObjectSlot,
+            PromotionHeapChoice promotion_heap_choice = kPromoteIntoLocalHeap>
   V8_INLINE SlotCallbackResult
   EvacuateObjectDefault(Map map, THeapObjectSlot slot, HeapObject object,
                         int object_size, ObjectFields object_fields);
@@ -184,6 +188,11 @@ class Scavenger {
                                                 ConsString object,
                                                 int object_size);

+  template <typename THeapObjectSlot>
+  inline SlotCallbackResult EvacuateInPlaceInternalizableString(
+      Map map, THeapObjectSlot slot, String string, int object_size,
+      ObjectFields object_fields);
+
   void IterateAndScavengePromotedObject(HeapObject target, Map map, int size);
   void RememberPromotedEphemeron(EphemeronHashTable table, int index);

@@ -197,12 +206,14 @@ class Scavenger {
   size_t copied_size_;
   size_t promoted_size_;
   EvacuationAllocator allocator_;
+  ConcurrentAllocator* shared_old_allocator_ = nullptr;
   SurvivingNewLargeObjectsMap surviving_new_large_objects_;
   EphemeronRememberedSet ephemeron_remembered_set_;

   const bool is_logging_;
   const bool is_incremental_marking_;
   const bool is_compacting_;
+  const bool shared_string_table_;

   friend class IterateAndScavengePromotedObjectsVisitor;
   friend class RootScavengeVisitor;
...
@@ -1263,10 +1263,21 @@ SubStringRange::iterator SubStringRange::end() {
 }

 // static
-bool String::IsInPlaceInternalizable(Isolate* isolate, String string) {
-  return !isolate->factory()
-              ->GetInPlaceInternalizedStringMap(string.map())
-              .is_null();
+bool String::IsInPlaceInternalizable(String string) {
+  return IsInPlaceInternalizable(string.map().instance_type());
+}
+
+// static
+bool String::IsInPlaceInternalizable(InstanceType instance_type) {
+  switch (instance_type) {
+    case STRING_TYPE:
+    case ONE_BYTE_STRING_TYPE:
+    case EXTERNAL_STRING_TYPE:
+    case EXTERNAL_ONE_BYTE_STRING_TYPE:
+      return true;
+    default:
+      return false;
+  }
 }

 }  // namespace internal
...
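
Usage note for the new InstanceType overload: only the four shapes listed in the switch are shareable; cons, sliced, and thin strings still require a copy to internalize. For example (a sketch, not taken from the CL):

    DCHECK(String::IsInPlaceInternalizable(STRING_TYPE));
    DCHECK(String::IsInPlaceInternalizable(EXTERNAL_ONE_BYTE_STRING_TYPE));
    DCHECK(!String::IsInPlaceInternalizable(CONS_STRING_TYPE));
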
@@ -736,5 +736,12 @@ void StringTable::NotifyElementsRemoved(int count) {
   data_.load(std::memory_order_relaxed)->ElementsRemoved(count);
 }

+void StringTable::UpdateCountersIfOwnedBy(Isolate* isolate) {
+  DCHECK_EQ(isolate->string_table(), this);
+  if (!isolate->OwnsStringTable()) return;
+  isolate->counters()->string_table_capacity()->Set(Capacity());
+  isolate->counters()->number_of_symbols()->Set(NumberOfElements());
+}
+
 }  // namespace internal
 }  // namespace v8
@@ -81,6 +81,9 @@ class V8_EXPORT_PRIVATE StringTable {
   void DropOldData();
   void NotifyElementsRemoved(int count);

+  void VerifyIfOwnedBy(Isolate* isolate);
+  void UpdateCountersIfOwnedBy(Isolate* isolate);
+
  private:
   class Data;
...
@@ -555,7 +555,8 @@ class String : public TorqueGeneratedString<String, Name> {
   // Returns true if string can be internalized without copying. In such cases
   // the string is inserted into the string table and its map is changed to an
   // internalized equivalent.
-  static inline bool IsInPlaceInternalizable(Isolate* isolate, String string);
+  static inline bool IsInPlaceInternalizable(String string);
+  static inline bool IsInPlaceInternalizable(InstanceType instance_type);

  private:
   friend class Name;
...
@@ -606,13 +606,15 @@ Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
   // strings internalized strings are allocated in the shared heap.
   //
   // TODO(12007): When shipping, add a new SharedOld SnapshotSpace.
-  if (FLAG_shared_string_table &&
-      (!isolate()->factory()->GetInPlaceInternalizedStringMap(*map).is_null() ||
-       InstanceTypeChecker::IsInternalizedString(map->instance_type()))) {
-    allocation = isolate()
-                     ->factory()
-                     ->RefineAllocationTypeForInPlaceInternalizableString(
-                         allocation, *map);
+  if (FLAG_shared_string_table) {
+    InstanceType instance_type = map->instance_type();
+    if (InstanceTypeChecker::IsInternalizedString(instance_type) ||
+        String::IsInPlaceInternalizable(instance_type)) {
+      allocation = isolate()
+                       ->factory()
+                       ->RefineAllocationTypeForInPlaceInternalizableString(
+                           allocation, *map);
+    }
   }

   // Filling an object's fields can cause GCs and heap walks, so this object has
...
@@ -15,9 +15,10 @@ namespace internal {
 // static
 bool SharedHeapSerializer::ShouldBeInSharedOldSpace(Isolate* isolate,
                                                     HeapObject obj) {
+  if (ReadOnlyHeap::Contains(obj)) return false;
   if (obj.IsString()) {
     return obj.IsInternalizedString() ||
-           String::IsInPlaceInternalizable(isolate, String::cast(obj));
+           String::IsInPlaceInternalizable(String::cast(obj));
   }
   return false;
 }
...
@@ -328,6 +328,87 @@ UNINITIALIZED_TEST(ConcurrentInternalization) {
   }
 }

+UNINITIALIZED_TEST(PromotionMarkCompact) {
+  if (FLAG_single_generation) return;
+  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
+  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
+
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
+  FLAG_shared_string_table = true;
+
+  MultiClientIsolateTest test;
+  v8::Isolate* isolate = test.NewClientIsolate();
+  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
+  Factory* factory = i_isolate->factory();
+  Heap* heap = i_isolate->heap();
+  // Heap* shared_heap = test.i_shared_isolate()->heap();
+
+  const char raw_one_byte[] = "foo";
+
+  {
+    HandleScope scope(i_isolate);
+
+    // heap::SealCurrentObjects(heap);
+    // heap::SealCurrentObjects(shared_heap);
+
+    Handle<String> one_byte_seq = factory->NewStringFromAsciiChecked(
+        raw_one_byte, AllocationType::kYoung);
+
+    CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
+    CHECK(heap->InSpace(*one_byte_seq, NEW_SPACE));
+
+    for (int i = 0; i < 2; i++) {
+      heap->CollectAllGarbage(Heap::kNoGCFlags,
+                              GarbageCollectionReason::kTesting);
+    }
+
+    // In-place-internalizable strings are promoted into the shared heap when
+    // sharing.
+    CHECK(!heap->Contains(*one_byte_seq));
+    CHECK(heap->SharedHeapContains(*one_byte_seq));
+  }
+}
+
+UNINITIALIZED_TEST(PromotionScavenge) {
+  if (FLAG_single_generation) return;
+  if (!ReadOnlyHeap::IsReadOnlySpaceShared()) return;
+  if (!COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL) return;
+
+  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
+  FLAG_shared_string_table = true;
+
+  MultiClientIsolateTest test;
+  v8::Isolate* isolate = test.NewClientIsolate();
+  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
+  Factory* factory = i_isolate->factory();
+  Heap* heap = i_isolate->heap();
+  // Heap* shared_heap = test.i_shared_isolate()->heap();
+
+  const char raw_one_byte[] = "foo";
+
+  {
+    HandleScope scope(i_isolate);
+
+    // heap::SealCurrentObjects(heap);
+    // heap::SealCurrentObjects(shared_heap);
+
+    Handle<String> one_byte_seq = factory->NewStringFromAsciiChecked(
+        raw_one_byte, AllocationType::kYoung);
+
+    CHECK(String::IsInPlaceInternalizable(*one_byte_seq));
+    CHECK(heap->InSpace(*one_byte_seq, NEW_SPACE));
+
+    for (int i = 0; i < 2; i++) {
+      heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kTesting);
+    }
+
+    // In-place-internalizable strings are promoted into the shared heap when
+    // sharing.
+    CHECK(!heap->Contains(*one_byte_seq));
+    CHECK(heap->SharedHeapContains(*one_byte_seq));
+  }
+}
+
 }  // namespace test_shared_strings
 }  // namespace internal
 }  // namespace v8