Commit 10756bea authored by Dominik Inführ, committed by V8 LUCI CQ

[heap] Add shared spaces for --shared-space

This CL adds shared spaces for regular and large objects in the shared
space isolate. Spaces aren't used for allocation yet.

Bug: v8:13267
Change-Id: If508144530f4c9a1b3c0567570165955b64cc200
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3876824
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83178}
parent ae2ef7d2
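The flag added below is off by default, so nothing changes for embedders that do not opt in. A minimal sketch of opting in (it assumes a standard embedder setup elsewhere; EnableSharedSpaceFlag is an illustrative helper, not part of this CL):

#include "include/v8-initialization.h"

// Must run before the first isolate is created; equivalent to passing
// --shared-space on the d8 command line.
void EnableSharedSpaceFlag() {
  v8::V8::SetFlagsFromString("--shared-space");
}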
@@ -969,20 +969,22 @@ using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer);
// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
RO_SPACE, // Immortal, immovable and immutable objects,
OLD_SPACE, // Old generation regular object space.
CODE_SPACE, // Old generation code object space, marked executable.
MAP_SPACE, // Old generation map object space, non-movable.
NEW_SPACE, // Young generation space for regular objects collected
// with Scavenger/MinorMC.
LO_SPACE, // Old generation large object space.
CODE_LO_SPACE, // Old generation large code object space.
NEW_LO_SPACE, // Young generation large object space.
RO_SPACE, // Immortal, immovable and immutable objects,
OLD_SPACE, // Old generation regular object space.
CODE_SPACE, // Old generation code object space, marked executable.
MAP_SPACE, // Old generation map object space, non-movable.
NEW_SPACE, // Young generation space for regular objects collected
// with Scavenger/MinorMC.
SHARED_SPACE, // Space shared between multiple isolates. Optional.
LO_SPACE, // Old generation large object space.
CODE_LO_SPACE, // Old generation large code object space.
NEW_LO_SPACE, // Young generation large object space.
SHARED_LO_SPACE, // Space shared between multiple isolates. Optional.
FIRST_SPACE = RO_SPACE,
LAST_SPACE = NEW_LO_SPACE,
LAST_SPACE = SHARED_LO_SPACE,
FIRST_MUTABLE_SPACE = OLD_SPACE,
LAST_MUTABLE_SPACE = NEW_LO_SPACE,
LAST_MUTABLE_SPACE = SHARED_LO_SPACE,
FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
LAST_GROWABLE_PAGED_SPACE = MAP_SPACE,
FIRST_SWEEPABLE_SPACE = OLD_SPACE,
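The two shared spaces are inserted while keeping the enumeration consecutive, with LAST_SPACE and LAST_MUTABLE_SPACE bumped accordingly, so code that walks all spaces by index keeps working. A rough sketch of such a walk (illustrative only, not part of this CL; it assumes the internal heap.h header and the Heap::space(int) accessor used by SpaceIterator):

#include "src/heap/heap.h"

namespace v8 {
namespace internal {

void VisitAllSpaces(Heap* heap) {
  for (int i = FIRST_SPACE; i <= LAST_SPACE; ++i) {
    // Spaces that are not created for this isolate (e.g. SHARED_SPACE in an
    // isolate that is not the shared space isolate) are simply null.
    Space* space = heap->space(i);
    if (space == nullptr) continue;
    // ... inspect `space` ...
  }
}

}  // namespace internal
}  // namespace v8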
@@ -1224,6 +1224,8 @@ DEFINE_BOOL(global_gc_scheduling, true,
DEFINE_BOOL(gc_global, false, "always perform global GCs")
DEFINE_BOOL(shared_space, false,
"Implement shared heap as shared space on a main isolate.")
// Don't use a map space with --shared-space, to avoid the need for a shared map space.
DEFINE_NEG_IMPLICATION(shared_space, use_map_space)
// TODO(12950): The next two flags only have an effect if
// V8_ENABLE_ALLOCATION_TIMEOUT is set, so we should only define them in that
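For readers unfamiliar with the flag machinery: a negative implication forces the target flag off whenever the source flag is set, so the implication above behaves roughly like the following check during flag resolution (illustrative sketch, not the actual macro expansion):

// Effect of DEFINE_NEG_IMPLICATION(shared_space, use_map_space):
if (v8_flags.shared_space) {
  v8_flags.use_map_space = false;
}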
@@ -17,12 +17,16 @@ const char* BaseSpace::GetSpaceName(AllocationSpace space) {
return "map_space";
case CODE_SPACE:
return "code_space";
case SHARED_SPACE:
return "shared_space";
case LO_SPACE:
return "large_object_space";
case NEW_LO_SPACE:
return "new_large_object_space";
case CODE_LO_SPACE:
return "code_large_object_space";
case SHARED_LO_SPACE:
return "shared_lo_space";
case RO_SPACE:
return "read_only_space";
}
@@ -31,7 +31,7 @@ void HeapAllocator::Setup() {
shared_map_allocator_ = heap_->shared_map_allocator_
? heap_->shared_map_allocator_.get()
: shared_old_allocator_;
shared_lo_space_ = heap_->shared_lo_space();
shared_lo_space_ = heap_->shared_isolate_lo_space_;
}
void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) {
@@ -485,6 +485,8 @@ bool Heap::IsPendingAllocationInternal(HeapObject object) {
return addr == large_space->pending_object();
}
case SHARED_SPACE:
case SHARED_LO_SPACE:
case RO_SPACE:
UNREACHABLE();
}
@@ -4317,9 +4317,10 @@ bool Heap::ContainsCode(HeapObject value) const {
}
bool Heap::SharedHeapContains(HeapObject value) const {
if (shared_old_space_)
return shared_old_space_->Contains(value) ||
(shared_map_space_ && shared_map_space_->Contains(value));
if (shared_isolate_old_space_)
return shared_isolate_old_space_->Contains(value) ||
(shared_isolate_map_space_ &&
shared_isolate_map_space_->Contains(value));
return false;
}
@@ -4350,12 +4351,16 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
case MAP_SPACE:
DCHECK(map_space_);
return map_space_->Contains(value);
case SHARED_SPACE:
return shared_space_->Contains(value);
case LO_SPACE:
return lo_space_->Contains(value);
case CODE_LO_SPACE:
return code_lo_space_->Contains(value);
case NEW_LO_SPACE:
return new_lo_space_->Contains(value);
case SHARED_LO_SPACE:
return shared_lo_space_->Contains(value);
case RO_SPACE:
return ReadOnlyHeap::Contains(value);
}
@@ -4380,12 +4385,16 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
case MAP_SPACE:
DCHECK(map_space_);
return map_space_->ContainsSlow(addr);
case SHARED_SPACE:
return shared_space_->ContainsSlow(addr);
case LO_SPACE:
return lo_space_->ContainsSlow(addr);
case CODE_LO_SPACE:
return code_lo_space_->ContainsSlow(addr);
case NEW_LO_SPACE:
return new_lo_space_->ContainsSlow(addr);
case SHARED_LO_SPACE:
return shared_lo_space_->ContainsSlow(addr);
case RO_SPACE:
return read_only_space_->ContainsSlow(addr);
}
@@ -4398,9 +4407,11 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
case OLD_SPACE:
case CODE_SPACE:
case MAP_SPACE:
case SHARED_SPACE:
case LO_SPACE:
case NEW_LO_SPACE:
case CODE_LO_SPACE:
case SHARED_LO_SPACE:
case RO_SPACE:
return true;
default:
@@ -5438,8 +5449,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
if (v8_flags.use_map_space) {
space_[MAP_SPACE] = map_space_ = new MapSpace(this);
}
if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
space_[SHARED_SPACE] = shared_space_ = new SharedSpace(this);
}
space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
space_[SHARED_LO_SPACE] = shared_lo_space_ =
new SharedLargeObjectSpace(this);
}
for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
i++) {
@@ -5517,15 +5535,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
if (isolate()->shared_isolate()) {
Heap* shared_heap = isolate()->shared_isolate()->heap();
shared_old_space_ = shared_heap->old_space();
shared_lo_space_ = shared_heap->lo_space();
shared_old_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
shared_isolate_old_space_ = shared_heap->old_space();
shared_isolate_lo_space_ = shared_heap->lo_space();
shared_old_allocator_.reset(new ConcurrentAllocator(
main_thread_local_heap(), shared_isolate_old_space_));
if (shared_heap->map_space()) {
shared_map_space_ = shared_heap->map_space();
shared_map_allocator_.reset(
new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
shared_isolate_map_space_ = shared_heap->map_space();
shared_map_allocator_.reset(new ConcurrentAllocator(
main_thread_local_heap(), shared_isolate_map_space_));
}
}
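Taken together, the SetUpSpaces() hunks separate two configurations: the existing model, where a client isolate caches pointers into a separate shared isolate's heap (now held in the renamed shared_isolate_*_space_ fields), and the new model this CL starts building, where the shared space isolate owns SHARED_SPACE and SHARED_LO_SPACE directly. A condensed sketch of that distinction, simplified from the hunks above and not the literal V8 code:

if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
  // New model: this heap belongs to the shared space isolate and owns the
  // shared spaces itself (not yet used for allocation).
  space_[SHARED_SPACE] = shared_space_ = new SharedSpace(this);
  space_[SHARED_LO_SPACE] = shared_lo_space_ = new SharedLargeObjectSpace(this);
} else if (isolate()->shared_isolate()) {
  // Existing model: a client isolate borrows spaces from the shared isolate's
  // heap and allocates into them through ConcurrentAllocators.
  Heap* shared_heap = isolate()->shared_isolate()->heap();
  shared_isolate_old_space_ = shared_heap->old_space();
  shared_isolate_lo_space_ = shared_heap->lo_space();
}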
@@ -5834,10 +5852,10 @@ void Heap::TearDown() {
allocation_sites_to_pretenure_.reset();
shared_old_space_ = nullptr;
shared_isolate_old_space_ = nullptr;
shared_old_allocator_.reset();
shared_map_space_ = nullptr;
shared_isolate_map_space_ = nullptr;
shared_map_allocator_.reset();
{
@@ -6771,9 +6789,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
return dst == CODE_SPACE && type == CODE_TYPE;
case MAP_SPACE:
return dst == MAP_SPACE && type == MAP_TYPE;
case SHARED_SPACE:
return dst == SHARED_SPACE;
case LO_SPACE:
case CODE_LO_SPACE:
case NEW_LO_SPACE:
case SHARED_LO_SPACE:
case RO_SPACE:
return false;
}
@@ -127,7 +127,9 @@ class SafepointScope;
class ScavengeJob;
class Scavenger;
class ScavengerCollector;
class SharedLargeObjectSpace;
class SharedReadOnlySpace;
class SharedSpace;
class Space;
class StressScavengeObserver;
class TimedHistogram;
@@ -876,12 +878,11 @@ class Heap {
NewSpace* new_space() const { return new_space_; }
inline PagedNewSpace* paged_new_space() const;
OldSpace* old_space() const { return old_space_; }
OldSpace* shared_old_space() const { return shared_old_space_; }
OldSpace* shared_old_space() const { return shared_isolate_old_space_; }
CodeSpace* code_space() const { return code_space_; }
MapSpace* map_space() const { return map_space_; }
inline PagedSpace* space_for_maps();
OldLargeObjectSpace* lo_space() const { return lo_space_; }
OldLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
ReadOnlySpace* read_only_space() const { return read_only_space_; }
@@ -2190,14 +2191,16 @@ class Heap {
OldSpace* old_space_ = nullptr;
CodeSpace* code_space_ = nullptr;
MapSpace* map_space_ = nullptr;
SharedSpace* shared_space_ = nullptr;
OldLargeObjectSpace* lo_space_ = nullptr;
CodeLargeObjectSpace* code_lo_space_ = nullptr;
NewLargeObjectSpace* new_lo_space_ = nullptr;
SharedLargeObjectSpace* shared_lo_space_ = nullptr;
ReadOnlySpace* read_only_space_ = nullptr;
OldSpace* shared_old_space_ = nullptr;
OldLargeObjectSpace* shared_lo_space_ = nullptr;
MapSpace* shared_map_space_ = nullptr;
OldSpace* shared_isolate_old_space_ = nullptr;
OldLargeObjectSpace* shared_isolate_lo_space_ = nullptr;
MapSpace* shared_isolate_map_space_ = nullptr;
std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;
@@ -582,5 +582,15 @@ void CodeLargeObjectSpace::RemovePage(LargePage* page) {
OldLargeObjectSpace::RemovePage(page);
}
SharedLargeObjectSpace::SharedLargeObjectSpace(Heap* heap)
: OldLargeObjectSpace(heap, SHARED_LO_SPACE) {}
AllocationResult SharedLargeObjectSpace::AllocateRawBackground(
LocalHeap* local_heap, int object_size) {
DCHECK(!v8_flags.enable_third_party_heap);
return OldLargeObjectSpace::AllocateRawBackground(local_heap, object_size,
NOT_EXECUTABLE);
}
} // namespace internal
} // namespace v8
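SharedLargeObjectSpace only gains a background-allocation entry point here; as the commit message notes, nothing allocates from the shared spaces yet. A hypothetical future call site might look roughly like this (sketch only; shared_lo_space, local_heap and object_size are placeholders, and the failure path is elided):

AllocationResult result =
    shared_lo_space->AllocateRawBackground(local_heap, object_size);
HeapObject object;
if (result.To(&object)) {
  // `object` now lives on a page owned by SHARED_LO_SPACE.
}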
@@ -190,6 +190,14 @@ class OldLargeObjectSpace : public LargeObjectSpace {
LocalHeap* local_heap, int object_size, Executability executable);
};
class SharedLargeObjectSpace : public OldLargeObjectSpace {
public:
explicit SharedLargeObjectSpace(Heap* heap);
V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
AllocateRawBackground(LocalHeap* local_heap, int object_size);
};
class NewLargeObjectSpace : public LargeObjectSpace {
public:
NewLargeObjectSpace(Heap* heap, size_t capacity);
@@ -571,6 +571,32 @@ class MapSpace final : public PagedSpace {
LinearAllocationArea paged_allocation_info_;
};
// -----------------------------------------------------------------------------
// Shared space regular object space.
class SharedSpace final : public PagedSpace {
public:
// Creates a shared space object. The constructor does not allocate pages
// from the OS.
explicit SharedSpace(Heap* heap)
: PagedSpace(heap, SHARED_SPACE, NOT_EXECUTABLE,
FreeList::CreateFreeList(), allocation_info_) {}
static bool IsAtPageStart(Address addr) {
return static_cast<intptr_t>(addr & kPageAlignmentMask) ==
MemoryChunkLayout::ObjectStartOffsetInDataPage();
}
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (type == ExternalBackingStoreType::kArrayBuffer) return 0;
DCHECK_EQ(type, ExternalBackingStoreType::kExternalString);
return external_backing_store_bytes_[type];
}
private:
LinearAllocationArea allocation_info_;
};
// Iterates over the chunks (pages and large object pages) that can contain
// pointers to new space or to evacuation candidates.
class OldGenerationMemoryChunkIterator {
@@ -788,6 +788,8 @@ SnapshotSpace GetSnapshotSpace(HeapObject object) {
return SnapshotSpace::kCode;
case MAP_SPACE:
return SnapshotSpace::kMap;
case SHARED_SPACE:
case SHARED_LO_SPACE:
case CODE_LO_SPACE:
case RO_SPACE:
UNREACHABLE();