Commit 353d1009 authored by Dominik Inführ, committed by V8 LUCI CQ

Reland "[heap] Add shared spaces for --shared-space"

This is a reland of commit 10756bea

The reland is mostly unchanged; the only difference is the name of
the shared large object space, which now follows the same naming
style as the other large object spaces.

The main reason for reverting was fixed in
https://crrev.com/c/3894303.

Original change's description:
> [heap] Add shared spaces for --shared-space
>
> This CL adds shared spaces for regular and large objects in the shared
> space isolate. Spaces aren't used for allocation yet.
>
> Bug: v8:13267
> Change-Id: If508144530f4c9a1b3c0567570165955b64cc200
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3876824
> Reviewed-by: Jakob Linke <jgruber@chromium.org>
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#83178}

Bug: v8:13267
Change-Id: I3de586c1e141fb5f7693e2d6972db251b4a4f434
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3892950
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Linke <jgruber@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83252}
parent c0ba94db
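
In short: with --shared-space enabled, the isolate acting as the shared
space isolate now constructs a shared regular-object space and a shared
large-object space during heap setup, and nothing allocates into them yet.
A condensed sketch of that setup logic (paraphrasing the Heap::SetUpSpaces
hunk below, not quoting it verbatim):

  // Condensed from Heap::SetUpSpaces() in this CL: both spaces exist only
  // in the shared space isolate, and no allocation path uses them yet.
  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
    space_[SHARED_SPACE] = shared_space_ = new SharedSpace(this);
    space_[SHARED_LO_SPACE] = shared_lo_space_ =
        new SharedLargeObjectSpace(this);
  }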
@@ -969,20 +969,22 @@ using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer);
 // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
 // consecutive.
 enum AllocationSpace {
-  RO_SPACE,       // Immortal, immovable and immutable objects,
-  OLD_SPACE,      // Old generation regular object space.
-  CODE_SPACE,     // Old generation code object space, marked executable.
-  MAP_SPACE,      // Old generation map object space, non-movable.
-  NEW_SPACE,      // Young generation space for regular objects collected
-                  // with Scavenger/MinorMC.
-  LO_SPACE,       // Old generation large object space.
-  CODE_LO_SPACE,  // Old generation large code object space.
-  NEW_LO_SPACE,   // Young generation large object space.
+  RO_SPACE,         // Immortal, immovable and immutable objects,
+  OLD_SPACE,        // Old generation regular object space.
+  CODE_SPACE,       // Old generation code object space, marked executable.
+  MAP_SPACE,        // Old generation map object space, non-movable.
+  NEW_SPACE,        // Young generation space for regular objects collected
+                    // with Scavenger/MinorMC.
+  SHARED_SPACE,     // Space shared between multiple isolates. Optional.
+  LO_SPACE,         // Old generation large object space.
+  CODE_LO_SPACE,    // Old generation large code object space.
+  NEW_LO_SPACE,     // Young generation large object space.
+  SHARED_LO_SPACE,  // Space shared between multiple isolates. Optional.

   FIRST_SPACE = RO_SPACE,
-  LAST_SPACE = NEW_LO_SPACE,
+  LAST_SPACE = SHARED_LO_SPACE,
   FIRST_MUTABLE_SPACE = OLD_SPACE,
-  LAST_MUTABLE_SPACE = NEW_LO_SPACE,
+  LAST_MUTABLE_SPACE = SHARED_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE,
   FIRST_SWEEPABLE_SPACE = OLD_SPACE,
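
The NOTE above the enum is why the new entries slot in where they do:
space-iteration code walks the enum numerically, so FIRST_SPACE..LAST_SPACE
and the MUTABLE sub-range must stay contiguous. A minimal sketch of that
pattern, with a hypothetical Visit() helper standing in for V8's
SpaceIterator:

  // Hypothetical sketch: walk all mutable spaces by enum value. Optional
  // spaces (MAP_SPACE, SHARED_SPACE, SHARED_LO_SPACE) may be null.
  for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) {
    Space* space = heap->space(static_cast<AllocationSpace>(i));
    if (space != nullptr) Visit(space);
  }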
@@ -1220,6 +1220,8 @@ DEFINE_BOOL(global_gc_scheduling, true,
 DEFINE_BOOL(gc_global, false, "always perform global GCs")
 DEFINE_BOOL(shared_space, false,
             "Implement shared heap as shared space on a main isolate.")
+// Don't use a map space with --shared-space in order to avoid shared map space.
+DEFINE_NEG_IMPLICATION(shared_space, use_map_space)

 // TODO(12950): The next two flags only have an effect if
 // V8_ENABLE_ALLOCATION_TIMEOUT is set, so we should only define them in that
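
For readers unfamiliar with the flag macros: DEFINE_NEG_IMPLICATION(a, b)
forces flag b off whenever flag a is on, so passing --shared-space behaves
as if --no-use-map-space had also been given. Roughly equivalent, as a
sketch:

  // What the implication amounts to at flag-resolution time (sketch only):
  if (v8_flags.shared_space) v8_flags.use_map_space = false;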
@@ -17,12 +17,16 @@ const char* BaseSpace::GetSpaceName(AllocationSpace space) {
       return "map_space";
     case CODE_SPACE:
       return "code_space";
+    case SHARED_SPACE:
+      return "shared_space";
     case LO_SPACE:
       return "large_object_space";
     case NEW_LO_SPACE:
       return "new_large_object_space";
     case CODE_LO_SPACE:
       return "code_large_object_space";
+    case SHARED_LO_SPACE:
+      return "shared_large_object_space";
     case RO_SPACE:
       return "read_only_space";
   }
@@ -31,7 +31,7 @@ void HeapAllocator::Setup() {
   shared_map_allocator_ = heap_->shared_map_allocator_
                               ? heap_->shared_map_allocator_.get()
                               : shared_old_allocator_;
-  shared_lo_space_ = heap_->shared_lo_space();
+  shared_lo_space_ = heap_->shared_isolate_lo_space_;
 }

 void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) {
@@ -485,6 +485,8 @@ bool Heap::IsPendingAllocationInternal(HeapObject object) {
       return addr == large_space->pending_object();
     }

+    case SHARED_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       UNREACHABLE();
   }
@@ -4327,9 +4327,10 @@ bool Heap::ContainsCode(HeapObject value) const {
 }

 bool Heap::SharedHeapContains(HeapObject value) const {
-  if (shared_old_space_)
-    return shared_old_space_->Contains(value) ||
-           (shared_map_space_ && shared_map_space_->Contains(value));
+  if (shared_isolate_old_space_)
+    return shared_isolate_old_space_->Contains(value) ||
+           (shared_isolate_map_space_ &&
+            shared_isolate_map_space_->Contains(value));
   return false;
 }
@@ -4360,12 +4361,16 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
     case MAP_SPACE:
       DCHECK(map_space_);
       return map_space_->Contains(value);
+    case SHARED_SPACE:
+      return shared_space_->Contains(value);
     case LO_SPACE:
       return lo_space_->Contains(value);
     case CODE_LO_SPACE:
       return code_lo_space_->Contains(value);
     case NEW_LO_SPACE:
       return new_lo_space_->Contains(value);
+    case SHARED_LO_SPACE:
+      return shared_lo_space_->Contains(value);
     case RO_SPACE:
       return ReadOnlyHeap::Contains(value);
   }
@@ -4390,12 +4395,16 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
     case MAP_SPACE:
       DCHECK(map_space_);
       return map_space_->ContainsSlow(addr);
+    case SHARED_SPACE:
+      return shared_space_->ContainsSlow(addr);
     case LO_SPACE:
       return lo_space_->ContainsSlow(addr);
     case CODE_LO_SPACE:
       return code_lo_space_->ContainsSlow(addr);
     case NEW_LO_SPACE:
       return new_lo_space_->ContainsSlow(addr);
+    case SHARED_LO_SPACE:
+      return shared_lo_space_->ContainsSlow(addr);
     case RO_SPACE:
       return read_only_space_->ContainsSlow(addr);
   }
@@ -4408,9 +4417,11 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
     case OLD_SPACE:
     case CODE_SPACE:
     case MAP_SPACE:
+    case SHARED_SPACE:
     case LO_SPACE:
     case NEW_LO_SPACE:
     case CODE_LO_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       return true;
     default:
@@ -5448,8 +5459,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   if (v8_flags.use_map_space) {
     space_[MAP_SPACE] = map_space_ = new MapSpace(this);
   }
+  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+    space_[SHARED_SPACE] = shared_space_ = new SharedSpace(this);
+  }
   space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
   space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
+  if (v8_flags.shared_space && isolate()->is_shared_space_isolate()) {
+    space_[SHARED_LO_SPACE] = shared_lo_space_ =
+        new SharedLargeObjectSpace(this);
+  }

   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
        i++) {
@@ -5527,15 +5545,15 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   if (isolate()->shared_isolate()) {
     Heap* shared_heap = isolate()->shared_isolate()->heap();

-    shared_old_space_ = shared_heap->old_space();
-    shared_lo_space_ = shared_heap->lo_space();
-    shared_old_allocator_.reset(
-        new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_));
+    shared_isolate_old_space_ = shared_heap->old_space();
+    shared_isolate_lo_space_ = shared_heap->lo_space();
+    shared_old_allocator_.reset(new ConcurrentAllocator(
+        main_thread_local_heap(), shared_isolate_old_space_));

     if (shared_heap->map_space()) {
-      shared_map_space_ = shared_heap->map_space();
-      shared_map_allocator_.reset(
-          new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_));
+      shared_isolate_map_space_ = shared_heap->map_space();
+      shared_map_allocator_.reset(new ConcurrentAllocator(
+          main_thread_local_heap(), shared_isolate_map_space_));
     }
   }
@@ -5844,10 +5862,10 @@ void Heap::TearDown() {
   allocation_sites_to_pretenure_.reset();

-  shared_old_space_ = nullptr;
+  shared_isolate_old_space_ = nullptr;
   shared_old_allocator_.reset();

-  shared_map_space_ = nullptr;
+  shared_isolate_map_space_ = nullptr;
   shared_map_allocator_.reset();

   {
@@ -6781,9 +6799,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) {
       return dst == CODE_SPACE && type == CODE_TYPE;
     case MAP_SPACE:
       return dst == MAP_SPACE && type == MAP_TYPE;
+    case SHARED_SPACE:
+      return dst == SHARED_SPACE;
     case LO_SPACE:
     case CODE_LO_SPACE:
     case NEW_LO_SPACE:
+    case SHARED_LO_SPACE:
     case RO_SPACE:
       return false;
   }
@@ -127,7 +127,9 @@ class SafepointScope;
 class ScavengeJob;
 class Scavenger;
 class ScavengerCollector;
+class SharedLargeObjectSpace;
 class SharedReadOnlySpace;
+class SharedSpace;
 class Space;
 class StressScavengeObserver;
 class TimedHistogram;
@@ -876,12 +878,11 @@ class Heap {
   NewSpace* new_space() const { return new_space_; }
   inline PagedNewSpace* paged_new_space() const;
   OldSpace* old_space() const { return old_space_; }
-  OldSpace* shared_old_space() const { return shared_old_space_; }
+  OldSpace* shared_old_space() const { return shared_isolate_old_space_; }
   CodeSpace* code_space() const { return code_space_; }
   MapSpace* map_space() const { return map_space_; }
   inline PagedSpace* space_for_maps();
   OldLargeObjectSpace* lo_space() const { return lo_space_; }
-  OldLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
   CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
   NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
   ReadOnlySpace* read_only_space() const { return read_only_space_; }
@@ -2194,14 +2195,16 @@ class Heap {
   OldSpace* old_space_ = nullptr;
   CodeSpace* code_space_ = nullptr;
   MapSpace* map_space_ = nullptr;
+  SharedSpace* shared_space_ = nullptr;
   OldLargeObjectSpace* lo_space_ = nullptr;
   CodeLargeObjectSpace* code_lo_space_ = nullptr;
   NewLargeObjectSpace* new_lo_space_ = nullptr;
+  SharedLargeObjectSpace* shared_lo_space_ = nullptr;
   ReadOnlySpace* read_only_space_ = nullptr;

-  OldSpace* shared_old_space_ = nullptr;
-  OldLargeObjectSpace* shared_lo_space_ = nullptr;
-  MapSpace* shared_map_space_ = nullptr;
+  OldSpace* shared_isolate_old_space_ = nullptr;
+  OldLargeObjectSpace* shared_isolate_lo_space_ = nullptr;
+  MapSpace* shared_isolate_map_space_ = nullptr;

   std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
   std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;
@@ -582,5 +582,15 @@ void CodeLargeObjectSpace::RemovePage(LargePage* page) {
   OldLargeObjectSpace::RemovePage(page);
 }

+SharedLargeObjectSpace::SharedLargeObjectSpace(Heap* heap)
+    : OldLargeObjectSpace(heap, SHARED_LO_SPACE) {}
+
+AllocationResult SharedLargeObjectSpace::AllocateRawBackground(
+    LocalHeap* local_heap, int object_size) {
+  DCHECK(!v8_flags.enable_third_party_heap);
+  return OldLargeObjectSpace::AllocateRawBackground(local_heap, object_size,
+                                                    NOT_EXECUTABLE);
+}
+
 }  // namespace internal
 }  // namespace v8
@@ -190,6 +190,14 @@ class OldLargeObjectSpace : public LargeObjectSpace {
       LocalHeap* local_heap, int object_size, Executability executable);
 };

+class SharedLargeObjectSpace : public OldLargeObjectSpace {
+ public:
+  explicit SharedLargeObjectSpace(Heap* heap);
+
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRawBackground(LocalHeap* local_heap, int object_size);
+};
+
 class NewLargeObjectSpace : public LargeObjectSpace {
  public:
   NewLargeObjectSpace(Heap* heap, size_t capacity);
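
A hedged usage sketch for the new class; the shared_lo_space pointer,
object_size, and the failure handling are illustrative only, since this CL
deliberately leaves every allocation path unwired:

  // Illustrative only: background allocation into the shared LO space.
  AllocationResult result =
      shared_lo_space->AllocateRawBackground(local_heap, object_size);
  HeapObject object;
  if (!result.To(&object)) {
    // A real caller would trigger a shared GC and retry; this CL adds no
    // such caller yet.
  }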
@@ -571,6 +571,32 @@ class MapSpace final : public PagedSpace {
   LinearAllocationArea paged_allocation_info_;
 };

+// -----------------------------------------------------------------------------
+// Shared space regular object space.
+
+class SharedSpace final : public PagedSpace {
+ public:
+  // Creates a shared space object. The constructor does not allocate pages
+  // from the OS.
+  explicit SharedSpace(Heap* heap)
+      : PagedSpace(heap, SHARED_SPACE, NOT_EXECUTABLE,
+                   FreeList::CreateFreeList(), allocation_info_) {}
+
+  static bool IsAtPageStart(Address addr) {
+    return static_cast<intptr_t>(addr & kPageAlignmentMask) ==
+           MemoryChunkLayout::ObjectStartOffsetInDataPage();
+  }
+
+  size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
+    if (type == ExternalBackingStoreType::kArrayBuffer) return 0;
+    DCHECK_EQ(type, ExternalBackingStoreType::kExternalString);
+    return external_backing_store_bytes_[type];
+  }
+
+ private:
+  LinearAllocationArea allocation_info_;
+};
+
 // Iterates over the chunks (pages and large object pages) that can contain
 // pointers to new space or to evacuation candidates.
 class OldGenerationMemoryChunkIterator {
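
IsAtPageStart works because pages are power-of-two aligned: masking an
address with kPageAlignmentMask leaves its offset within the page, and the
first object on a data page always sits at ObjectStartOffsetInDataPage(),
just past the page header. A sketch of the invariant, where page_base is a
hypothetical page-aligned address:

  // Sketch: only the first object slot on a page is "at page start".
  Address first_object =
      page_base + MemoryChunkLayout::ObjectStartOffsetInDataPage();
  DCHECK(SharedSpace::IsAtPageStart(first_object));
  DCHECK(!SharedSpace::IsAtPageStart(first_object + kTaggedSize));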
@@ -788,6 +788,8 @@ SnapshotSpace GetSnapshotSpace(HeapObject object) {
       return SnapshotSpace::kCode;
     case MAP_SPACE:
       return SnapshotSpace::kMap;
+    case SHARED_SPACE:
+    case SHARED_LO_SPACE:
     case CODE_LO_SPACE:
     case RO_SPACE:
       UNREACHABLE();