Commit c91c7270 authored by Dominik Inführ, committed by V8 LUCI CQ

Reland "[heap] Remove unused LocalSpace class"

This is a reland of b0c70710

The first CL was reverted because of build errors. This CL replaces the
remaining usage of is_local_space() with is_compaction_space(). That
usage was presumably left over because https://crrev.com/c/2928189
landed at roughly the same time.
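
To make the remaining-usage fix concrete, here is a minimal sketch of the
predicate change in PagedSpace, condensed from the PagedSpace header hunks
below (illustrative only; the surrounding class and members are elided):

    // Before the reland: two predicates over LocalSpaceKind.
    bool is_local_space() { return local_space_kind_ != LocalSpaceKind::kNone; }
    bool is_compaction_space() {
      return base::IsInRange(local_space_kind_,
                             LocalSpaceKind::kFirstCompactionSpace,
                             LocalSpaceKind::kLastCompactionSpace);
    }

    // After: compaction spaces are the only local spaces left, so a single
    // predicate over CompactionSpaceKind suffices. Remaining callers such as
    // SupportsConcurrentAllocation() now use is_compaction_space().
    bool is_compaction_space() {
      return compaction_space_kind_ != CompactionSpaceKind::kNone;
    }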

Original change's description:
> [heap] Remove unused LocalSpace class
>
> LocalSpace was introduced for off-heap spaces with concurrent bytecode
> compilation finalization. However, finalization ended up using
> LocalHeap for concurrent allocations. LocalSpace is therefore unused
> and can be removed.
>
> This CL removes LocalSpace and renames all mentions of local space to
> compaction space. Compaction space was the only local space left.
>
> Change-Id: I12a8a2724f777a77ddb9957fe2d8e89febfebbaf
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2930169
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#74914}
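
The hierarchy change described in the quoted CL amounts to, roughly, the
following condensed sketch (based on the PagedSpace header hunks below, with
most members elided):

    // Before: CompactionSpace inherited from the intermediate LocalSpace.
    class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace { /* ... */ };
    class V8_EXPORT_PRIVATE CompactionSpace : public LocalSpace { /* ... */ };

    // After: LocalSpace is gone. CompactionSpace derives directly from
    // PagedSpace and absorbs new_pages_, Expand() and RefillLabMain().
    class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace { /* ... */ };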

Change-Id: I993c47fe85f4140f5d6137afde2653a48047cafb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2939983
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74957}
parent 83f12d9b
@@ -848,14 +848,11 @@ enum MinimumCapacity {
 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
 
-enum class LocalSpaceKind {
+enum class CompactionSpaceKind {
   kNone,
   kCompactionSpaceForScavenge,
   kCompactionSpaceForMarkCompact,
   kCompactionSpaceForMinorMarkCompact,
-
-  kFirstCompactionSpace = kCompactionSpaceForScavenge,
-  kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
 };
 
 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
...
@@ -21,18 +21,20 @@ class EvacuationAllocator {
   static const int kLabSize = 32 * KB;
   static const int kMaxLabObjectSize = 8 * KB;
 
-  explicit EvacuationAllocator(Heap* heap, LocalSpaceKind local_space_kind)
+  explicit EvacuationAllocator(Heap* heap,
+                               CompactionSpaceKind compaction_space_kind)
       : heap_(heap),
         new_space_(heap->new_space()),
-        compaction_spaces_(heap, local_space_kind),
+        compaction_spaces_(heap, compaction_space_kind),
         new_space_lab_(LocalAllocationBuffer::InvalidBuffer()),
         lab_allocation_will_fail_(false) {}
 
   // Needs to be called from the main thread to finalize this
   // EvacuationAllocator.
   void Finalize() {
-    heap_->old_space()->MergeLocalSpace(compaction_spaces_.Get(OLD_SPACE));
-    heap_->code_space()->MergeLocalSpace(compaction_spaces_.Get(CODE_SPACE));
+    heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
+    heap_->code_space()->MergeCompactionSpace(
+        compaction_spaces_.Get(CODE_SPACE));
     // Give back remaining LAB space if this EvacuationAllocator's new space LAB
     // sits right next to new space allocation top.
     const LinearAllocationArea info = new_space_lab_.CloseAndMakeIterable();
...
@@ -3085,7 +3085,8 @@ class FullEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   FLAG_always_promote_young_mc),
         record_visitor_(collector, &ephemeron_remembered_set_),
-        local_allocator_(heap_, LocalSpaceKind::kCompactionSpaceForMarkCompact),
+        local_allocator_(heap_,
+                         CompactionSpaceKind::kCompactionSpaceForMarkCompact),
         collector_(collector) {}
 
   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
@@ -5157,8 +5158,8 @@ class YoungGenerationEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   false),
         record_visitor_(collector->heap()->mark_compact_collector()),
-        local_allocator_(heap_,
-                         LocalSpaceKind::kCompactionSpaceForMinorMarkCompact),
+        local_allocator_(
+            heap_, CompactionSpaceKind::kCompactionSpaceForMinorMarkCompact),
         collector_(collector) {}
 
   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
...
@@ -81,10 +81,10 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk) {
 PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
                        Executability executable, FreeList* free_list,
-                       LocalSpaceKind local_space_kind)
+                       CompactionSpaceKind compaction_space_kind)
     : SpaceWithLinearArea(heap, space, free_list),
       executable_(executable),
-      local_space_kind_(local_space_kind) {
+      compaction_space_kind_(compaction_space_kind) {
   area_size_ = MemoryChunkLayout::AllocatableMemoryInMemoryChunk(space);
   accounting_stats_.Clear();
 }
@@ -105,7 +105,6 @@ void PagedSpace::RefillFreeList() {
       identity() != MAP_SPACE) {
     return;
   }
 
-  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   size_t added = 0;
@@ -123,7 +122,8 @@ void PagedSpace::RefillFreeList() {
       // Also merge old-to-new remembered sets if not scavenging because of
       // data races: One thread might iterate remembered set, while another
      // thread merges them.
-      if (local_space_kind() != LocalSpaceKind::kCompactionSpaceForScavenge) {
+      if (compaction_space_kind() !=
+          CompactionSpaceKind::kCompactionSpaceForScavenge) {
        p->MergeOldToNewRememberedSets();
      }
@@ -150,7 +150,7 @@ void PagedSpace::RefillFreeList() {
     }
   }
 }
 
-void PagedSpace::MergeLocalSpace(LocalSpace* other) {
+void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
   base::MutexGuard guard(mutex());
   DCHECK(identity() == other->identity());
@@ -277,7 +277,7 @@ void PagedSpace::SetTopAndLimit(Address top, Address limit) {
   allocation_info_.Reset(top, limit);
 
   base::Optional<base::SharedMutexGuard<base::kExclusive>> optional_guard;
-  if (!is_local_space())
+  if (!is_compaction_space())
     optional_guard.emplace(&heap_->pending_allocation_mutex_);
   original_limit_ = limit;
   original_top_ = top;
@@ -555,7 +555,7 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
 base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
     LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
     AllocationAlignment alignment, AllocationOrigin origin) {
-  DCHECK(!is_local_space() && identity() == OLD_SPACE);
+  DCHECK(!is_compaction_space() && identity() == OLD_SPACE);
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);
 
   auto result = TryAllocationFromFreeListBackground(
@@ -840,7 +840,7 @@ bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
   return RawRefillLabMain(size_in_bytes, origin);
 }
 
-Page* LocalSpace::Expand() {
+Page* CompactionSpace::Expand() {
   Page* page = PagedSpace::Expand();
   new_pages_.push_back(page);
   return page;
@@ -864,9 +864,6 @@ bool PagedSpace::TryExpand(int size_in_bytes, AllocationOrigin origin) {
 }
 
 bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
-  // Non-compaction local spaces are not supported.
-  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
-
   // Allocation in this space has failed.
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
@@ -946,7 +943,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
 AllocationResult PagedSpace::AllocateRawSlow(int size_in_bytes,
                                              AllocationAlignment alignment,
                                              AllocationOrigin origin) {
-  if (!is_local_space()) {
+  if (!is_compaction_space()) {
     // Start incremental marking before the actual allocation, this allows the
     // allocation function to mark the object black when incremental marking is
     // running.
...
@@ -21,10 +21,10 @@
 namespace v8 {
 namespace internal {
 
+class CompactionSpace;
 class Heap;
 class HeapObject;
 class Isolate;
-class LocalSpace;
 class ObjectVisitor;
 
 // -----------------------------------------------------------------------------
@@ -71,9 +71,10 @@ class V8_EXPORT_PRIVATE PagedSpace
   static const size_t kCompactionMemoryWanted = 500 * KB;
 
   // Creates a space with an id.
-  PagedSpace(Heap* heap, AllocationSpace id, Executability executable,
-             FreeList* free_list,
-             LocalSpaceKind local_space_kind = LocalSpaceKind::kNone);
+  PagedSpace(
+      Heap* heap, AllocationSpace id, Executability executable,
+      FreeList* free_list,
+      CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone);
 
   ~PagedSpace() override { TearDown(); }
@@ -256,19 +257,15 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Return size of allocatable area on a page in this space.
   inline int AreaSize() { return static_cast<int>(area_size_); }
 
-  bool is_local_space() { return local_space_kind_ != LocalSpaceKind::kNone; }
-
   bool is_compaction_space() {
-    return base::IsInRange(local_space_kind_,
-                           LocalSpaceKind::kFirstCompactionSpace,
-                           LocalSpaceKind::kLastCompactionSpace);
+    return compaction_space_kind_ != CompactionSpaceKind::kNone;
   }
 
-  LocalSpaceKind local_space_kind() { return local_space_kind_; }
+  CompactionSpaceKind compaction_space_kind() { return compaction_space_kind_; }
 
   // Merges {other} into the current space. Note that this modifies {other},
   // e.g., removes its bump pointer area and resets statistics.
-  void MergeLocalSpace(LocalSpace* other);
+  void MergeCompactionSpace(CompactionSpace* other);
 
   // Refills the free list from the corresponding free list filled by the
   // sweeper.
@@ -324,13 +321,13 @@ class V8_EXPORT_PRIVATE PagedSpace
     base::Optional<base::MutexGuard> guard_;
   };
 
-  bool SupportsConcurrentAllocation() { return !is_local_space(); }
+  bool SupportsConcurrentAllocation() { return !is_compaction_space(); }
 
   // Set space linear allocation area.
   void SetTopAndLimit(Address top, Address limit);
   void DecreaseLimit(Address new_limit);
   void UpdateInlineAllocationLimit(size_t min_size) override;
-  bool SupportsAllocationObserver() override { return !is_local_space(); }
+  bool SupportsAllocationObserver() override { return !is_compaction_space(); }
 
   // Slow path of allocation function
   V8_WARN_UNUSED_RESULT AllocationResult
@@ -407,7 +404,7 @@ class V8_EXPORT_PRIVATE PagedSpace
   Executability executable_;
 
-  LocalSpaceKind local_space_kind_;
+  CompactionSpaceKind compaction_space_kind_;
 
   size_t area_size_;
@@ -430,20 +427,23 @@ class V8_EXPORT_PRIVATE PagedSpace
 };
 
 // -----------------------------------------------------------------------------
-// Base class for compaction space and off-thread space.
-class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
+// Compaction space that is used temporarily during compaction.
+class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
  public:
-  LocalSpace(Heap* heap, AllocationSpace id, Executability executable,
-             LocalSpaceKind local_space_kind)
+  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
+                  CompactionSpaceKind compaction_space_kind)
       : PagedSpace(heap, id, executable, FreeList::CreateFreeList(),
-                   local_space_kind) {
-    DCHECK_NE(local_space_kind, LocalSpaceKind::kNone);
+                   compaction_space_kind) {
+    DCHECK(is_compaction_space());
   }
 
   const std::vector<Page*>& GetNewPages() { return new_pages_; }
 
  protected:
+  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
+                                           AllocationOrigin origin) override;
+
   Page* Expand() override;
 
   // The space is temporary and not included in any snapshots.
   bool snapshotable() override { return false; }
@@ -452,31 +452,15 @@ class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
   std::vector<Page*> new_pages_;
 };
 
-// -----------------------------------------------------------------------------
-// Compaction space that is used temporarily during compaction.
-class V8_EXPORT_PRIVATE CompactionSpace : public LocalSpace {
- public:
-  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
-                  LocalSpaceKind local_space_kind)
-      : LocalSpace(heap, id, executable, local_space_kind) {
-    DCHECK(is_compaction_space());
-  }
-
- protected:
-  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
-                                           AllocationOrigin origin) override;
-};
-
 // A collection of |CompactionSpace|s used by a single compaction task.
 class CompactionSpaceCollection : public Malloced {
  public:
   explicit CompactionSpaceCollection(Heap* heap,
-                                     LocalSpaceKind local_space_kind)
+                                     CompactionSpaceKind compaction_space_kind)
       : old_space_(heap, OLD_SPACE, Executability::NOT_EXECUTABLE,
-                   local_space_kind),
+                   compaction_space_kind),
         code_space_(heap, CODE_SPACE, Executability::EXECUTABLE,
-                    local_space_kind) {}
+                    compaction_space_kind) {}
 
   CompactionSpace* Get(AllocationSpace space) {
     switch (space) {
...
@@ -529,7 +529,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
       copied_size_(0),
       promoted_size_(0),
-      allocator_(heap, LocalSpaceKind::kCompactionSpaceForScavenge),
+      allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge),
      is_logging_(is_logging),
      is_incremental_marking_(heap->incremental_marking()->IsMarking()),
      is_compacting_(heap->incremental_marking()->IsCompacting()) {}
...
@@ -28,7 +28,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   CompactionSpace* compaction_space =
       new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE,
-                          LocalSpaceKind::kCompactionSpaceForMarkCompact);
+                          CompactionSpaceKind::kCompactionSpaceForMarkCompact);
   EXPECT_TRUE(compaction_space != nullptr);
 
   for (Page* p : *old_space) {
@@ -54,7 +54,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   int pages_in_old_space = old_space->CountTotalPages();
   int pages_in_compaction_space = compaction_space->CountTotalPages();
   EXPECT_EQ(kExpectedPages, pages_in_compaction_space);
-  old_space->MergeLocalSpace(compaction_space);
+  old_space->MergeCompactionSpace(compaction_space);
   EXPECT_EQ(pages_in_old_space + pages_in_compaction_space,
             old_space->CountTotalPages());
...