Commit c538b9b4 authored by Clemens Backes, committed by V8 LUCI CQ

Revert "[heap] Remove unused LocalSpace class"

This reverts commit b0c70710.

Reason for revert: Lots of compile errors.

Original change's description:
> [heap] Remove unused LocalSpace class
>
> LocalSpace was introduced for off-heap spaces with concurrent bytecode
> compilation finalization. However, finalization ended up using
> LocalHeap for concurrent allocations. LocalSpace is therefore unused
> and can be removed.
>
> This CL removes LocalSpace and renames all mentions of local space to
> compaction space. Compaction space was the only local space left.
>
> Change-Id: I12a8a2724f777a77ddb9957fe2d8e89febfebbaf
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2930169
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#74914}

Change-Id: I3a654da0ddb556c1fb8767f8401ecd3b46786bea
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2933140
Auto-Submit: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/heads/master@{#74915}
parent b0c70710
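For orientation before the diff: a minimal, self-contained sketch of the shape this revert restores. The class and enum names match the diff below; everything else (the constructors, the stand-in Page type, plain comparisons instead of base::IsInRange) is simplified for illustration and is not V8's actual code.

```cpp
#include <vector>

// Hypothetical stand-in so the sketch compiles on its own; the real type
// lives inside v8::internal and carries far more state.
struct Page {};

enum class LocalSpaceKind {
  kNone,
  kCompactionSpaceForScavenge,
  kCompactionSpaceForMarkCompact,
  kCompactionSpaceForMinorMarkCompact,

  kFirstCompactionSpace = kCompactionSpaceForScavenge,
  kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
};

class PagedSpace {
 public:
  explicit PagedSpace(LocalSpaceKind kind) : local_space_kind_(kind) {}
  virtual ~PagedSpace() = default;

  // Any space with a kind other than kNone is "local" (task-private).
  bool is_local_space() const {
    return local_space_kind_ != LocalSpaceKind::kNone;
  }

  // The kFirst/kLast sentinels turn the compaction check into a range
  // test (V8 uses base::IsInRange; plain comparisons here).
  bool is_compaction_space() const {
    return local_space_kind_ >= LocalSpaceKind::kFirstCompactionSpace &&
           local_space_kind_ <= LocalSpaceKind::kLastCompactionSpace;
  }

 private:
  LocalSpaceKind local_space_kind_;
};

// Restored intermediate class: a space owned by a single task, whose
// freshly allocated pages are merged back into the real space later.
class LocalSpace : public PagedSpace {
 public:
  using PagedSpace::PagedSpace;

 protected:
  std::vector<Page*> new_pages_;
};

// The only concrete LocalSpace after this revert: used temporarily
// while a GC compacts the heap.
class CompactionSpace : public LocalSpace {
 public:
  using LocalSpace::LocalSpace;
};
```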
@@ -848,11 +848,14 @@ enum MinimumCapacity {
 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

-enum class CompactionSpaceKind {
+enum class LocalSpaceKind {
   kNone,
   kCompactionSpaceForScavenge,
   kCompactionSpaceForMarkCompact,
   kCompactionSpaceForMinorMarkCompact,
+
+  kFirstCompactionSpace = kCompactionSpaceForScavenge,
+  kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
 };

 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
@@ -21,20 +21,18 @@ class EvacuationAllocator {
   static const int kLabSize = 32 * KB;
   static const int kMaxLabObjectSize = 8 * KB;

-  explicit EvacuationAllocator(Heap* heap,
-                               CompactionSpaceKind compaction_space_kind)
+  explicit EvacuationAllocator(Heap* heap, LocalSpaceKind local_space_kind)
       : heap_(heap),
         new_space_(heap->new_space()),
-        compaction_spaces_(heap, compaction_space_kind),
+        compaction_spaces_(heap, local_space_kind),
         new_space_lab_(LocalAllocationBuffer::InvalidBuffer()),
         lab_allocation_will_fail_(false) {}

   // Needs to be called from the main thread to finalize this
   // EvacuationAllocator.
   void Finalize() {
-    heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
-    heap_->code_space()->MergeCompactionSpace(
-        compaction_spaces_.Get(CODE_SPACE));
+    heap_->old_space()->MergeLocalSpace(compaction_spaces_.Get(OLD_SPACE));
+    heap_->code_space()->MergeLocalSpace(compaction_spaces_.Get(CODE_SPACE));
     // Give back remaining LAB space if this EvacuationAllocator's new space LAB
     // sits right next to new space allocation top.
     const LinearAllocationArea info = new_space_lab_.CloseAndMakeIterable();
@@ -3091,8 +3091,7 @@ class FullEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   FLAG_always_promote_young_mc),
         record_visitor_(collector, &ephemeron_remembered_set_),
-        local_allocator_(heap_,
-                         CompactionSpaceKind::kCompactionSpaceForMarkCompact),
+        local_allocator_(heap_, LocalSpaceKind::kCompactionSpaceForMarkCompact),
         collector_(collector) {}

   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
@@ -5164,8 +5163,8 @@ class YoungGenerationEvacuator : public Evacuator {
       : Evacuator(collector->heap(), &record_visitor_, &local_allocator_,
                   false),
         record_visitor_(collector->heap()->mark_compact_collector()),
-        local_allocator_(
-            heap_, CompactionSpaceKind::kCompactionSpaceForMinorMarkCompact),
+        local_allocator_(heap_,
+                         LocalSpaceKind::kCompactionSpaceForMinorMarkCompact),
         collector_(collector) {}

   GCTracer::Scope::ScopeId GetBackgroundTracingScope() override {
@@ -81,10 +81,10 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk) {
 PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
                        Executability executable, FreeList* free_list,
-                       CompactionSpaceKind compaction_space_kind)
+                       LocalSpaceKind local_space_kind)
     : SpaceWithLinearArea(heap, space, free_list),
       executable_(executable),
-      compaction_space_kind_(compaction_space_kind) {
+      local_space_kind_(local_space_kind) {
   area_size_ = MemoryChunkLayout::AllocatableMemoryInMemoryChunk(space);
   accounting_stats_.Clear();
 }
@@ -105,6 +105,7 @@ void PagedSpace::RefillFreeList() {
       identity() != MAP_SPACE) {
     return;
   }
+  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   size_t added = 0;
@@ -122,8 +123,7 @@ void PagedSpace::RefillFreeList() {
         // Also merge old-to-new remembered sets if not scavenging because of
         // data races: One thread might iterate remembered set, while another
         // thread merges them.
-        if (compaction_space_kind() !=
-            CompactionSpaceKind::kCompactionSpaceForScavenge) {
+        if (local_space_kind() != LocalSpaceKind::kCompactionSpaceForScavenge) {
           p->MergeOldToNewRememberedSets();
         }
@@ -150,7 +150,7 @@ void PagedSpace::RefillFreeList() {
   }
 }

-void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
+void PagedSpace::MergeLocalSpace(LocalSpace* other) {
   base::MutexGuard guard(mutex());

   DCHECK(identity() == other->identity());
@@ -555,7 +555,7 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
 base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
     LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
     AllocationAlignment alignment, AllocationOrigin origin) {
-  DCHECK(!is_compaction_space() && identity() == OLD_SPACE);
+  DCHECK(!is_local_space() && identity() == OLD_SPACE);
   DCHECK_EQ(origin, AllocationOrigin::kRuntime);

   auto result = TryAllocationFromFreeListBackground(
@@ -840,7 +840,7 @@ bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
   return RawRefillLabMain(size_in_bytes, origin);
 }

-Page* CompactionSpace::Expand() {
+Page* LocalSpace::Expand() {
   Page* page = PagedSpace::Expand();
   new_pages_.push_back(page);
   return page;
@@ -864,6 +864,9 @@ bool PagedSpace::TryExpand(int size_in_bytes, AllocationOrigin origin) {
 }

 bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
+  // Non-compaction local spaces are not supported.
+  DCHECK_IMPLIES(is_local_space(), is_compaction_space());
+
   // Allocation in this space has failed.
   DCHECK_GE(size_in_bytes, 0);
   const int kMaxPagesToSweep = 1;
@@ -943,7 +946,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
 AllocationResult PagedSpace::AllocateRawSlow(int size_in_bytes,
                                              AllocationAlignment alignment,
                                              AllocationOrigin origin) {
-  if (!is_compaction_space()) {
+  if (!is_local_space()) {
     // Start incremental marking before the actual allocation, this allows the
     // allocation function to mark the object black when incremental marking is
     // running.
@@ -21,10 +21,10 @@
 namespace v8 {
 namespace internal {

-class CompactionSpace;
 class Heap;
 class HeapObject;
 class Isolate;
+class LocalSpace;
 class ObjectVisitor;

 // -----------------------------------------------------------------------------
@@ -71,10 +71,9 @@ class V8_EXPORT_PRIVATE PagedSpace
   static const size_t kCompactionMemoryWanted = 500 * KB;

   // Creates a space with an id.
-  PagedSpace(
-      Heap* heap, AllocationSpace id, Executability executable,
-      FreeList* free_list,
-      CompactionSpaceKind compaction_space_kind = CompactionSpaceKind::kNone);
+  PagedSpace(Heap* heap, AllocationSpace id, Executability executable,
+             FreeList* free_list,
+             LocalSpaceKind local_space_kind = LocalSpaceKind::kNone);

   ~PagedSpace() override { TearDown(); }
@@ -257,15 +256,19 @@ class V8_EXPORT_PRIVATE PagedSpace
   // Return size of allocatable area on a page in this space.
   inline int AreaSize() { return static_cast<int>(area_size_); }

+  bool is_local_space() { return local_space_kind_ != LocalSpaceKind::kNone; }
+
   bool is_compaction_space() {
-    return compaction_space_kind_ != CompactionSpaceKind::kNone;
+    return base::IsInRange(local_space_kind_,
+                           LocalSpaceKind::kFirstCompactionSpace,
+                           LocalSpaceKind::kLastCompactionSpace);
   }

-  CompactionSpaceKind compaction_space_kind() { return compaction_space_kind_; }
+  LocalSpaceKind local_space_kind() { return local_space_kind_; }

   // Merges {other} into the current space. Note that this modifies {other},
   // e.g., removes its bump pointer area and resets statistics.
-  void MergeCompactionSpace(CompactionSpace* other);
+  void MergeLocalSpace(LocalSpace* other);

   // Refills the free list from the corresponding free list filled by the
   // sweeper.
@@ -321,13 +324,13 @@ class V8_EXPORT_PRIVATE PagedSpace
     base::Optional<base::MutexGuard> guard_;
   };

-  bool SupportsConcurrentAllocation() { return !is_compaction_space(); }
+  bool SupportsConcurrentAllocation() { return !is_local_space(); }

   // Set space linear allocation area.
   void SetTopAndLimit(Address top, Address limit);
   void DecreaseLimit(Address new_limit);
   void UpdateInlineAllocationLimit(size_t min_size) override;
-  bool SupportsAllocationObserver() override { return !is_compaction_space(); }
+  bool SupportsAllocationObserver() override { return !is_local_space(); }

   // Slow path of allocation function
   V8_WARN_UNUSED_RESULT AllocationResult
@@ -404,7 +407,7 @@ class V8_EXPORT_PRIVATE PagedSpace
   Executability executable_;

-  CompactionSpaceKind compaction_space_kind_;
+  LocalSpaceKind local_space_kind_;

   size_t area_size_;
@@ -427,23 +430,20 @@
 };

 // -----------------------------------------------------------------------------
-// Compaction space that is used temporarily during compaction.
+// Base class for compaction space and off-thread space.

-class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
+class V8_EXPORT_PRIVATE LocalSpace : public PagedSpace {
  public:
-  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
-                  CompactionSpaceKind compaction_space_kind)
+  LocalSpace(Heap* heap, AllocationSpace id, Executability executable,
+             LocalSpaceKind local_space_kind)
       : PagedSpace(heap, id, executable, FreeList::CreateFreeList(),
-                   compaction_space_kind) {
-    DCHECK(is_compaction_space());
+                   local_space_kind) {
+    DCHECK_NE(local_space_kind, LocalSpaceKind::kNone);
   }

   const std::vector<Page*>& GetNewPages() { return new_pages_; }

 protected:
-  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
-                                           AllocationOrigin origin) override;
   Page* Expand() override;

   // The space is temporary and not included in any snapshots.
   bool snapshotable() override { return false; }
@@ -452,15 +452,31 @@ class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
   std::vector<Page*> new_pages_;
 };

+// -----------------------------------------------------------------------------
+// Compaction space that is used temporarily during compaction.
+
+class V8_EXPORT_PRIVATE CompactionSpace : public LocalSpace {
+ public:
+  CompactionSpace(Heap* heap, AllocationSpace id, Executability executable,
+                  LocalSpaceKind local_space_kind)
+      : LocalSpace(heap, id, executable, local_space_kind) {
+    DCHECK(is_compaction_space());
+  }
+
+ protected:
+  V8_WARN_UNUSED_RESULT bool RefillLabMain(int size_in_bytes,
+                                           AllocationOrigin origin) override;
+};
+
 // A collection of |CompactionSpace|s used by a single compaction task.
 class CompactionSpaceCollection : public Malloced {
  public:
   explicit CompactionSpaceCollection(Heap* heap,
-                                     CompactionSpaceKind compaction_space_kind)
+                                     LocalSpaceKind local_space_kind)
       : old_space_(heap, OLD_SPACE, Executability::NOT_EXECUTABLE,
-                   compaction_space_kind),
+                   local_space_kind),
         code_space_(heap, CODE_SPACE, Executability::EXECUTABLE,
-                    compaction_space_kind) {}
+                    local_space_kind) {}

   CompactionSpace* Get(AllocationSpace space) {
     switch (space) {
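Taken together, the EvacuationAllocator and CompactionSpaceCollection hunks restore the following lifecycle. The sketch below paraphrases the diff under the assumption that it sits inside v8::internal with a valid Heap*; the helper name EvacuateThenMerge is hypothetical and exists only for illustration:

```cpp
// Sketch only: one CompactionSpaceCollection per compaction task, keyed
// by the GC that is running; pages are merged back on the main thread
// (compare EvacuationAllocator::Finalize in the hunk above).
void EvacuateThenMerge(Heap* heap) {
  CompactionSpaceCollection compaction_spaces(
      heap, LocalSpaceKind::kCompactionSpaceForMarkCompact);

  // During evacuation, the task allocates from its private spaces,
  // without contending on the main allocator:
  CompactionSpace* old_space = compaction_spaces.Get(OLD_SPACE);
  (void)old_space;  // ... allocation calls happen here ...

  // Afterwards, the main thread hands the new pages back wholesale:
  heap->old_space()->MergeLocalSpace(compaction_spaces.Get(OLD_SPACE));
  heap->code_space()->MergeLocalSpace(compaction_spaces.Get(CODE_SPACE));
}
```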
@@ -529,7 +529,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
       local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
       copied_size_(0),
       promoted_size_(0),
-      allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge),
+      allocator_(heap, LocalSpaceKind::kCompactionSpaceForScavenge),
       is_logging_(is_logging),
       is_incremental_marking_(heap->incremental_marking()->IsMarking()),
       is_compacting_(heap->incremental_marking()->IsCompacting()) {}
@@ -28,7 +28,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   CompactionSpace* compaction_space =
       new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE,
-                          CompactionSpaceKind::kCompactionSpaceForMarkCompact);
+                          LocalSpaceKind::kCompactionSpaceForMarkCompact);
   EXPECT_TRUE(compaction_space != nullptr);

   for (Page* p : *old_space) {
@@ -54,7 +54,7 @@ TEST_F(SpacesTest, CompactionSpaceMerge) {
   int pages_in_old_space = old_space->CountTotalPages();
   int pages_in_compaction_space = compaction_space->CountTotalPages();
   EXPECT_EQ(kExpectedPages, pages_in_compaction_space);

-  old_space->MergeCompactionSpace(compaction_space);
+  old_space->MergeLocalSpace(compaction_space);
   EXPECT_EQ(pages_in_old_space + pages_in_compaction_space,
             old_space->CountTotalPages());