Commit eb667651 authored by Leszek Swirski, committed by Commit Bot

[heap] Add base class for LargeObjectSpaces

Both LO_SPACE and NEW_LO_SPACE use the basic page management system of
LargeObjectSpace, but implement different AllocateRaw methods (with
the NEW_LO_SPACE version shadowing the LO_SPACE version).

To clean this up, and to allow other future LargeObjectSpace implementations
(in particular, an off-thread variant), this change refactors the current
LargeObjectSpace into a base class and makes both LargeObjectSpace
(renamed to OldLargeObjectSpace) and NewLargeObjectSpace extend that base
class.
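
For orientation, the resulting hierarchy can be sketched as below. This is a
condensed standalone illustration only: Heap, AllocationResult, the trivial
bodies, and the file name are placeholder stand-ins, and the shared
page-management members of the base class are omitted; the src/heap/spaces.h
hunk further down shows the actual declarations.

// hierarchy_sketch.cc -- illustrative only, not V8's real declarations.
#include <stddef.h>

enum AllocationSpace { LO_SPACE, NEW_LO_SPACE, CODE_LO_SPACE };
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
struct Heap {};
struct AllocationResult {};

// Base class: shared large-page management, no AllocateRaw of its own.
class LargeObjectSpace {
 public:
  LargeObjectSpace(Heap* heap, AllocationSpace id) : heap_(heap), id_(id) {}
  virtual ~LargeObjectSpace() = default;

 protected:
  Heap* heap_;
  AllocationSpace id_;
};

// Old-generation variant (LO_SPACE): carries the AllocateRaw overloads that
// previously lived on LargeObjectSpace itself.
class OldLargeObjectSpace : public LargeObjectSpace {
 public:
  explicit OldLargeObjectSpace(Heap* heap) : LargeObjectSpace(heap, LO_SPACE) {}
  AllocationResult AllocateRaw(int object_size) {
    return AllocateRaw(object_size, NOT_EXECUTABLE);
  }

 protected:
  OldLargeObjectSpace(Heap* heap, AllocationSpace id)
      : LargeObjectSpace(heap, id) {}
  AllocationResult AllocateRaw(int object_size, Executability executable) {
    // Placeholder for the real old-space allocation path.
    return AllocationResult{};
  }
};

// Young-generation variant (NEW_LO_SPACE): defines its own AllocateRaw, which
// no longer shadows an old-space AllocateRaw inherited from the base class.
class NewLargeObjectSpace : public LargeObjectSpace {
 public:
  NewLargeObjectSpace(Heap* heap, size_t capacity)
      : LargeObjectSpace(heap, NEW_LO_SPACE), capacity_(capacity) {}
  AllocationResult AllocateRaw(int object_size) { return AllocationResult{}; }

 private:
  size_t capacity_;
};

// Code variant (CODE_LO_SPACE): now derives from OldLargeObjectSpace and
// forwards allocation with EXECUTABLE permissions.
class CodeLargeObjectSpace : public OldLargeObjectSpace {
 public:
  explicit CodeLargeObjectSpace(Heap* heap)
      : OldLargeObjectSpace(heap, CODE_LO_SPACE) {}
  AllocationResult AllocateRaw(int object_size) {
    return OldLargeObjectSpace::AllocateRaw(object_size, EXECUTABLE);
  }
};

int main() {
  Heap heap;
  OldLargeObjectSpace lo(&heap);
  (void)lo.AllocateRaw(4096);  // resolves to the single-argument overload
  CodeLargeObjectSpace code_lo(&heap);
  (void)code_lo.AllocateRaw(4096);  // forwards as EXECUTABLE
  return 0;
}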

Bug: chromium:1011762
Change-Id: I41b45b97f2611611dcfde677213131396df03a5e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1876824
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64560}
parent 731e3015
@@ -598,7 +598,6 @@ class JSReceiver;
 class JSArray;
 class JSFunction;
 class JSObject;
-class LargeObjectSpace;
 class MacroAssembler;
 class Map;
 class MapSpace;
@@ -616,6 +615,7 @@ class NewSpace;
 class NewLargeObjectSpace;
 class NumberDictionary;
 class Object;
+class OldLargeObjectSpace;
 template <HeapObjectReferenceType kRefType, typename StorageType>
 class TaggedImpl;
 class StrongTaggedValue;
......
@@ -67,11 +67,11 @@ void CodeStatistics::CollectCodeStatistics(PagedSpace* space,
   }
 }
 
-// Collects code size statistics in LargeObjectSpace:
+// Collects code size statistics in OldLargeObjectSpace:
 // - code and metadata size
 // - by code kind (only in debug mode)
 // - by code comment (only in debug mode)
-void CodeStatistics::CollectCodeStatistics(LargeObjectSpace* space,
+void CodeStatistics::CollectCodeStatistics(OldLargeObjectSpace* space,
                                            Isolate* isolate) {
   LargeObjectSpaceObjectIterator obj_it(space);
   for (HeapObject obj = obj_it.Next(); !obj.is_null(); obj = obj_it.Next()) {
......
@@ -11,7 +11,7 @@ namespace internal {
 class CodeCommentsIterator;
 class HeapObject;
 class Isolate;
-class LargeObjectSpace;
+class OldLargeObjectSpace;
 class PagedSpace;
 
 class CodeStatistics {
@@ -20,7 +20,8 @@ class CodeStatistics {
   static void CollectCodeStatistics(PagedSpace* space, Isolate* isolate);
 
   // Collect statistics related to code size from large object space.
-  static void CollectCodeStatistics(LargeObjectSpace* space, Isolate* isolate);
+  static void CollectCodeStatistics(OldLargeObjectSpace* space,
+                                    Isolate* isolate);
 
   // Reset code size related statistics
   static void ResetCodeAndMetadataStatistics(Isolate* isolate);
......
@@ -5036,7 +5036,7 @@ void Heap::SetUpSpaces() {
   space_[OLD_SPACE] = old_space_ = new OldSpace(this);
   space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
   space_[MAP_SPACE] = map_space_ = new MapSpace(this);
-  space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this);
+  space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
   space_[NEW_LO_SPACE] = new_lo_space_ =
       new NewLargeObjectSpace(this, new_space_->Capacity());
   space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
......
@@ -680,7 +680,7 @@ class Heap {
   OldSpace* old_space() { return old_space_; }
   CodeSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
-  LargeObjectSpace* lo_space() { return lo_space_; }
+  OldLargeObjectSpace* lo_space() { return lo_space_; }
   CodeLargeObjectSpace* code_lo_space() { return code_lo_space_; }
   NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
   ReadOnlySpace* read_only_space() { return read_only_space_; }
@@ -1878,7 +1878,7 @@ class Heap {
   OldSpace* old_space_ = nullptr;
   CodeSpace* code_space_ = nullptr;
   MapSpace* map_space_ = nullptr;
-  LargeObjectSpace* lo_space_ = nullptr;
+  OldLargeObjectSpace* lo_space_ = nullptr;
   CodeLargeObjectSpace* code_lo_space_ = nullptr;
   NewLargeObjectSpace* new_lo_space_ = nullptr;
   ReadOnlySpace* read_only_space_ = nullptr;
@@ -2098,7 +2098,7 @@ class Heap {
   friend class IdleScavengeObserver;
   friend class IncrementalMarking;
   friend class IncrementalMarkingJob;
-  friend class LargeObjectSpace;
+  friend class OldLargeObjectSpace;
   template <FixedArrayVisitationMode fixed_array_mode,
             TraceRetainingPathMode retaining_path_mode, typename MarkingState>
   friend class MarkingVisitor;
......
@@ -136,11 +136,7 @@ class MarkBitCellIterator {
   Address cell_base_;
 };
 
-enum LiveObjectIterationMode {
-  kBlackObjects,
-  kGreyObjects,
-  kAllLiveObjects
-};
+enum LiveObjectIterationMode { kBlackObjects, kGreyObjects, kAllLiveObjects };
 
 template <LiveObjectIterationMode mode>
 class LiveObjectRange {
@@ -867,15 +863,13 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   base::Semaphore page_parallel_job_semaphore_;
 
 #ifdef DEBUG
-  enum CollectorState {
-    IDLE,
-    PREPARE_GC,
-    MARK_LIVE_OBJECTS,
-    SWEEP_SPACES,
-    ENCODE_FORWARDING_ADDRESSES,
-    UPDATE_POINTERS,
-    RELOCATE_OBJECTS
-  };
+  enum CollectorState{IDLE,
+                      PREPARE_GC,
+                      MARK_LIVE_OBJECTS,
+                      SWEEP_SPACES,
+                      ENCODE_FORWARDING_ADDRESSES,
+                      UPDATE_POINTERS,
+                      RELOCATE_OBJECTS};
 
   // The current stage of the collector.
   CollectorState state_;
......
@@ -11,6 +11,7 @@
 #include "src/base/lsan.h"
 #include "src/base/macros.h"
 #include "src/base/platform/semaphore.h"
+#include "src/common/globals.h"
 #include "src/execution/vm-state-inl.h"
 #include "src/heap/array-buffer-tracker-inl.h"
 #include "src/heap/combined-heap.h"
@@ -4001,10 +4002,7 @@ HeapObject LargeObjectSpaceObjectIterator::Next() {
 }
 
 // -----------------------------------------------------------------------------
-// LargeObjectSpace
-
-LargeObjectSpace::LargeObjectSpace(Heap* heap)
-    : LargeObjectSpace(heap, LO_SPACE) {}
+// OldLargeObjectSpace
 
 LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
     : Space(heap, id, new NoFreeList()),
@@ -4023,12 +4021,12 @@ void LargeObjectSpace::TearDown() {
   }
 }
 
-AllocationResult LargeObjectSpace::AllocateRaw(int object_size) {
+AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size) {
   return AllocateRaw(object_size, NOT_EXECUTABLE);
 }
 
-AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
+AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size,
                                                Executability executable) {
   // Check if we want to force a GC before growing the old space further.
   // If so, fail the allocation.
   if (!heap()->CanExpandOldGeneration(object_size) ||
@@ -4071,7 +4069,6 @@ LargePage* LargeObjectSpace::AllocateLargePage(int object_size,
   return page;
 }
 
-
 size_t LargeObjectSpace::CommittedPhysicalMemory() {
   // On a platform that provides lazy committing of memory, we over-account
   // the actually committed memory. There is no easy way right now to support
@@ -4327,6 +4324,12 @@ void Page::Print() {
 }
 #endif  // DEBUG
 
+OldLargeObjectSpace::OldLargeObjectSpace(Heap* heap)
+    : LargeObjectSpace(heap, LO_SPACE) {}
+
+OldLargeObjectSpace::OldLargeObjectSpace(Heap* heap, AllocationSpace id)
+    : LargeObjectSpace(heap, id) {}
+
 NewLargeObjectSpace::NewLargeObjectSpace(Heap* heap, size_t capacity)
     : LargeObjectSpace(heap, NEW_LO_SPACE),
       pending_object_(0),
@@ -4414,21 +4417,21 @@ void NewLargeObjectSpace::SetCapacity(size_t capacity) {
 }
 
 CodeLargeObjectSpace::CodeLargeObjectSpace(Heap* heap)
-    : LargeObjectSpace(heap, CODE_LO_SPACE),
+    : OldLargeObjectSpace(heap, CODE_LO_SPACE),
      chunk_map_(kInitialChunkMapCapacity) {}
 
 AllocationResult CodeLargeObjectSpace::AllocateRaw(int object_size) {
-  return LargeObjectSpace::AllocateRaw(object_size, EXECUTABLE);
+  return OldLargeObjectSpace::AllocateRaw(object_size, EXECUTABLE);
 }
 
 void CodeLargeObjectSpace::AddPage(LargePage* page, size_t object_size) {
-  LargeObjectSpace::AddPage(page, object_size);
+  OldLargeObjectSpace::AddPage(page, object_size);
   InsertChunkMapEntries(page);
 }
 
 void CodeLargeObjectSpace::RemovePage(LargePage* page, size_t object_size) {
   RemoveChunkMapEntries(page);
-  LargeObjectSpace::RemovePage(page, object_size);
+  OldLargeObjectSpace::RemovePage(page, object_size);
 }
 
 }  // namespace internal
......
@@ -46,6 +46,7 @@ class CompactionSpace;
 class CompactionSpaceCollection;
 class FreeList;
 class Isolate;
+class LargeObjectSpace;
 class LinearAllocationArea;
 class LocalArrayBufferTracker;
 class LocalSpace;
@@ -437,7 +438,7 @@ class V8_EXPORT_PRIVATE Space : public Malloced {
   virtual size_t Size() = 0;
 
   // Returns size of objects. Can differ from the allocated size
-  // (e.g. see LargeObjectSpace).
+  // (e.g. see OldLargeObjectSpace).
   virtual size_t SizeOfObjects() { return Size(); }
 
   // Approximate amount of physical memory committed for this space.
@@ -3229,7 +3230,6 @@ class LargeObjectSpace : public Space {
  public:
   using iterator = LargePageIterator;
 
-  explicit LargeObjectSpace(Heap* heap);
   LargeObjectSpace(Heap* heap, AllocationSpace id);
 
   ~LargeObjectSpace() override { TearDown(); }
@@ -3237,9 +3237,6 @@ class LargeObjectSpace : public Space {
   // Releases internal resources, frees objects in this space.
   void TearDown();
 
-  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
-  AllocateRaw(int object_size);
-
   // Available bytes for objects in this space.
   size_t Available() override;
 
@@ -3293,8 +3290,6 @@ class LargeObjectSpace : public Space {
 
  protected:
   LargePage* AllocateLargePage(int object_size, Executability executable);
 
-  V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int object_size,
-                                                     Executability executable);
   size_t size_;          // allocated bytes
   int page_count_;       // number of chunks
@@ -3304,6 +3299,19 @@ class LargeObjectSpace : public Space {
   friend class LargeObjectSpaceObjectIterator;
 };
 
+class OldLargeObjectSpace : public LargeObjectSpace {
+ public:
+  explicit OldLargeObjectSpace(Heap* heap);
+
+  V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult
+  AllocateRaw(int object_size);
+
+ protected:
+  explicit OldLargeObjectSpace(Heap* heap, AllocationSpace id);
+  V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int object_size,
+                                                     Executability executable);
+};
+
 class NewLargeObjectSpace : public LargeObjectSpace {
  public:
   NewLargeObjectSpace(Heap* heap, size_t capacity);
@@ -3333,7 +3341,7 @@ class NewLargeObjectSpace : public LargeObjectSpace {
   size_t capacity_;
 };
 
-class CodeLargeObjectSpace : public LargeObjectSpace {
+class CodeLargeObjectSpace : public OldLargeObjectSpace {
  public:
   explicit CodeLargeObjectSpace(Heap* heap);
......
@@ -26,7 +26,7 @@ Address DeserializerAllocator::AllocateRaw(SnapshotSpace space, int size) {
     AlwaysAllocateScope scope(heap_);
     // Note that we currently do not support deserialization of large code
    // objects.
-    LargeObjectSpace* lo_space = heap_->lo_space();
+    OldLargeObjectSpace* lo_space = heap_->lo_space();
     AllocationResult result = lo_space->AllocateRaw(size);
     HeapObject obj = result.ToObjectChecked();
     deserialized_large_objects_.push_back(obj);
......
@@ -290,13 +290,13 @@ TEST(OldSpace) {
   delete s;
 }
 
-TEST(LargeObjectSpace) {
+TEST(OldLargeObjectSpace) {
   // This test does not initialize allocated objects, which confuses the
   // incremental marker.
   FLAG_incremental_marking = false;
   v8::V8::Initialize();
 
-  LargeObjectSpace* lo = CcTest::heap()->lo_space();
+  OldLargeObjectSpace* lo = CcTest::heap()->lo_space();
   CHECK_NOT_NULL(lo);
 
   int lo_size = Page::kPageSize;
@@ -406,7 +406,7 @@ static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
   return filler;
 }
 
-static HeapObject AllocateUnaligned(LargeObjectSpace* space, int size) {
+static HeapObject AllocateUnaligned(OldLargeObjectSpace* space, int size) {
   AllocationResult allocation = space->AllocateRaw(size);
   CHECK(!allocation.IsRetry());
   HeapObject filler;
@@ -512,8 +512,8 @@ UNINITIALIZED_TEST(AllocationObserver) {
     // classes inheriting from PagedSpace.
     testAllocationObserver<PagedSpace>(i_isolate,
                                        i_isolate->heap()->old_space());
-    testAllocationObserver<LargeObjectSpace>(i_isolate,
-                                             i_isolate->heap()->lo_space());
+    testAllocationObserver<OldLargeObjectSpace>(i_isolate,
+                                                i_isolate->heap()->lo_space());
   }
   isolate->Dispose();
 }
......
@@ -3,7 +3,7 @@
 // found in the LICENSE file.
 
 // Test spreading of large holey arrays, which are supposedly allocated in
-// LargeObjectSpace. Holes should be replaced with undefined.
+// OldLargeObjectSpace. Holes should be replaced with undefined.
 
 var arr = new Array(2e5);
......
@@ -647,7 +647,7 @@ function print_memory(space = "all") {
                             h.code_space_.allocation_info_.top_);
   }
   if (st.includes("all") || st.includes("large") || st.includes("lo")) {
-    print_memory_chunk_list("LargeObjectSpace",
+    print_memory_chunk_list("OldLargeObjectSpace",
                             h.lo_space_.memory_chunk_list_.front_);
   }
   if (st.includes("all") || st.includes("newlarge") || st.includes("nlo")) {
......