Commit fdf69d53 authored by Hannes Payer, committed by Commit Bot

[heap] Adds a young generation large object space

This CL adds the young generation large object space and a flag
--young-generation-large-objects that, when enabled, allocates all
large objects in this space. This is a preparation CL; the space
is not fully functional.

Bug: chromium:852420
Change-Id: Ib66d26fa52cda89bf04787084826aeb84b6ec1ac
Reviewed-on: https://chromium-review.googlesource.com/1099164
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54056}
parent cf2f3066
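
The behavioral core of the CL is a single branch added to Heap::AllocateRaw (see the heap-inl.cc hunk below): when the flag is set, allocations bound for LO_SPACE are redirected to the new space. A minimal standalone model of that routing follows; the space and flag names mirror the CL, but everything else here is a stand-in, not V8's real API:

#include <cstdio>

// Toy model of the allocation routing this CL introduces; the real code is
// in the heap-inl.cc hunk below. Types here are stand-ins.
enum AllocationSpace { LO_SPACE, NEW_LO_SPACE };

// Mirrors FLAG_young_generation_large_objects (default false, per the
// flag-definitions.h hunk below).
static bool FLAG_young_generation_large_objects = false;

static AllocationSpace RouteLargeObject() {
  // With the flag on, every large object starts out in the young generation
  // large object space; otherwise behavior is unchanged.
  return FLAG_young_generation_large_objects ? NEW_LO_SPACE : LO_SPACE;
}

int main() {
  std::printf("default routing: %d (0 = LO_SPACE)\n", RouteLargeObject());
  FLAG_young_generation_large_objects = true;
  std::printf("with flag:       %d (1 = NEW_LO_SPACE)\n", RouteLargeObject());
}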
......@@ -785,6 +785,10 @@ DEFINE_BOOL(optimize_ephemerons, true,
DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_marking)
DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_visiting)
DEFINE_BOOL(young_generation_large_objects, false,
"allocates large objects by default in the young generation large "
"object space")
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_BOOL(debug_code, DEBUG_BOOL,
"generate extra code (assertions) for debugging")
......
......@@ -536,6 +536,7 @@ class MapSpace;
class MarkCompactCollector;
class MaybeObject;
class NewSpace;
class NewLargeObjectSpace;
class Object;
class OldSpace;
class ParameterCount;
......@@ -570,14 +571,16 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
 enum AllocationSpace {
   // TODO(v8:7464): Actually map this space's memory as read-only.
   RO_SPACE,    // Immortal, immovable and immutable objects,
-  NEW_SPACE,   // Semispaces collected with copying collector.
-  OLD_SPACE,   // May contain pointers to new space.
-  CODE_SPACE,  // No pointers to new space, marked executable.
-  MAP_SPACE,   // Only and all map objects.
-  LO_SPACE,    // Promoted large objects.
+  NEW_SPACE,   // Young generation semispaces for regular objects collected with
+               // Scavenger.
+  OLD_SPACE,   // Old generation regular object space.
+  CODE_SPACE,  // Old generation code object space, marked executable.
+  MAP_SPACE,   // Old generation map object space, non-movable.
+  LO_SPACE,    // Old generation large object space.
+  NEW_LO_SPACE,  // Young generation large object space.

   FIRST_SPACE = RO_SPACE,
-  LAST_SPACE = LO_SPACE,
+  LAST_SPACE = NEW_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
 };
......
......@@ -183,7 +183,11 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     }
   } else if (LO_SPACE == space) {
     DCHECK(large_object);
-    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    if (FLAG_young_generation_large_objects) {
+      allocation = new_lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    } else {
+      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    }
   } else if (MAP_SPACE == space) {
     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
   } else if (RO_SPACE == space) {
......
......@@ -165,6 +165,7 @@ Heap::Heap()
code_space_(nullptr),
map_space_(nullptr),
lo_space_(nullptr),
new_lo_space_(nullptr),
read_only_space_(nullptr),
write_protect_code_memory_(false),
code_space_memory_modification_scope_depth_(0),
......@@ -673,6 +674,8 @@ const char* Heap::GetSpaceName(int idx) {
return "code_space";
case LO_SPACE:
return "large_object_space";
case NEW_LO_SPACE:
return "new_large_object_space";
case RO_SPACE:
return "read_only_space";
default:
......@@ -3647,6 +3650,8 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
return map_space_->Contains(value);
case LO_SPACE:
return lo_space_->Contains(value);
case NEW_LO_SPACE:
return new_lo_space_->Contains(value);
case RO_SPACE:
return read_only_space_->Contains(value);
}
......@@ -3670,13 +3675,14 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
return map_space_->ContainsSlow(addr);
case LO_SPACE:
return lo_space_->ContainsSlow(addr);
case NEW_LO_SPACE:
return new_lo_space_->ContainsSlow(addr);
case RO_SPACE:
return read_only_space_->ContainsSlow(addr);
}
UNREACHABLE();
}
bool Heap::IsValidAllocationSpace(AllocationSpace space) {
switch (space) {
case NEW_SPACE:
......@@ -3684,6 +3690,7 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
case CODE_SPACE:
case MAP_SPACE:
case LO_SPACE:
case NEW_LO_SPACE:
case RO_SPACE:
return true;
default:
......@@ -4593,6 +4600,7 @@ void Heap::SetUp() {
space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
space_[MAP_SPACE] = map_space_ = new MapSpace(this);
space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this);
space_[NEW_LO_SPACE] = new_lo_space_ = new NewLargeObjectSpace(this);
// Set up the seed that is used to randomize the string hash function.
DCHECK_EQ(Smi::kZero, hash_seed());
......@@ -5529,6 +5537,8 @@ const char* AllocationSpaceName(AllocationSpace space) {
return "MAP_SPACE";
case LO_SPACE:
return "LO_SPACE";
case NEW_LO_SPACE:
return "NEW_LO_SPACE";
case RO_SPACE:
return "RO_SPACE";
default:
......@@ -5602,6 +5612,7 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
return dst == CODE_SPACE && type == CODE_TYPE;
case MAP_SPACE:
case LO_SPACE:
case NEW_LO_SPACE:
case RO_SPACE:
return false;
}
......
......@@ -1012,6 +1012,7 @@ class Heap {
CodeSpace* code_space() { return code_space_; }
MapSpace* map_space() { return map_space_; }
LargeObjectSpace* lo_space() { return lo_space_; }
NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
ReadOnlySpace* read_only_space() { return read_only_space_; }
inline PagedSpace* paged_space(int idx);
......@@ -2290,6 +2291,7 @@ class Heap {
CodeSpace* code_space_;
MapSpace* map_space_;
LargeObjectSpace* lo_space_;
NewLargeObjectSpace* new_lo_space_;
ReadOnlySpace* read_only_space_;
// Map from the space id to the space.
Space* space_[LAST_SPACE + 1];
......
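
A side effect worth noting: the space table above is declared as Space* space_[LAST_SPACE + 1], so redefining LAST_SPACE to NEW_LO_SPACE in the heap.h enum automatically adds the slot that SetUp() fills. A minimal standalone model (the enum ordering is copied from the heap.h hunk above; Space is a stand-in type):

// Standalone model of the space table sizing.
enum AllocationSpace { RO_SPACE, NEW_SPACE, OLD_SPACE, CODE_SPACE,
                       MAP_SPACE, LO_SPACE, NEW_LO_SPACE,
                       FIRST_SPACE = RO_SPACE, LAST_SPACE = NEW_LO_SPACE };
struct Space {};
Space* space_[LAST_SPACE + 1];  // now 7 slots, indexed by AllocationSpace
static_assert(sizeof(space_) / sizeof(space_[0]) == NEW_LO_SPACE + 1,
              "bumping LAST_SPACE grows the table by one slot");

int main() { return 0; }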
......@@ -3250,7 +3250,10 @@ HeapObject* LargeObjectIterator::Next() {
 // LargeObjectSpace
 LargeObjectSpace::LargeObjectSpace(Heap* heap)
-    : Space(heap, LO_SPACE),  // Managed on a per-allocation basis
+    : LargeObjectSpace(heap, LO_SPACE) {}
+
+LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
+    : Space(heap, id),
       size_(0),
       page_count_(0),
       objects_size_(0),
......@@ -3547,5 +3550,13 @@ void Page::Print() {
}
#endif // DEBUG
NewLargeObjectSpace::NewLargeObjectSpace(Heap* heap)
: LargeObjectSpace(heap, NEW_LO_SPACE) {}
size_t NewLargeObjectSpace::Available() {
// TODO(hpayer): Update as soon as we have a growing strategy.
return 0;
}
} // namespace internal
} // namespace v8
......@@ -2946,6 +2946,8 @@ class LargeObjectSpace : public Space {
typedef LargePageIterator iterator;
explicit LargeObjectSpace(Heap* heap);
LargeObjectSpace(Heap* heap, AllocationSpace id);
~LargeObjectSpace() override { TearDown(); }
// Releases internal resources, frees objects in this space.
......@@ -3039,6 +3041,13 @@ class LargeObjectSpace : public Space {
friend class LargeObjectIterator;
};
class NewLargeObjectSpace : public LargeObjectSpace {
public:
explicit NewLargeObjectSpace(Heap* heap);
// Available bytes for objects in this space.
size_t Available() override;
};
class LargeObjectIterator : public ObjectIterator {
public:
......
......@@ -105,8 +105,12 @@ class SerializerDeserializer : public RootVisitor {
   // No reservation for large object space necessary.
   // We also handle map space differently.
   STATIC_ASSERT(MAP_SPACE == CODE_SPACE + 1);
+
+  // We do not support young generation large objects.
+  STATIC_ASSERT(LAST_SPACE == NEW_LO_SPACE);
+  STATIC_ASSERT(LAST_SPACE - 1 == LO_SPACE);
   static const int kNumberOfPreallocatedSpaces = CODE_SPACE + 1;
-  static const int kNumberOfSpaces = LAST_SPACE + 1;
+  static const int kNumberOfSpaces = LO_SPACE + 1;
protected:
static bool CanBeDeferred(HeapObject* o);
......
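
The asserts above pin down why the old kNumberOfSpaces = LAST_SPACE + 1 had to change: NEW_LO_SPACE is now the last enum value, but the snapshot must not reference it, so the count stops at LO_SPACE. A standalone sketch of the index arithmetic (the enum values are assumed to follow the heap.h ordering above):

// Standalone check of the serializer's space counting.
enum AllocationSpace { RO_SPACE, NEW_SPACE, OLD_SPACE, CODE_SPACE,
                       MAP_SPACE, LO_SPACE, NEW_LO_SPACE,
                       LAST_SPACE = NEW_LO_SPACE };

static_assert(LAST_SPACE == NEW_LO_SPACE, "young large objects come last");
static_assert(LAST_SPACE - 1 == LO_SPACE, "LO_SPACE sits directly below");
// Counting LO_SPACE + 1 = 6 spaces (RO through LO) covers everything the
// snapshot may contain and skips NEW_LO_SPACE entirely.
static_assert(LO_SPACE + 1 == 6, "six serializable spaces");

int main() { return 0; }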
......@@ -647,6 +647,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializeObject() {
Map* map = object_->map();
AllocationSpace space =
MemoryChunk::FromAddress(object_->address())->owner()->identity();
DCHECK(space != NEW_LO_SPACE);
SerializePrologue(space, size, map);
// Serialize the rest of the object.
......
......@@ -5656,6 +5656,18 @@ TEST(Regress618958) {
!heap->incremental_marking()->IsStopped()));
}
TEST(YoungGenerationLargeObjectAllocation) {
FLAG_young_generation_large_objects = true;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
CHECK(chunk->owner()->identity() == NEW_LO_SPACE);
}
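
The 200000-element array in the test above is chosen to force a large object allocation: on x64 a FixedArray of that length needs roughly 200000 * 8 bytes plus a small header, about 1.6 MB, far above V8's regular-object limit (roughly half a megabyte at the time). A back-of-the-envelope check follows; the constants here are approximations, not the authoritative values from src/globals.h:

#include <cstddef>
#include <cstdio>

// Rough x64 constants for the era of this CL; the real limits live in
// src/globals.h and src/heap/spaces.h, so treat these as approximations.
constexpr size_t kPointerSize = 8;
constexpr size_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length
constexpr size_t kMaxRegularHeapObjectSize = 507136;        // ~496 KB

int main() {
  constexpr size_t kLength = 200000;  // as in the cctest above
  constexpr size_t size = kFixedArrayHeaderSize + kLength * kPointerSize;
  std::printf("FixedArray(200000) needs %zu bytes; large object: %s\n",
              size, size > kMaxRegularHeapObjectSize ? "yes" : "no");
}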
TEST(UncommitUnusedLargeObjectMemory) {
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
......
......@@ -18922,6 +18922,9 @@ TEST(GetHeapSpaceStatistics) {
v8::HeapSpaceStatistics space_statistics;
isolate->GetHeapSpaceStatistics(&space_statistics, i);
CHECK_NOT_NULL(space_statistics.space_name());
if (strcmp(space_statistics.space_name(), "new_large_object_space") == 0) {
continue;
}
CHECK_GT(space_statistics.space_size(), 0u);
total_size += space_statistics.space_size();
CHECK_GT(space_statistics.space_used_size(), 0u);
......