Commit 32d0e026 authored by Dan Elphick, committed by Commit Bot

[heap] Move RO_SPACE to beginning of AllocationSpace

Moves RO_SPACE to the front of the AllocationSpace enum, so the space
pre-allocation iterations don't miss it. Being at the start of the enum
means that it continues to not be iterated over by any sweeper code,
which iterates from FIRST_GROWABLE_PAGED_SPACE to
LAST_GROWABLE_PAGED_SPACE (renamed from FIRST_PAGED_SPACE and
LAST_PAGED_SPACE).

Bug: v8:7464
Change-Id: I480ba784afbd878552d1cb7f9f5fa57c3b55e004
Reviewed-on: https://chromium-review.googlesource.com/973604
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52177}
parent 68b4026c
...@@ -523,20 +523,21 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer); ...@@ -523,20 +523,21 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
// NOTE: SpaceIterator depends on AllocationSpace enumeration values being // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive. // consecutive.
enum AllocationSpace { enum AllocationSpace {
// TODO(v8:7464): Actually map this space's memory as read-only.
RO_SPACE,   // Immortal, immovable and immutable objects.
NEW_SPACE, // Semispaces collected with copying collector. NEW_SPACE, // Semispaces collected with copying collector.
OLD_SPACE, // May contain pointers to new space. OLD_SPACE, // May contain pointers to new space.
CODE_SPACE, // No pointers to new space, marked executable. CODE_SPACE, // No pointers to new space, marked executable.
MAP_SPACE, // Only and all map objects. MAP_SPACE, // Only and all map objects.
LO_SPACE, // Promoted large objects. LO_SPACE, // Promoted large objects.
// TODO(v8:7464): Actually map this space's memory as read-only.
RO_SPACE, // Immortal, immovable and immutable objects.
FIRST_SPACE = NEW_SPACE, FIRST_SPACE = RO_SPACE,
LAST_SPACE = RO_SPACE, LAST_SPACE = LO_SPACE,
FIRST_PAGED_SPACE = OLD_SPACE, FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
LAST_PAGED_SPACE = MAP_SPACE LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
}; };
constexpr int kSpaceTagSize = 4; constexpr int kSpaceTagSize = 4;
STATIC_ASSERT(FIRST_SPACE == 0);
enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned }; enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
......
...@@ -1479,8 +1479,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) { ...@@ -1479,8 +1479,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
static const int kThreshold = 20; static const int kThreshold = 20;
while (gc_performed && counter++ < kThreshold) { while (gc_performed && counter++ < kThreshold) {
gc_performed = false; gc_performed = false;
for (int space = NEW_SPACE; space < SerializerDeserializer::kNumberOfSpaces; for (int space = FIRST_SPACE;
space++) { space < SerializerDeserializer::kNumberOfSpaces; space++) {
Reservation* reservation = &reservations[space]; Reservation* reservation = &reservations[space];
DCHECK_LE(1, reservation->size()); DCHECK_LE(1, reservation->size());
if (reservation->at(0).size == 0) continue; if (reservation->at(0).size == 0) continue;
...@@ -5308,7 +5308,8 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size_in_kb, ...@@ -5308,7 +5308,8 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size_in_kb,
} }
// The old generation is paged and needs at least one page for each space. // The old generation is paged and needs at least one page for each space.
int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; int paged_space_count =
LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
initial_max_old_generation_size_ = max_old_generation_size_ = initial_max_old_generation_size_ = max_old_generation_size_ =
Max(static_cast<size_t>(paged_space_count * Page::kPageSize), Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
max_old_generation_size_); max_old_generation_size_);
......
...@@ -78,16 +78,18 @@ class Sweeper::SweeperTask final : public CancelableTask { ...@@ -78,16 +78,18 @@ class Sweeper::SweeperTask final : public CancelableTask {
void RunInternal() final { void RunInternal() final {
TRACE_BACKGROUND_GC(tracer_, TRACE_BACKGROUND_GC(tracer_,
GCTracer::BackgroundScope::MC_BACKGROUND_SWEEPING); GCTracer::BackgroundScope::MC_BACKGROUND_SWEEPING);
DCHECK_GE(space_to_start_, FIRST_PAGED_SPACE); DCHECK_GE(space_to_start_, FIRST_GROWABLE_PAGED_SPACE);
DCHECK_LE(space_to_start_, LAST_PAGED_SPACE); DCHECK_LE(space_to_start_, LAST_GROWABLE_PAGED_SPACE);
const int offset = space_to_start_ - FIRST_PAGED_SPACE; const int offset = space_to_start_ - FIRST_GROWABLE_PAGED_SPACE;
const int num_spaces = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1; const int num_spaces =
LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
for (int i = 0; i < num_spaces; i++) { for (int i = 0; i < num_spaces; i++) {
const int space_id = FIRST_PAGED_SPACE + ((i + offset) % num_spaces); const int space_id =
FIRST_GROWABLE_PAGED_SPACE + ((i + offset) % num_spaces);
// Do not sweep code space concurrently. // Do not sweep code space concurrently.
if (static_cast<AllocationSpace>(space_id) == CODE_SPACE) continue; if (static_cast<AllocationSpace>(space_id) == CODE_SPACE) continue;
DCHECK_GE(space_id, FIRST_PAGED_SPACE); DCHECK_GE(space_id, FIRST_GROWABLE_PAGED_SPACE);
DCHECK_LE(space_id, LAST_PAGED_SPACE); DCHECK_LE(space_id, LAST_GROWABLE_PAGED_SPACE);
sweeper_->SweepSpaceFromTask(static_cast<AllocationSpace>(space_id)); sweeper_->SweepSpaceFromTask(static_cast<AllocationSpace>(space_id));
} }
num_sweeping_tasks_->Decrement(1); num_sweeping_tasks_->Decrement(1);
......
...@@ -123,7 +123,7 @@ class Sweeper { ...@@ -123,7 +123,7 @@ class Sweeper {
class IterabilityTask; class IterabilityTask;
class SweeperTask; class SweeperTask;
static const int kNumberOfSweepingSpaces = LAST_PAGED_SPACE + 1; static const int kNumberOfSweepingSpaces = LAST_GROWABLE_PAGED_SPACE + 1;
static const int kMaxSweeperTasks = 3; static const int kMaxSweeperTasks = 3;
template <typename Callback> template <typename Callback>
...@@ -159,11 +159,12 @@ class Sweeper { ...@@ -159,11 +159,12 @@ class Sweeper {
void MakeIterable(Page* page); void MakeIterable(Page* page);
bool IsValidIterabilitySpace(AllocationSpace space) { bool IsValidIterabilitySpace(AllocationSpace space) {
return space == NEW_SPACE; return space == NEW_SPACE || space == RO_SPACE;
} }
bool IsValidSweepingSpace(AllocationSpace space) { bool IsValidSweepingSpace(AllocationSpace space) {
return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE; return space >= FIRST_GROWABLE_PAGED_SPACE &&
space <= LAST_GROWABLE_PAGED_SPACE;
} }
Heap* const heap_; Heap* const heap_;
......
...@@ -49,14 +49,12 @@ void BuiltinSerializerAllocator::OutputStatistics() { ...@@ -49,14 +49,12 @@ void BuiltinSerializerAllocator::OutputStatistics() {
PrintF(" Spaces (bytes):\n"); PrintF(" Spaces (bytes):\n");
STATIC_ASSERT(NEW_SPACE == 0); for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
for (int space = 0; space < kNumberOfSpaces; space++) {
PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space))); PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space)));
} }
PrintF("\n"); PrintF("\n");
STATIC_ASSERT(NEW_SPACE == 0); for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
for (int space = 0; space < kNumberOfSpaces; space++) {
uint32_t space_size = (space == CODE_SPACE) ? virtual_chunk_size_ : 0; uint32_t space_size = (space == CODE_SPACE) ? virtual_chunk_size_ : 0;
PrintF("%16d", space_size); PrintF("%16d", space_size);
} }
......
...@@ -122,9 +122,8 @@ HeapObject* DefaultDeserializerAllocator::GetObject(AllocationSpace space, ...@@ -122,9 +122,8 @@ HeapObject* DefaultDeserializerAllocator::GetObject(AllocationSpace space,
void DefaultDeserializerAllocator::DecodeReservation( void DefaultDeserializerAllocator::DecodeReservation(
std::vector<SerializedData::Reservation> res) { std::vector<SerializedData::Reservation> res) {
DCHECK_EQ(0, reservations_[NEW_SPACE].size()); DCHECK_EQ(0, reservations_[FIRST_SPACE].size());
STATIC_ASSERT(NEW_SPACE == 0); int current_space = FIRST_SPACE;
int current_space = NEW_SPACE;
for (auto& r : res) { for (auto& r : res) {
reservations_[current_space].push_back({r.chunk_size(), NULL, NULL}); reservations_[current_space].push_back({r.chunk_size(), NULL, NULL});
if (r.is_last()) current_space++; if (r.is_last()) current_space++;
...@@ -135,7 +134,7 @@ void DefaultDeserializerAllocator::DecodeReservation( ...@@ -135,7 +134,7 @@ void DefaultDeserializerAllocator::DecodeReservation(
bool DefaultDeserializerAllocator::ReserveSpace() { bool DefaultDeserializerAllocator::ReserveSpace() {
#ifdef DEBUG #ifdef DEBUG
for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) { for (int i = FIRST_SPACE; i < kNumberOfSpaces; ++i) {
DCHECK_GT(reservations_[i].size(), 0); DCHECK_GT(reservations_[i].size(), 0);
} }
#endif // DEBUG #endif // DEBUG
...@@ -153,8 +152,6 @@ bool DefaultDeserializerAllocator::ReserveSpace() { ...@@ -153,8 +152,6 @@ bool DefaultDeserializerAllocator::ReserveSpace() {
bool DefaultDeserializerAllocator::ReserveSpace( bool DefaultDeserializerAllocator::ReserveSpace(
StartupDeserializer* startup_deserializer, StartupDeserializer* startup_deserializer,
BuiltinDeserializer* builtin_deserializer) { BuiltinDeserializer* builtin_deserializer) {
const int first_space = NEW_SPACE;
const int last_space = SerializerDeserializer::kNumberOfSpaces;
Isolate* isolate = startup_deserializer->isolate(); Isolate* isolate = startup_deserializer->isolate();
// Create a set of merged reservations to reserve space in one go. // Create a set of merged reservations to reserve space in one go.
...@@ -163,7 +160,7 @@ bool DefaultDeserializerAllocator::ReserveSpace( ...@@ -163,7 +160,7 @@ bool DefaultDeserializerAllocator::ReserveSpace(
// Instead, we manually determine the required code-space. // Instead, we manually determine the required code-space.
Heap::Reservation merged_reservations[kNumberOfSpaces]; Heap::Reservation merged_reservations[kNumberOfSpaces];
for (int i = first_space; i < last_space; i++) { for (int i = FIRST_SPACE; i < kNumberOfSpaces; i++) {
merged_reservations[i] = merged_reservations[i] =
startup_deserializer->allocator()->reservations_[i]; startup_deserializer->allocator()->reservations_[i];
} }
...@@ -206,12 +203,12 @@ bool DefaultDeserializerAllocator::ReserveSpace( ...@@ -206,12 +203,12 @@ bool DefaultDeserializerAllocator::ReserveSpace(
// Write back startup reservations. // Write back startup reservations.
for (int i = first_space; i < last_space; i++) { for (int i = FIRST_SPACE; i < kNumberOfSpaces; i++) {
startup_deserializer->allocator()->reservations_[i].swap( startup_deserializer->allocator()->reservations_[i].swap(
merged_reservations[i]); merged_reservations[i]);
} }
for (int i = first_space; i < kNumberOfPreallocatedSpaces; i++) { for (int i = FIRST_SPACE; i < kNumberOfPreallocatedSpaces; i++) {
startup_deserializer->allocator()->high_water_[i] = startup_deserializer->allocator()->high_water_[i] =
startup_deserializer->allocator()->reservations_[i][0].start; startup_deserializer->allocator()->reservations_[i][0].start;
} }
......
...@@ -86,8 +86,7 @@ std::vector<SerializedData::Reservation> ...@@ -86,8 +86,7 @@ std::vector<SerializedData::Reservation>
DefaultSerializerAllocator::EncodeReservations() const { DefaultSerializerAllocator::EncodeReservations() const {
std::vector<SerializedData::Reservation> out; std::vector<SerializedData::Reservation> out;
STATIC_ASSERT(NEW_SPACE == 0); for (int i = FIRST_SPACE; i < kNumberOfPreallocatedSpaces; i++) {
for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
for (size_t j = 0; j < completed_chunks_[i].size(); j++) { for (size_t j = 0; j < completed_chunks_[i].size(); j++) {
out.emplace_back(completed_chunks_[i][j]); out.emplace_back(completed_chunks_[i][j]);
} }
...@@ -106,9 +105,6 @@ DefaultSerializerAllocator::EncodeReservations() const { ...@@ -106,9 +105,6 @@ DefaultSerializerAllocator::EncodeReservations() const {
out.emplace_back(large_objects_total_size_); out.emplace_back(large_objects_total_size_);
out.back().mark_as_last(); out.back().mark_as_last();
STATIC_ASSERT(RO_SPACE == LO_SPACE + 1);
out.emplace_back(0);
out.back().mark_as_last();
return out; return out;
} }
...@@ -117,14 +113,12 @@ void DefaultSerializerAllocator::OutputStatistics() { ...@@ -117,14 +113,12 @@ void DefaultSerializerAllocator::OutputStatistics() {
PrintF(" Spaces (bytes):\n"); PrintF(" Spaces (bytes):\n");
STATIC_ASSERT(NEW_SPACE == 0); for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
for (int space = 0; space < kNumberOfSpaces; space++) {
PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space))); PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space)));
} }
PrintF("\n"); PrintF("\n");
STATIC_ASSERT(NEW_SPACE == 0); for (int space = FIRST_SPACE; space < kNumberOfPreallocatedSpaces; space++) {
for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
size_t s = pending_chunk_[space]; size_t s = pending_chunk_[space];
for (uint32_t chunk_size : completed_chunks_[space]) s += chunk_size; for (uint32_t chunk_size : completed_chunks_[space]) s += chunk_size;
PrintF("%16" PRIuS, s); PrintF("%16" PRIuS, s);
...@@ -134,10 +128,7 @@ void DefaultSerializerAllocator::OutputStatistics() { ...@@ -134,10 +128,7 @@ void DefaultSerializerAllocator::OutputStatistics() {
PrintF("%16d", num_maps_ * Map::kSize); PrintF("%16d", num_maps_ * Map::kSize);
STATIC_ASSERT(LO_SPACE == MAP_SPACE + 1); STATIC_ASSERT(LO_SPACE == MAP_SPACE + 1);
PrintF("%16d", large_objects_total_size_); PrintF("%16d\n", large_objects_total_size_);
STATIC_ASSERT(RO_SPACE == LO_SPACE + 1);
PrintF("%16d\n", 0);
} }
// static // static
......
...@@ -365,8 +365,9 @@ TEST(SizeOfInitialHeap) { ...@@ -365,8 +365,9 @@ TEST(SizeOfInitialHeap) {
// Freshly initialized VM gets by with the snapshot size (which is below // Freshly initialized VM gets by with the snapshot size (which is below
// kMaxInitialSizePerSpace per space). // kMaxInitialSizePerSpace per space).
Heap* heap = isolate->heap(); Heap* heap = isolate->heap();
int page_count[LAST_PAGED_SPACE + 1] = {0, 0, 0, 0}; int page_count[LAST_GROWABLE_PAGED_SPACE + 1] = {0, 0, 0, 0};
for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
i++) {
// Debug code can be very large, so skip CODE_SPACE if we are generating it. // Debug code can be very large, so skip CODE_SPACE if we are generating it.
if (i == CODE_SPACE && i::FLAG_debug_code) continue; if (i == CODE_SPACE && i::FLAG_debug_code) continue;
...@@ -378,7 +379,8 @@ TEST(SizeOfInitialHeap) { ...@@ -378,7 +379,8 @@ TEST(SizeOfInitialHeap) {
// Executing the empty script gets by with the same number of pages, i.e., // Executing the empty script gets by with the same number of pages, i.e.,
// requires no extra space. // requires no extra space.
CompileRun("/*empty*/"); CompileRun("/*empty*/");
for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
i++) {
// Skip CODE_SPACE, since we had to generate code even for an empty script. // Skip CODE_SPACE, since we had to generate code even for an empty script.
if (i == CODE_SPACE) continue; if (i == CODE_SPACE) continue;
CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages()); CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment