Commit f8ae62fe authored by Dan Elphick, committed by Commit Bot

[heap] Move initial objects into RO_SPACE

This moves:
* the main oddballs (null, undefined, hole, true, false) as well as
their supporting maps (also adds hole as an internalized string to make
this work).
* most of the internalized strings
* the struct maps
* empty array
* empty enum cache
* the contents of the initial string table
* the weak_cell_cache for any map in RO_SPACE (and eagerly creates the
value to avoid writing to it at run-time)

The StartupSerializer stats change as follows:

     RO_SPACE  NEW_SPACE  OLD_SPACE  CODE_SPACE  MAP_SPACE  LO_SPACE
old         0          0     270264       32608      12144         0
new     21776          0     253168       32608       8184         0

Overall memory usage has increased by 720 bytes due to the eager
initialization of the Map weak cell caches (45 read-only maps, each with
a 16-byte WeakCell, matching the WEAK_CELL_TYPE row below).
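
For orientation, here is a minimal sketch of the allocation routing this
change introduces (illustrative only, not code from this CL; the helper
name AllocateSomeReadOnlyRoot is made up, while CanAllocateInReadOnlySpace,
AllocateRaw, RO_SPACE and OLD_SPACE are the names used in the diff below):

  // Sketch: immutable roots go to RO_SPACE while the snapshot is being
  // built and fall back to OLD_SPACE once deserialization has completed.
  AllocationResult Heap::AllocateSomeReadOnlyRoot(int size) {
    AllocationSpace space =
        CanAllocateInReadOnlySpace() ? RO_SPACE : OLD_SPACE;
    return AllocateRaw(size, space);
  }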

Also extends --serialization-statistics to print out separate instance
type stats for objects in RO_SPACE as shown here:

  Read Only Instance types (count and bytes):
       404      16736  ONE_BYTE_INTERNALIZED_STRING_TYPE
         2         32  HEAP_NUMBER_TYPE
         5        240  ODDBALL_TYPE
        45       3960  MAP_TYPE
         1         16  BYTE_ARRAY_TYPE
         1         24  TUPLE2_TYPE
         1         16  FIXED_ARRAY_TYPE
         1         32  DESCRIPTOR_ARRAY_TYPE
        45        720  WEAK_CELL_TYPE

Bug: v8:7464
Change-Id: I12981c39c82a7057f68bbbe03f89fb57b0b4c6a6
Reviewed-on: https://chromium-review.googlesource.com/973722
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52435}
parent b4d1a3af
@@ -570,10 +570,10 @@ inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
 }
 
 // A flag that indicates whether objects should be pretenured when
-// allocated (allocated directly into the old generation) or not
-// (allocated in the young generation if the object size and type
+// allocated (allocated directly into either the old generation or read-only
+// space), or not (allocated in the young generation if the object size and type
 // allows).
-enum PretenureFlag { NOT_TENURED, TENURED };
+enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };
 
 inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
   switch (flag) {
@@ -581,6 +581,8 @@ inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
       return os << "NotTenured";
     case TENURED:
       return os << "Tenured";
+    case TENURED_READ_ONLY:
+      return os << "TenuredReadOnly";
   }
   UNREACHABLE();
 }
...
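A hedged usage sketch of the new flag (my illustration, not part of the
diff; AllocateWeakCell and its TENURED default argument appear in the
heap.cc and heap.h hunks below):

  // During heap setup, the weak cell cache for a read-only map can be
  // placed in RO_SPACE by passing the new flag explicitly:
  AllocationResult ro_cell = AllocateWeakCell(map, TENURED_READ_ONLY);
  // Ordinary callers keep the old behaviour via the default argument,
  // which still allocates in OLD_SPACE:
  AllocationResult old_cell = AllocateWeakCell(value);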
@@ -173,7 +173,8 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, CanAllocateInReadOnlySpace() ? RO_SPACE : OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
@@ -338,6 +339,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     DCHECK(isolate_->serializer_enabled());
 #endif
     DCHECK(!large_object);
+    DCHECK(CanAllocateInReadOnlySpace());
     allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
   } else {
     // NEW_SPACE is not allowed here.
@@ -417,6 +419,12 @@ void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
   }
 }
 
+bool Heap::CanAllocateInReadOnlySpace() {
+  return !deserialization_complete_ &&
+         (isolate()->serializer_enabled() ||
+          !isolate()->initialized_from_snapshot());
+}
+
 void Heap::UpdateAllocationsHash(HeapObject* object) {
   Address object_address = object->address();
   MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
...
@@ -2542,8 +2542,9 @@ void Heap::ConfigureInitialOldGenerationSize() {
 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                           int instance_size) {
+  DCHECK(CanAllocateInReadOnlySpace());
   Object* result = nullptr;
-  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
+  AllocationResult allocation = AllocateRaw(Map::kSize, RO_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   // Map::cast cannot be used due to uninitialized map field.
   Map* map = reinterpret_cast<Map*>(result);
@@ -2729,15 +2730,18 @@ AllocationResult Heap::AllocatePropertyCell(Name* name) {
   return result;
 }
 
-AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
+AllocationResult Heap::AllocateWeakCell(HeapObject* value,
+                                        PretenureFlag pretenure) {
+  DCHECK(pretenure != NOT_TENURED);
   int size = WeakCell::kSize;
   STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, pretenure == TENURED ? OLD_SPACE : RO_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
+  DCHECK_NOT_NULL(weak_cell_map());
   result->set_map_after_allocation(weak_cell_map(), SKIP_WRITE_BARRIER);
   WeakCell::cast(result)->initialize(value);
   return result;
@@ -3717,7 +3721,10 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
+    // TODO(delphick): Look at reworking internalized string creation to avoid
+    // this hidden global mode switch.
+    AllocationResult allocation =
+        AllocateRaw(size, CanAllocateInReadOnlySpace() ? RO_SPACE : OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
@@ -3795,12 +3802,12 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
   return result;
 }
 
 AllocationResult Heap::AllocateEmptyFixedArray() {
+  DCHECK(CanAllocateInReadOnlySpace());
   int size = FixedArray::SizeFor(0);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, RO_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   // Initialize the object.
@@ -3876,10 +3883,9 @@ AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
   return result;
 }
 
 AllocationResult Heap::AllocateEmptyFixedTypedArray(
-    ExternalArrayType array_type) {
-  return AllocateFixedTypedArray(0, array_type, false, TENURED);
+    ExternalArrayType array_type, PretenureFlag pretenure) {
+  return AllocateFixedTypedArray(0, array_type, false, pretenure);
 }
 
 namespace {
@@ -4071,7 +4077,8 @@ AllocationResult Heap::AllocatePropertyArray(int length,
                                              PretenureFlag pretenure) {
   // Allow length = 0 for the empty_property_array singleton.
   DCHECK_LE(0, length);
-  DCHECK_IMPLIES(length == 0, pretenure == TENURED);
+  DCHECK_IMPLIES(length == 0,
+                 pretenure == TENURED || pretenure == TENURED_READ_ONLY);
   DCHECK(!InNewSpace(undefined_value()));
   HeapObject* result = nullptr;
...
@@ -933,6 +933,7 @@ class Heap {
   inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                           int size_in_bytes);
 
+  inline bool CanAllocateInReadOnlySpace();
   bool deserialization_complete() const { return deserialization_complete_; }
 
   bool HasLowAllocationRate();
@@ -1800,7 +1801,16 @@ class Heap {
   // Selects the proper allocation space based on the pretenuring decision.
   static AllocationSpace SelectSpace(PretenureFlag pretenure) {
-    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
+    switch (pretenure) {
+      case TENURED_READ_ONLY:
+        return RO_SPACE;
+      case TENURED:
+        return OLD_SPACE;
+      case NOT_TENURED:
+        return NEW_SPACE;
+      default:
+        UNREACHABLE();
+    }
   }
 
   static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
@@ -2214,6 +2224,8 @@ class Heap {
   MUST_USE_RESULT AllocationResult
   AllocatePartialMap(InstanceType instance_type, int instance_size);
 
+  void FinalizePartialMap(Map* map);
+
   // Allocate a block of memory in the given space (filled with a filler).
   // Used as a fall-back for generated code when the space is full.
   MUST_USE_RESULT AllocationResult
@@ -2340,8 +2352,8 @@ class Heap {
   MUST_USE_RESULT AllocationResult AllocateEmptyBoilerplateDescription();
 
   // Allocate empty fixed typed array of given type.
-  MUST_USE_RESULT AllocationResult
-  AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
+  MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(
+      ExternalArrayType array_type, PretenureFlag pretenure = TENURED);
 
   // Allocate a tenured simple cell.
   MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
@@ -2353,7 +2365,8 @@ class Heap {
   // Allocate a tenured JS global property cell initialized with the hole.
   MUST_USE_RESULT AllocationResult AllocatePropertyCell(Name* name);
 
-  MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
+  MUST_USE_RESULT AllocationResult
+  AllocateWeakCell(HeapObject* value, PretenureFlag pretenure = TENURED);
 
   MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
...
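For pretenure-parameterized allocators, the flag now feeds directly into
the space choice. A minimal sketch under that assumption (the allocator
name is hypothetical; SelectSpace and AllocateRaw are the real helpers
shown in the hunks above):

  AllocationResult Heap::AllocateExampleArray(int size,
                                              PretenureFlag pretenure) {
    // TENURED_READ_ONLY now routes to RO_SPACE; TENURED and NOT_TENURED
    // keep their old meaning (OLD_SPACE and NEW_SPACE respectively).
    AllocationSpace space = SelectSpace(pretenure);
    return AllocateRaw(size, space);
  }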
@@ -314,6 +314,7 @@ HeapObject* PagedSpace::TryAllocateLinearlyAligned(
 
 AllocationResult PagedSpace::AllocateRawUnaligned(
     int size_in_bytes, UpdateSkipList update_skip_list) {
+  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
   if (!EnsureLinearAllocationArea(size_in_bytes)) {
     return AllocationResult::Retry(identity());
   }
@@ -329,7 +330,8 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
 
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
-  DCHECK(identity() == OLD_SPACE);
+  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
+  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
   int allocation_size = size_in_bytes;
   HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
   if (object == nullptr) {
...
@@ -1917,7 +1917,8 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
       // be in map space.
       Map* map = object->map();
       CHECK(map->IsMap());
-      CHECK(heap()->map_space()->Contains(map));
+      CHECK(heap()->map_space()->Contains(map) ||
+            heap()->read_only_space()->Contains(map));
 
       // Perform space-specific object verification.
       VerifyObject(object);
@@ -2368,10 +2369,11 @@ void NewSpace::Verify() {
     HeapObject* object = HeapObject::FromAddress(current);
 
     // The first word should be a map, and we expect all map pointers to
-    // be in map space.
+    // be in map space or read-only space.
     Map* map = object->map();
     CHECK(map->IsMap());
-    CHECK(heap()->map_space()->Contains(map));
+    CHECK(heap()->map_space()->Contains(map) ||
+          heap()->read_only_space()->Contains(map));
 
     // The object should not be code or a map.
     CHECK(!object->IsMap());
@@ -3445,10 +3447,11 @@ void LargeObjectSpace::Verify() {
     CHECK(object->address() == page->area_start());
 
     // The first word should be a map, and we expect all map pointers to be
-    // in map space.
+    // in map space or read-only space.
     Map* map = object->map();
     CHECK(map->IsMap());
-    CHECK(heap()->map_space()->Contains(map));
+    CHECK(heap()->map_space()->Contains(map) ||
+          heap()->read_only_space()->Contains(map));
 
     // We have only code, sequential strings, external strings (sequential
     // strings that have been morphed into external strings), thin strings
...
@@ -25,13 +25,19 @@ Serializer<AllocatorT>::Serializer(Isolate* isolate)
   if (FLAG_serialization_statistics) {
     instance_type_count_ = NewArray<int>(kInstanceTypes);
     instance_type_size_ = NewArray<size_t>(kInstanceTypes);
+    read_only_instance_type_count_ = NewArray<int>(kInstanceTypes);
+    read_only_instance_type_size_ = NewArray<size_t>(kInstanceTypes);
     for (int i = 0; i < kInstanceTypes; i++) {
       instance_type_count_[i] = 0;
       instance_type_size_[i] = 0;
+      read_only_instance_type_count_[i] = 0;
+      read_only_instance_type_size_[i] = 0;
     }
   } else {
     instance_type_count_ = nullptr;
     instance_type_size_ = nullptr;
+    read_only_instance_type_count_ = nullptr;
+    read_only_instance_type_size_ = nullptr;
   }
 #endif  // OBJECT_PRINT
 }
@@ -43,16 +49,24 @@ Serializer<AllocatorT>::~Serializer() {
   if (instance_type_count_ != nullptr) {
     DeleteArray(instance_type_count_);
     DeleteArray(instance_type_size_);
+    DeleteArray(read_only_instance_type_count_);
+    DeleteArray(read_only_instance_type_size_);
   }
 #endif  // OBJECT_PRINT
 }
 
 #ifdef OBJECT_PRINT
 template <class AllocatorT>
-void Serializer<AllocatorT>::CountInstanceType(Map* map, int size) {
+void Serializer<AllocatorT>::CountInstanceType(Map* map, int size,
+                                               AllocationSpace space) {
   int instance_type = map->instance_type();
-  instance_type_count_[instance_type]++;
-  instance_type_size_[instance_type] += size;
+  if (space != RO_SPACE) {
+    instance_type_count_[instance_type]++;
+    instance_type_size_[instance_type] += size;
+  } else {
+    read_only_instance_type_count_[instance_type]++;
+    read_only_instance_type_size_[instance_type] += size;
+  }
 }
 #endif  // OBJECT_PRINT
@@ -72,6 +86,21 @@ void Serializer<AllocatorT>::OutputStatistics(const char* name) {
   }
   INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
 #undef PRINT_INSTANCE_TYPE
+  size_t read_only_total = 0;
+#define UPDATE_TOTAL(Name) \
+  read_only_total += read_only_instance_type_size_[Name];
+  INSTANCE_TYPE_LIST(UPDATE_TOTAL)
+#undef UPDATE_TOTAL
+  if (read_only_total > 0) {
+    PrintF("\n  Read Only Instance types (count and bytes):\n");
+#define PRINT_INSTANCE_TYPE(Name)                                           \
+  if (read_only_instance_type_count_[Name]) {                               \
+    PrintF("%10d %10" PRIuS "  %s\n", read_only_instance_type_count_[Name], \
+           read_only_instance_type_size_[Name], #Name);                     \
+  }
+    INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
+#undef PRINT_INSTANCE_TYPE
+  }
   PrintF("\n");
 #endif  // OBJECT_PRINT
 }
@@ -360,7 +389,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
 #ifdef OBJECT_PRINT
   if (FLAG_serialization_statistics) {
-    serializer_->CountInstanceType(map, size);
+    serializer_->CountInstanceType(map, size, space);
   }
 #endif  // OBJECT_PRINT
...
@@ -225,7 +225,7 @@ class Serializer : public SerializerDeserializer {
   void OutputStatistics(const char* name);
 
 #ifdef OBJECT_PRINT
-  void CountInstanceType(Map* map, int size);
+  void CountInstanceType(Map* map, int size, AllocationSpace space);
 #endif  // OBJECT_PRINT
 
 #ifdef DEBUG
@@ -255,6 +255,8 @@ class Serializer : public SerializerDeserializer {
   static const int kInstanceTypes = LAST_TYPE + 1;
   int* instance_type_count_;
   size_t* instance_type_size_;
+  int* read_only_instance_type_count_;
+  size_t* read_only_instance_type_size_;
 #endif  // OBJECT_PRINT
 
 #ifdef DEBUG
...