Commit 6c68efac authored by Dan Elphick, committed by Commit Bot

Reland "[heap] Move initial objects into RO_SPACE"

This is a reland of f8ae62fe

Original change's description:
> [heap] Move initial objects into RO_SPACE
> 
> This moves:
> * the main oddballs (null, undefined, hole, true, false) as well as
> their supporting maps (also adds hole as an internalized string to make
> this work).
> * most of the internalized strings
> * the struct maps
> * empty array
> * empty enum cache
> * the contents of the initial string table
> * the weak_cell_cache for any map in RO_SPACE (and eagerly creates the
> value to avoid writing to it during run-time)
> 
> The StartupSerializer stats change as follows:
> 
>      RO_SPACE  NEW_SPACE  OLD_SPACE  CODE_SPACE  MAP_SPACE  LO_SPACE
> old         0          0     270264       32608      12144         0
> new     21776          0     253168       32608       8184         0
> Overall memory usage has increased by 720 bytes due to the eager
> initialization of the Map weak cell caches.
> 
> Also extends --serialization-statistics to print out separate instance
> type stats for objects in RO_SPACE as shown here:
> 
>   Read Only Instance types (count and bytes):
>        404      16736  ONE_BYTE_INTERNALIZED_STRING_TYPE
>          2         32  HEAP_NUMBER_TYPE
>          5        240  ODDBALL_TYPE
>         45       3960  MAP_TYPE
>          1         16  BYTE_ARRAY_TYPE
>          1         24  TUPLE2_TYPE
>          1         16  FIXED_ARRAY_TYPE
>          1         32  DESCRIPTOR_ARRAY_TYPE
>         45        720  WEAK_CELL_TYPE
> 
> Bug: v8:7464
> Change-Id: I12981c39c82a7057f68bbbe03f89fb57b0b4c6a6
> Reviewed-on: https://chromium-review.googlesource.com/973722
> Commit-Queue: Dan Elphick <delphick@chromium.org>
> Reviewed-by: Hannes Payer <hpayer@chromium.org>
> Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
> Reviewed-by: Yang Guo <yangguo@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#52435}

Bug: v8:7464
Change-Id: I50427edfeb53ca80ec4cf46566368fb2213ccf7b
Reviewed-on: https://chromium-review.googlesource.com/999654
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52638}
parent c7e6cf7e
@@ -571,10 +571,10 @@ inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
 }
 
 // A flag that indicates whether objects should be pretenured when
-// allocated (allocated directly into the old generation) or not
-// (allocated in the young generation if the object size and type
+// allocated (allocated directly into either the old generation or read-only
+// space), or not (allocated in the young generation if the object size and type
 // allows).
-enum PretenureFlag { NOT_TENURED, TENURED };
+enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };
 
 inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
   switch (flag) {
@@ -582,6 +582,8 @@ inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
       return os << "NotTenured";
     case TENURED:
      return os << "Tenured";
+    case TENURED_READ_ONLY:
+      return os << "TenuredReadOnly";
   }
   UNREACHABLE();
 }
...
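For readers skimming the diff: the pretenure flag gains a third state, and the stream operator learns to print it. Below is a minimal standalone sketch (plain C++ with mocked names outside the V8 tree, not the real definitions) showing how the three flags print:

#include <iostream>
#include <ostream>

// Mocked, illustrative copy of the flag; the real definition lives in V8.
enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };

inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
  switch (flag) {
    case NOT_TENURED:
      return os << "NotTenured";
    case TENURED:
      return os << "Tenured";
    case TENURED_READ_ONLY:
      return os << "TenuredReadOnly";
  }
  return os;  // All cases covered; this keeps compilers quiet.
}

int main() {
  // Prints: NotTenured Tenured TenuredReadOnly
  std::cout << NOT_TENURED << ' ' << TENURED << ' ' << TENURED_READ_ONLY << '\n';
}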
@@ -663,7 +663,11 @@ Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
   Map* map = *one_byte_internalized_string_map();
   int size = SeqOneByteString::SizeFor(length);
-  HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
+  HeapObject* result = AllocateRawWithImmortalMap(
+      size,
+      isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
+                                                      : TENURED,
+      map);
   Handle<SeqOneByteString> answer(SeqOneByteString::cast(result), isolate());
   answer->set_length(length);
   answer->set_hash_field(hash_field);
@@ -707,7 +711,11 @@ Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
     size = SeqTwoByteString::SizeFor(chars);
   }
-  HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
+  HeapObject* result = AllocateRawWithImmortalMap(
+      size,
+      isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
+                                                      : TENURED,
+      map);
   Handle<String> answer(String::cast(result), isolate());
   answer->set_length(chars);
   answer->set_hash_field(hash_field);
@@ -1607,13 +1615,14 @@ Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name) {
   return cell;
 }
 
-Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value) {
+Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value,
+                                      PretenureFlag pretenure) {
   // It is safe to dereference the value because we are embedding it
   // in cell and not inspecting its fields.
   AllowDeferredHandleDereference convert_to_cell;
   STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
   HeapObject* result =
-      AllocateRawWithImmortalMap(WeakCell::kSize, TENURED, *weak_cell_map());
+      AllocateRawWithImmortalMap(WeakCell::kSize, pretenure, *weak_cell_map());
   Handle<WeakCell> cell(WeakCell::cast(result), isolate());
   cell->initialize(*value);
   return cell;
...
@@ -435,7 +435,8 @@ class V8_EXPORT_PRIVATE Factory {
   Handle<PropertyCell> NewPropertyCell(Handle<Name> name);
 
-  Handle<WeakCell> NewWeakCell(Handle<HeapObject> value);
+  Handle<WeakCell> NewWeakCell(Handle<HeapObject> value,
+                               PretenureFlag pretenure = TENURED);
 
   Handle<FeedbackCell> NewNoClosuresCell(Handle<HeapObject> value);
   Handle<FeedbackCell> NewOneClosureCell(Handle<HeapObject> value);
...
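The factory changes above all follow the same pattern: request TENURED_READ_ONLY only while the heap still permits read-only allocation, otherwise fall back to TENURED. A hedged, self-contained sketch of that selection pattern follows (names are mocked; the boolean stands in for the Heap::CanAllocateInReadOnlySpace() predicate added in this change):

#include <cassert>

enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };

// Stand-in for Heap::CanAllocateInReadOnlySpace(); in V8 this depends on
// the serialization/deserialization state of the isolate.
static bool can_allocate_in_read_only_space = true;

// Mirrors the ternary used in Factory::AllocateRawOneByteInternalizedString
// and Factory::AllocateInternalizedStringImpl in the diff above.
PretenureFlag PretenureForImmortalImmovable() {
  return can_allocate_in_read_only_space ? TENURED_READ_ONLY : TENURED;
}

int main() {
  // While the read-only space is still writable, objects go there.
  assert(PretenureForImmortalImmovable() == TENURED_READ_ONLY);

  // Once read-only allocation is no longer allowed (e.g. after
  // deserialization), the same call sites fall back to old-space tenuring.
  can_allocate_in_read_only_space = false;
  assert(PretenureForImmortalImmovable() == TENURED);
  return 0;
}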
@@ -183,6 +183,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     DCHECK(isolate_->serializer_enabled());
 #endif
     DCHECK(!large_object);
+    DCHECK(CanAllocateInReadOnlySpace());
     allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
   } else {
     // NEW_SPACE is not allowed here.
@@ -261,6 +262,12 @@ void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
   }
 }
 
+bool Heap::CanAllocateInReadOnlySpace() {
+  return !deserialization_complete_ &&
+         (isolate()->serializer_enabled() ||
+          !isolate()->initialized_from_snapshot());
+}
+
 void Heap::UpdateAllocationsHash(HeapObject* object) {
   Address object_address = object->address();
   MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
...
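Read-only allocation is only legal in a narrow window: before deserialization has finished, and only when either building a snapshot (serializer enabled) or booting from scratch without one. A small sketch of that predicate as a pure function over three booleans (a mock for illustration, not the V8 code path) is given below:

#include <cassert>

// Pure-function version of the check added as Heap::CanAllocateInReadOnlySpace().
bool CanAllocateInReadOnlySpace(bool deserialization_complete,
                                bool serializer_enabled,
                                bool initialized_from_snapshot) {
  return !deserialization_complete &&
         (serializer_enabled || !initialized_from_snapshot);
}

int main() {
  // While building the startup snapshot (mksnapshot): allowed.
  assert(CanAllocateInReadOnlySpace(false, true, true));
  // Booting without a snapshot, heap still being set up: allowed.
  assert(CanAllocateInReadOnlySpace(false, false, false));
  // Normal embedder start from a snapshot, after deserialization: not allowed.
  assert(!CanAllocateInReadOnlySpace(true, false, true));
  // Once deserialization completes, RO_SPACE is effectively frozen.
  assert(!CanAllocateInReadOnlySpace(true, true, true));
  return 0;
}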
@@ -943,6 +943,7 @@ class Heap {
   inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                           int size_in_bytes);
 
+  inline bool CanAllocateInReadOnlySpace();
   bool deserialization_complete() const { return deserialization_complete_; }
 
   bool HasLowAllocationRate();
@@ -1807,7 +1808,16 @@ class Heap {
   // Selects the proper allocation space based on the pretenuring decision.
   static AllocationSpace SelectSpace(PretenureFlag pretenure) {
-    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
+    switch (pretenure) {
+      case TENURED_READ_ONLY:
+        return RO_SPACE;
+      case TENURED:
+        return OLD_SPACE;
+      case NOT_TENURED:
+        return NEW_SPACE;
+      default:
+        UNREACHABLE();
+    }
   }
 
   static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
@@ -2132,9 +2142,11 @@ class Heap {
   V8_WARN_UNUSED_RESULT AllocationResult
   AllocatePartialMap(InstanceType instance_type, int instance_size);
 
+  void FinalizePartialMap(Map* map);
+
   // Allocate empty fixed typed array of given type.
-  V8_WARN_UNUSED_RESULT AllocationResult
-  AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
+  V8_WARN_UNUSED_RESULT AllocationResult AllocateEmptyFixedTypedArray(
+      ExternalArrayType array_type, AllocationSpace space = OLD_SPACE);
 
   void set_force_oom(bool value) { force_oom_ = value; }
...
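With the third flag in place, Heap::SelectSpace becomes a straight three-way mapping rather than a ternary. A standalone sketch of that mapping follows (enum values are mocked for illustration; the real AllocationSpace enum has more members and different ordering):

#include <cassert>
#include <cstdlib>

enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };
// Mocked subset of V8's AllocationSpace enum.
enum AllocationSpace { NEW_SPACE, OLD_SPACE, RO_SPACE };

AllocationSpace SelectSpace(PretenureFlag pretenure) {
  switch (pretenure) {
    case TENURED_READ_ONLY:
      return RO_SPACE;
    case TENURED:
      return OLD_SPACE;
    case NOT_TENURED:
      return NEW_SPACE;
    default:
      std::abort();  // Mirrors UNREACHABLE() in the real code.
  }
}

int main() {
  assert(SelectSpace(NOT_TENURED) == NEW_SPACE);
  assert(SelectSpace(TENURED) == OLD_SPACE);
  assert(SelectSpace(TENURED_READ_ONLY) == RO_SPACE);
  return 0;
}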
@@ -315,6 +315,7 @@ HeapObject* PagedSpace::TryAllocateLinearlyAligned(
 AllocationResult PagedSpace::AllocateRawUnaligned(
     int size_in_bytes, UpdateSkipList update_skip_list) {
+  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
   if (!EnsureLinearAllocationArea(size_in_bytes)) {
     return AllocationResult::Retry(identity());
   }
@@ -330,7 +331,8 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
 AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                 AllocationAlignment alignment) {
-  DCHECK(identity() == OLD_SPACE);
+  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
+  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
   int allocation_size = size_in_bytes;
   HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
   if (object == nullptr) {
...
@@ -1923,7 +1923,8 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
     // be in map space.
     Map* map = object->map();
     CHECK(map->IsMap());
-    CHECK(heap()->map_space()->Contains(map));
+    CHECK(heap()->map_space()->Contains(map) ||
+          heap()->read_only_space()->Contains(map));
 
     // Perform space-specific object verification.
     VerifyObject(object);
@@ -2374,10 +2375,11 @@ void NewSpace::Verify() {
     HeapObject* object = HeapObject::FromAddress(current);
 
     // The first word should be a map, and we expect all map pointers to
-    // be in map space.
+    // be in map space or read-only space.
     Map* map = object->map();
     CHECK(map->IsMap());
-    CHECK(heap()->map_space()->Contains(map));
+    CHECK(heap()->map_space()->Contains(map) ||
+          heap()->read_only_space()->Contains(map));
 
     // The object should not be code or a map.
     CHECK(!object->IsMap());
@@ -3452,10 +3454,11 @@ void LargeObjectSpace::Verify() {
     CHECK(object->address() == page->area_start());
 
     // The first word should be a map, and we expect all map pointers to be
-    // in map space.
+    // in map space or read-only space.
     Map* map = object->map();
     CHECK(map->IsMap());
-    CHECK(heap()->map_space()->Contains(map));
+    CHECK(heap()->map_space()->Contains(map) ||
+          heap()->read_only_space()->Contains(map));
 
     // We have only code, sequential strings, external strings (sequential
     // strings that have been morphed into external strings), thin strings
...
@@ -25,13 +25,19 @@ Serializer<AllocatorT>::Serializer(Isolate* isolate)
   if (FLAG_serialization_statistics) {
     instance_type_count_ = NewArray<int>(kInstanceTypes);
     instance_type_size_ = NewArray<size_t>(kInstanceTypes);
+    read_only_instance_type_count_ = NewArray<int>(kInstanceTypes);
+    read_only_instance_type_size_ = NewArray<size_t>(kInstanceTypes);
     for (int i = 0; i < kInstanceTypes; i++) {
       instance_type_count_[i] = 0;
       instance_type_size_[i] = 0;
+      read_only_instance_type_count_[i] = 0;
+      read_only_instance_type_size_[i] = 0;
     }
   } else {
     instance_type_count_ = nullptr;
     instance_type_size_ = nullptr;
+    read_only_instance_type_count_ = nullptr;
+    read_only_instance_type_size_ = nullptr;
   }
 #endif  // OBJECT_PRINT
 }
@@ -43,16 +49,24 @@ Serializer<AllocatorT>::~Serializer() {
   if (instance_type_count_ != nullptr) {
     DeleteArray(instance_type_count_);
     DeleteArray(instance_type_size_);
+    DeleteArray(read_only_instance_type_count_);
+    DeleteArray(read_only_instance_type_size_);
   }
 #endif  // OBJECT_PRINT
 }
 
 #ifdef OBJECT_PRINT
 template <class AllocatorT>
-void Serializer<AllocatorT>::CountInstanceType(Map* map, int size) {
+void Serializer<AllocatorT>::CountInstanceType(Map* map, int size,
+                                               AllocationSpace space) {
   int instance_type = map->instance_type();
-  instance_type_count_[instance_type]++;
-  instance_type_size_[instance_type] += size;
+  if (space != RO_SPACE) {
+    instance_type_count_[instance_type]++;
+    instance_type_size_[instance_type] += size;
+  } else {
+    read_only_instance_type_count_[instance_type]++;
+    read_only_instance_type_size_[instance_type] += size;
+  }
 }
 #endif  // OBJECT_PRINT
@@ -72,6 +86,21 @@ void Serializer<AllocatorT>::OutputStatistics(const char* name) {
   }
   INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
 #undef PRINT_INSTANCE_TYPE
+  size_t read_only_total = 0;
+#define UPDATE_TOTAL(Name) \
+  read_only_total += read_only_instance_type_size_[Name];
+  INSTANCE_TYPE_LIST(UPDATE_TOTAL)
+#undef UPDATE_TOTAL
+  if (read_only_total > 0) {
+    PrintF("\n  Read Only Instance types (count and bytes):\n");
+#define PRINT_INSTANCE_TYPE(Name)                                            \
+  if (read_only_instance_type_count_[Name]) {                                \
+    PrintF("%10d %10" PRIuS "  %s\n", read_only_instance_type_count_[Name],  \
+           read_only_instance_type_size_[Name], #Name);                      \
+  }
+    INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
+#undef PRINT_INSTANCE_TYPE
+  }
   PrintF("\n");
 #endif  // OBJECT_PRINT
 }
@@ -362,7 +391,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
 #ifdef OBJECT_PRINT
   if (FLAG_serialization_statistics) {
-    serializer_->CountInstanceType(map, size);
+    serializer_->CountInstanceType(map, size, space);
   }
 #endif  // OBJECT_PRINT
...
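The serializer statistics change boils down to keeping two parallel tallies keyed by instance type and routing each object to one of them based on the space it was serialized into. A condensed sketch of that bookkeeping follows (using std::map and plain strings instead of V8's instance-type arrays and INSTANCE_TYPE_LIST macros; the type names and sizes fed to it here are illustrative):

#include <cstddef>
#include <cstdio>
#include <map>
#include <string>

enum AllocationSpace { NEW_SPACE, OLD_SPACE, RO_SPACE };

struct TypeStats {
  int count = 0;
  std::size_t bytes = 0;
};

std::map<std::string, TypeStats> instance_type_stats;
std::map<std::string, TypeStats> read_only_instance_type_stats;

// Analogue of Serializer::CountInstanceType(map, size, space): RO_SPACE
// objects go into their own tally so they can be reported separately.
void CountInstanceType(const std::string& type, std::size_t size,
                       AllocationSpace space) {
  auto& stats = (space == RO_SPACE) ? read_only_instance_type_stats
                                    : instance_type_stats;
  stats[type].count++;
  stats[type].bytes += size;
}

int main() {
  CountInstanceType("ONE_BYTE_INTERNALIZED_STRING_TYPE", 40, RO_SPACE);
  CountInstanceType("FIXED_ARRAY_TYPE", 120, OLD_SPACE);

  // Analogue of the extra block in OutputStatistics(): the read-only section
  // is only printed when something was actually serialized into RO_SPACE.
  if (!read_only_instance_type_stats.empty()) {
    std::printf("\n  Read Only Instance types (count and bytes):\n");
    for (const auto& entry : read_only_instance_type_stats) {
      std::printf("%10d %10zu  %s\n", entry.second.count, entry.second.bytes,
                  entry.first.c_str());
    }
  }
  return 0;
}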
@@ -226,7 +226,7 @@ class Serializer : public SerializerDeserializer {
   void OutputStatistics(const char* name);
 
 #ifdef OBJECT_PRINT
-  void CountInstanceType(Map* map, int size);
+  void CountInstanceType(Map* map, int size, AllocationSpace space);
 #endif  // OBJECT_PRINT
 
 #ifdef DEBUG
@@ -256,6 +256,8 @@ class Serializer : public SerializerDeserializer {
   static const int kInstanceTypes = LAST_TYPE + 1;
   int* instance_type_count_;
   size_t* instance_type_size_;
+  int* read_only_instance_type_count_;
+  size_t* read_only_instance_type_size_;
 #endif  // OBJECT_PRINT
 
 #ifdef DEBUG
...