Commit f8ae62fe authored by Dan Elphick, committed by Commit Bot

[heap] Move initial objects into RO_SPACE

This moves:
* the main oddballs (null, undefined, hole, true, false) as well as
their supporting maps (also adds hole as an internalized string to make
this work).
* most of the internalized strings
* the struct maps
* empty array
* empty enum cache
* the contents of the initial string table
* the weak_cell_cache for any map in RO_SPACE (and eagerly creates the
value to avoid writing to it during run-time)

The StartupSerializer stats change as follows:

     RO_SPACE  NEW_SPACE  OLD_SPACE  CODE_SPACE  MAP_SPACE  LO_SPACE
old         0          0     270264       32608      12144         0
new     21776          0     253168       32608       8184         0
Overall memory usage has increased by 720 bytes due to the eager
initialization of the Map weak cell caches.

Also extends --serialization-statistics to print out separate instance
type stats for objects in RO_SPACE as shown here:

  Read Only Instance types (count and bytes):
       404      16736  ONE_BYTE_INTERNALIZED_STRING_TYPE
         2         32  HEAP_NUMBER_TYPE
         5        240  ODDBALL_TYPE
        45       3960  MAP_TYPE
         1         16  BYTE_ARRAY_TYPE
         1         24  TUPLE2_TYPE
         1         16  FIXED_ARRAY_TYPE
         1         32  DESCRIPTOR_ARRAY_TYPE
        45        720  WEAK_CELL_TYPE

Bug: v8:7464
Change-Id: I12981c39c82a7057f68bbbe03f89fb57b0b4c6a6
Reviewed-on: https://chromium-review.googlesource.com/973722
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52435}
parent b4d1a3af
...@@ -570,10 +570,10 @@ inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) { ...@@ -570,10 +570,10 @@ inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
} }
// A flag that indicates whether objects should be pretenured when // A flag that indicates whether objects should be pretenured when
// allocated (allocated directly into the old generation) or not // allocated (allocated directly into either the old generation or read-only
// (allocated in the young generation if the object size and type // space), or not (allocated in the young generation if the object size and type
// allows). // allows).
enum PretenureFlag { NOT_TENURED, TENURED }; enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };
inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) { inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
switch (flag) { switch (flag) {
...@@ -581,6 +581,8 @@ inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) { ...@@ -581,6 +581,8 @@ inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
return os << "NotTenured"; return os << "NotTenured";
case TENURED: case TENURED:
return os << "Tenured"; return os << "Tenured";
case TENURED_READ_ONLY:
return os << "TenuredReadOnly";
} }
UNREACHABLE(); UNREACHABLE();
} }
......
...@@ -173,7 +173,8 @@ AllocationResult Heap::AllocateOneByteInternalizedString( ...@@ -173,7 +173,8 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
// Allocate string. // Allocate string.
HeapObject* result = nullptr; HeapObject* result = nullptr;
{ {
AllocationResult allocation = AllocateRaw(size, OLD_SPACE); AllocationResult allocation =
AllocateRaw(size, CanAllocateInReadOnlySpace() ? RO_SPACE : OLD_SPACE);
if (!allocation.To(&result)) return allocation; if (!allocation.To(&result)) return allocation;
} }
...@@ -338,6 +339,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space, ...@@ -338,6 +339,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
DCHECK(isolate_->serializer_enabled()); DCHECK(isolate_->serializer_enabled());
#endif #endif
DCHECK(!large_object); DCHECK(!large_object);
DCHECK(CanAllocateInReadOnlySpace());
allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment); allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
} else { } else {
// NEW_SPACE is not allowed here. // NEW_SPACE is not allowed here.
...@@ -417,6 +419,12 @@ void Heap::OnMoveEvent(HeapObject* target, HeapObject* source, ...@@ -417,6 +419,12 @@ void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
} }
} }
bool Heap::CanAllocateInReadOnlySpace() {
return !deserialization_complete_ &&
(isolate()->serializer_enabled() ||
!isolate()->initialized_from_snapshot());
}
void Heap::UpdateAllocationsHash(HeapObject* object) { void Heap::UpdateAllocationsHash(HeapObject* object) {
Address object_address = object->address(); Address object_address = object->address();
MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
......
...@@ -2542,8 +2542,9 @@ void Heap::ConfigureInitialOldGenerationSize() { ...@@ -2542,8 +2542,9 @@ void Heap::ConfigureInitialOldGenerationSize() {
AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
int instance_size) { int instance_size) {
DCHECK(CanAllocateInReadOnlySpace());
Object* result = nullptr; Object* result = nullptr;
AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); AllocationResult allocation = AllocateRaw(Map::kSize, RO_SPACE);
if (!allocation.To(&result)) return allocation; if (!allocation.To(&result)) return allocation;
// Map::cast cannot be used due to uninitialized map field. // Map::cast cannot be used due to uninitialized map field.
Map* map = reinterpret_cast<Map*>(result); Map* map = reinterpret_cast<Map*>(result);
...@@ -2729,15 +2730,18 @@ AllocationResult Heap::AllocatePropertyCell(Name* name) { ...@@ -2729,15 +2730,18 @@ AllocationResult Heap::AllocatePropertyCell(Name* name) {
return result; return result;
} }
AllocationResult Heap::AllocateWeakCell(HeapObject* value,
AllocationResult Heap::AllocateWeakCell(HeapObject* value) { PretenureFlag pretenure) {
DCHECK(pretenure != NOT_TENURED);
int size = WeakCell::kSize; int size = WeakCell::kSize;
STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize); STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
HeapObject* result = nullptr; HeapObject* result = nullptr;
{ {
AllocationResult allocation = AllocateRaw(size, OLD_SPACE); AllocationResult allocation =
AllocateRaw(size, pretenure == TENURED ? OLD_SPACE : RO_SPACE);
if (!allocation.To(&result)) return allocation; if (!allocation.To(&result)) return allocation;
} }
DCHECK_NOT_NULL(weak_cell_map());
result->set_map_after_allocation(weak_cell_map(), SKIP_WRITE_BARRIER); result->set_map_after_allocation(weak_cell_map(), SKIP_WRITE_BARRIER);
WeakCell::cast(result)->initialize(value); WeakCell::cast(result)->initialize(value);
return result; return result;
...@@ -3717,7 +3721,10 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars, ...@@ -3717,7 +3721,10 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
// Allocate string. // Allocate string.
HeapObject* result = nullptr; HeapObject* result = nullptr;
{ {
AllocationResult allocation = AllocateRaw(size, OLD_SPACE); // TODO(delphick): Look at reworking internalized string creation to avoid
// this hidden global mode switch.
AllocationResult allocation =
AllocateRaw(size, CanAllocateInReadOnlySpace() ? RO_SPACE : OLD_SPACE);
if (!allocation.To(&result)) return allocation; if (!allocation.To(&result)) return allocation;
} }
...@@ -3795,12 +3802,12 @@ AllocationResult Heap::AllocateRawTwoByteString(int length, ...@@ -3795,12 +3802,12 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
return result; return result;
} }
AllocationResult Heap::AllocateEmptyFixedArray() { AllocationResult Heap::AllocateEmptyFixedArray() {
DCHECK(CanAllocateInReadOnlySpace());
int size = FixedArray::SizeFor(0); int size = FixedArray::SizeFor(0);
HeapObject* result = nullptr; HeapObject* result = nullptr;
{ {
AllocationResult allocation = AllocateRaw(size, OLD_SPACE); AllocationResult allocation = AllocateRaw(size, RO_SPACE);
if (!allocation.To(&result)) return allocation; if (!allocation.To(&result)) return allocation;
} }
// Initialize the object. // Initialize the object.
...@@ -3876,10 +3883,9 @@ AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { ...@@ -3876,10 +3883,9 @@ AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
return result; return result;
} }
AllocationResult Heap::AllocateEmptyFixedTypedArray( AllocationResult Heap::AllocateEmptyFixedTypedArray(
ExternalArrayType array_type) { ExternalArrayType array_type, PretenureFlag pretenure) {
return AllocateFixedTypedArray(0, array_type, false, TENURED); return AllocateFixedTypedArray(0, array_type, false, pretenure);
} }
namespace { namespace {
...@@ -4071,7 +4077,8 @@ AllocationResult Heap::AllocatePropertyArray(int length, ...@@ -4071,7 +4077,8 @@ AllocationResult Heap::AllocatePropertyArray(int length,
PretenureFlag pretenure) { PretenureFlag pretenure) {
// Allow length = 0 for the empty_property_array singleton. // Allow length = 0 for the empty_property_array singleton.
DCHECK_LE(0, length); DCHECK_LE(0, length);
DCHECK_IMPLIES(length == 0, pretenure == TENURED); DCHECK_IMPLIES(length == 0,
pretenure == TENURED || pretenure == TENURED_READ_ONLY);
DCHECK(!InNewSpace(undefined_value())); DCHECK(!InNewSpace(undefined_value()));
HeapObject* result = nullptr; HeapObject* result = nullptr;
......
...@@ -933,6 +933,7 @@ class Heap { ...@@ -933,6 +933,7 @@ class Heap {
inline void OnMoveEvent(HeapObject* target, HeapObject* source, inline void OnMoveEvent(HeapObject* target, HeapObject* source,
int size_in_bytes); int size_in_bytes);
inline bool CanAllocateInReadOnlySpace();
bool deserialization_complete() const { return deserialization_complete_; } bool deserialization_complete() const { return deserialization_complete_; }
bool HasLowAllocationRate(); bool HasLowAllocationRate();
...@@ -1800,7 +1801,16 @@ class Heap { ...@@ -1800,7 +1801,16 @@ class Heap {
// Selects the proper allocation space based on the pretenuring decision. // Selects the proper allocation space based on the pretenuring decision.
static AllocationSpace SelectSpace(PretenureFlag pretenure) { static AllocationSpace SelectSpace(PretenureFlag pretenure) {
return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE; switch (pretenure) {
case TENURED_READ_ONLY:
return RO_SPACE;
case TENURED:
return OLD_SPACE;
case NOT_TENURED:
return NEW_SPACE;
default:
UNREACHABLE();
}
} }
static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() { static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
...@@ -2214,6 +2224,8 @@ class Heap { ...@@ -2214,6 +2224,8 @@ class Heap {
MUST_USE_RESULT AllocationResult MUST_USE_RESULT AllocationResult
AllocatePartialMap(InstanceType instance_type, int instance_size); AllocatePartialMap(InstanceType instance_type, int instance_size);
void FinalizePartialMap(Map* map);
// Allocate a block of memory in the given space (filled with a filler). // Allocate a block of memory in the given space (filled with a filler).
// Used as a fall-back for generated code when the space is full. // Used as a fall-back for generated code when the space is full.
MUST_USE_RESULT AllocationResult MUST_USE_RESULT AllocationResult
...@@ -2340,8 +2352,8 @@ class Heap { ...@@ -2340,8 +2352,8 @@ class Heap {
MUST_USE_RESULT AllocationResult AllocateEmptyBoilerplateDescription(); MUST_USE_RESULT AllocationResult AllocateEmptyBoilerplateDescription();
// Allocate empty fixed typed array of given type. // Allocate empty fixed typed array of given type.
MUST_USE_RESULT AllocationResult MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(
AllocateEmptyFixedTypedArray(ExternalArrayType array_type); ExternalArrayType array_type, PretenureFlag pretenure = TENURED);
// Allocate a tenured simple cell. // Allocate a tenured simple cell.
MUST_USE_RESULT AllocationResult AllocateCell(Object* value); MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
...@@ -2353,7 +2365,8 @@ class Heap { ...@@ -2353,7 +2365,8 @@ class Heap {
// Allocate a tenured JS global property cell initialized with the hole. // Allocate a tenured JS global property cell initialized with the hole.
MUST_USE_RESULT AllocationResult AllocatePropertyCell(Name* name); MUST_USE_RESULT AllocationResult AllocatePropertyCell(Name* name);
MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value); MUST_USE_RESULT AllocationResult
AllocateWeakCell(HeapObject* value, PretenureFlag pretenure = TENURED);
MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity); MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
......
...@@ -78,21 +78,26 @@ const Heap::StructTable Heap::struct_table[] = { ...@@ -78,21 +78,26 @@ const Heap::StructTable Heap::struct_table[] = {
#undef DATA_HANDLER_ELEMENT #undef DATA_HANDLER_ELEMENT
}; };
namespace { void Heap::FinalizePartialMap(Map* map) {
map->set_dependent_code(DependentCode::cast(empty_fixed_array()));
void FinalizePartialMap(Heap* heap, Map* map) {
map->set_dependent_code(DependentCode::cast(heap->empty_fixed_array()));
map->set_raw_transitions(MaybeObject::FromSmi(Smi::kZero)); map->set_raw_transitions(MaybeObject::FromSmi(Smi::kZero));
map->set_instance_descriptors(heap->empty_descriptor_array()); map->set_instance_descriptors(empty_descriptor_array());
if (FLAG_unbox_double_fields) { if (FLAG_unbox_double_fields) {
map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout()); map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
} }
map->set_prototype(heap->null_value()); map->set_prototype(null_value());
map->set_constructor_or_backpointer(heap->null_value()); map->set_constructor_or_backpointer(null_value());
// Eagerly initialize the WeakCell cache for the map as it will not be
// writable in RO_SPACE.
Object* result = nullptr;
AllocationResult weak_cell_allocation =
AllocateWeakCell(map, TENURED_READ_ONLY);
CHECK(weak_cell_allocation.To(&result));
WeakCell* weak_cell_cache = reinterpret_cast<WeakCell*>(result);
map->set_weak_cell_cache(weak_cell_cache);
} }
} // namespace
bool Heap::CreateInitialMaps() { bool Heap::CreateInitialMaps() {
HeapObject* obj = nullptr; HeapObject* obj = nullptr;
{ {
...@@ -125,6 +130,7 @@ bool Heap::CreateInitialMaps() { ...@@ -125,6 +130,7 @@ bool Heap::CreateInitialMaps() {
ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole); ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole);
ALLOCATE_PARTIAL_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell);
#undef ALLOCATE_PARTIAL_MAP #undef ALLOCATE_PARTIAL_MAP
} }
...@@ -143,21 +149,21 @@ bool Heap::CreateInitialMaps() { ...@@ -143,21 +149,21 @@ bool Heap::CreateInitialMaps() {
set_empty_weak_fixed_array(WeakFixedArray::cast(obj)); set_empty_weak_fixed_array(WeakFixedArray::cast(obj));
{ {
AllocationResult allocation = Allocate(null_map(), OLD_SPACE); AllocationResult allocation = Allocate(null_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_null_value(Oddball::cast(obj)); set_null_value(Oddball::cast(obj));
Oddball::cast(obj)->set_kind(Oddball::kNull); Oddball::cast(obj)->set_kind(Oddball::kNull);
{ {
AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE); AllocationResult allocation = Allocate(undefined_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_undefined_value(Oddball::cast(obj)); set_undefined_value(Oddball::cast(obj));
Oddball::cast(obj)->set_kind(Oddball::kUndefined); Oddball::cast(obj)->set_kind(Oddball::kUndefined);
DCHECK(!InNewSpace(undefined_value())); DCHECK(!InNewSpace(undefined_value()));
{ {
AllocationResult allocation = Allocate(the_hole_map(), OLD_SPACE); AllocationResult allocation = Allocate(the_hole_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_the_hole_value(Oddball::cast(obj)); set_the_hole_value(Oddball::cast(obj));
...@@ -176,7 +182,7 @@ bool Heap::CreateInitialMaps() { ...@@ -176,7 +182,7 @@ bool Heap::CreateInitialMaps() {
// Allocate the empty enum cache. // Allocate the empty enum cache.
{ {
AllocationResult allocation = Allocate(tuple2_map(), OLD_SPACE); AllocationResult allocation = Allocate(tuple2_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_empty_enum_cache(EnumCache::cast(obj)); set_empty_enum_cache(EnumCache::cast(obj));
...@@ -186,8 +192,8 @@ bool Heap::CreateInitialMaps() { ...@@ -186,8 +192,8 @@ bool Heap::CreateInitialMaps() {
// Allocate the empty descriptor array. // Allocate the empty descriptor array.
{ {
STATIC_ASSERT(DescriptorArray::kFirstIndex != 0); STATIC_ASSERT(DescriptorArray::kFirstIndex != 0);
AllocationResult allocation = AllocationResult allocation = AllocateUninitializedFixedArray(
AllocateUninitializedFixedArray(DescriptorArray::kFirstIndex, TENURED); DescriptorArray::kFirstIndex, TENURED_READ_ONLY);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
obj->set_map_no_write_barrier(descriptor_array_map()); obj->set_map_no_write_barrier(descriptor_array_map());
...@@ -198,19 +204,20 @@ bool Heap::CreateInitialMaps() { ...@@ -198,19 +204,20 @@ bool Heap::CreateInitialMaps() {
empty_enum_cache()); empty_enum_cache());
// Fix the instance_descriptors for the existing maps. // Fix the instance_descriptors for the existing maps.
FinalizePartialMap(this, meta_map()); FinalizePartialMap(meta_map());
FinalizePartialMap(this, fixed_array_map()); FinalizePartialMap(weak_cell_map());
FinalizePartialMap(this, weak_fixed_array_map()); FinalizePartialMap(fixed_array_map());
FinalizePartialMap(this, fixed_cow_array_map()); FinalizePartialMap(weak_fixed_array_map());
FinalizePartialMap(this, descriptor_array_map()); FinalizePartialMap(fixed_cow_array_map());
FinalizePartialMap(this, undefined_map()); FinalizePartialMap(descriptor_array_map());
FinalizePartialMap(undefined_map());
undefined_map()->set_is_undetectable(true); undefined_map()->set_is_undetectable(true);
FinalizePartialMap(this, null_map()); FinalizePartialMap(null_map());
null_map()->set_is_undetectable(true); null_map()->set_is_undetectable(true);
FinalizePartialMap(this, the_hole_map()); FinalizePartialMap(the_hole_map());
for (unsigned i = 0; i < arraysize(struct_table); ++i) { for (unsigned i = 0; i < arraysize(struct_table); ++i) {
const StructTable& entry = struct_table[i]; const StructTable& entry = struct_table[i];
FinalizePartialMap(this, Map::cast(roots_[entry.index])); FinalizePartialMap(Map::cast(roots_[entry.index]));
} }
{ // Map allocation { // Map allocation
...@@ -307,7 +314,6 @@ bool Heap::CreateInitialMaps() { ...@@ -307,7 +314,6 @@ bool Heap::CreateInitialMaps() {
} }
ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
...@@ -379,23 +385,23 @@ bool Heap::CreateInitialMaps() { ...@@ -379,23 +385,23 @@ bool Heap::CreateInitialMaps() {
set_empty_boilerplate_description(BoilerplateDescription::cast(obj)); set_empty_boilerplate_description(BoilerplateDescription::cast(obj));
{ {
AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE); AllocationResult allocation = Allocate(boolean_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_true_value(Oddball::cast(obj)); set_true_value(Oddball::cast(obj));
Oddball::cast(obj)->set_kind(Oddball::kTrue); Oddball::cast(obj)->set_kind(Oddball::kTrue);
{ {
AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE); AllocationResult allocation = Allocate(boolean_map(), RO_SPACE);
if (!allocation.To(&obj)) return false; if (!allocation.To(&obj)) return false;
} }
set_false_value(Oddball::cast(obj)); set_false_value(Oddball::cast(obj));
Oddball::cast(obj)->set_kind(Oddball::kFalse); Oddball::cast(obj)->set_kind(Oddball::kFalse);
{ // Empty arrays // Empty arrays
{ {
ByteArray * byte_array; ByteArray* byte_array;
if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false; if (!AllocateByteArray(0, TENURED_READ_ONLY).To(&byte_array)) return false;
set_empty_byte_array(byte_array); set_empty_byte_array(byte_array);
} }
...@@ -408,14 +414,15 @@ bool Heap::CreateInitialMaps() { ...@@ -408,14 +414,15 @@ bool Heap::CreateInitialMaps() {
#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
{ \ { \
FixedTypedArrayBase* obj; \ FixedTypedArrayBase* obj; \
if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \ if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array, TENURED) \
.To(&obj)) \
return false; \ return false; \
set_empty_fixed_##type##_array(obj); \ set_empty_fixed_##type##_array(obj); \
} }
TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY) TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY)
#undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY
}
DCHECK(!InNewSpace(empty_fixed_array())); DCHECK(!InNewSpace(empty_fixed_array()));
return true; return true;
} }
...@@ -441,16 +448,26 @@ void Heap::CreateInitialObjects() { ...@@ -441,16 +448,26 @@ void Heap::CreateInitialObjects() {
DCHECK(std::signbit(minus_zero_value()->Number())); DCHECK(std::signbit(minus_zero_value()->Number()));
set_nan_value(*factory->NewHeapNumber( set_nan_value(*factory->NewHeapNumber(
std::numeric_limits<double>::quiet_NaN(), IMMUTABLE, TENURED)); std::numeric_limits<double>::quiet_NaN(), IMMUTABLE, TENURED_READ_ONLY));
set_hole_nan_value( set_hole_nan_value(*factory->NewHeapNumberFromBits(kHoleNanInt64, IMMUTABLE,
*factory->NewHeapNumberFromBits(kHoleNanInt64, IMMUTABLE, TENURED)); TENURED_READ_ONLY));
set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED)); set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED));
set_minus_infinity_value( set_minus_infinity_value(
*factory->NewHeapNumber(-V8_INFINITY, IMMUTABLE, TENURED)); *factory->NewHeapNumber(-V8_INFINITY, IMMUTABLE, TENURED));
// Allocate cache for single character one byte strings.
set_single_character_string_cache(
*factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED));
// Allocate initial string table. // Allocate initial string table.
set_string_table(*StringTable::New(isolate(), kInitialStringTableSize)); set_string_table(*StringTable::New(isolate(), kInitialStringTableSize));
for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
Handle<String> str =
factory->InternalizeUtf8String(constant_string_table[i].contents);
roots_[constant_string_table[i].index] = *str;
}
// Allocate // Allocate
// Finish initializing oddballs after creating the string table. // Finish initializing oddballs after creating the string table.
...@@ -504,12 +521,6 @@ void Heap::CreateInitialObjects() { ...@@ -504,12 +521,6 @@ void Heap::CreateInitialObjects() {
handle(Smi::FromInt(-7), isolate()), "undefined", handle(Smi::FromInt(-7), isolate()), "undefined",
Oddball::kStaleRegister)); Oddball::kStaleRegister));
for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
Handle<String> str =
factory->InternalizeUtf8String(constant_string_table[i].contents);
roots_[constant_string_table[i].index] = *str;
}
// Create the code_stubs dictionary. The initial size is set to avoid // Create the code_stubs dictionary. The initial size is set to avoid
// expanding the dictionary during bootstrapping. // expanding the dictionary during bootstrapping.
set_code_stubs(*SimpleNumberDictionary::New(isolate(), 128)); set_code_stubs(*SimpleNumberDictionary::New(isolate(), 128));
...@@ -560,10 +571,6 @@ void Heap::CreateInitialObjects() { ...@@ -560,10 +571,6 @@ void Heap::CreateInitialObjects() {
set_number_string_cache( set_number_string_cache(
*factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED)); *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED));
// Allocate cache for single character one byte strings.
set_single_character_string_cache(
*factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED));
// Allocate cache for string split and regexp-multiple. // Allocate cache for string split and regexp-multiple.
set_string_split_cache(*factory->NewFixedArray( set_string_split_cache(*factory->NewFixedArray(
RegExpResultsCache::kRegExpResultsCacheSize, TENURED)); RegExpResultsCache::kRegExpResultsCacheSize, TENURED));
......
...@@ -314,6 +314,7 @@ HeapObject* PagedSpace::TryAllocateLinearlyAligned( ...@@ -314,6 +314,7 @@ HeapObject* PagedSpace::TryAllocateLinearlyAligned(
AllocationResult PagedSpace::AllocateRawUnaligned( AllocationResult PagedSpace::AllocateRawUnaligned(
int size_in_bytes, UpdateSkipList update_skip_list) { int size_in_bytes, UpdateSkipList update_skip_list) {
DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
if (!EnsureLinearAllocationArea(size_in_bytes)) { if (!EnsureLinearAllocationArea(size_in_bytes)) {
return AllocationResult::Retry(identity()); return AllocationResult::Retry(identity());
} }
...@@ -329,7 +330,8 @@ AllocationResult PagedSpace::AllocateRawUnaligned( ...@@ -329,7 +330,8 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes, AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
AllocationAlignment alignment) { AllocationAlignment alignment) {
DCHECK(identity() == OLD_SPACE); DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
int allocation_size = size_in_bytes; int allocation_size = size_in_bytes;
HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment); HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
if (object == nullptr) { if (object == nullptr) {
......
...@@ -1917,7 +1917,8 @@ void PagedSpace::Verify(ObjectVisitor* visitor) { ...@@ -1917,7 +1917,8 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// be in map space. // be in map space.
Map* map = object->map(); Map* map = object->map();
CHECK(map->IsMap()); CHECK(map->IsMap());
CHECK(heap()->map_space()->Contains(map)); CHECK(heap()->map_space()->Contains(map) ||
heap()->read_only_space()->Contains(map));
// Perform space-specific object verification. // Perform space-specific object verification.
VerifyObject(object); VerifyObject(object);
...@@ -2368,10 +2369,11 @@ void NewSpace::Verify() { ...@@ -2368,10 +2369,11 @@ void NewSpace::Verify() {
HeapObject* object = HeapObject::FromAddress(current); HeapObject* object = HeapObject::FromAddress(current);
// The first word should be a map, and we expect all map pointers to // The first word should be a map, and we expect all map pointers to
// be in map space. // be in map space or read-only space.
Map* map = object->map(); Map* map = object->map();
CHECK(map->IsMap()); CHECK(map->IsMap());
CHECK(heap()->map_space()->Contains(map)); CHECK(heap()->map_space()->Contains(map) ||
heap()->read_only_space()->Contains(map));
// The object should not be code or a map. // The object should not be code or a map.
CHECK(!object->IsMap()); CHECK(!object->IsMap());
...@@ -3445,10 +3447,11 @@ void LargeObjectSpace::Verify() { ...@@ -3445,10 +3447,11 @@ void LargeObjectSpace::Verify() {
CHECK(object->address() == page->area_start()); CHECK(object->address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be // The first word should be a map, and we expect all map pointers to be
// in map space. // in map space or read-only space.
Map* map = object->map(); Map* map = object->map();
CHECK(map->IsMap()); CHECK(map->IsMap());
CHECK(heap()->map_space()->Contains(map)); CHECK(heap()->map_space()->Contains(map) ||
heap()->read_only_space()->Contains(map));
// We have only code, sequential strings, external strings (sequential // We have only code, sequential strings, external strings (sequential
// strings that have been morphed into external strings), thin strings // strings that have been morphed into external strings), thin strings
......
...@@ -25,13 +25,19 @@ Serializer<AllocatorT>::Serializer(Isolate* isolate) ...@@ -25,13 +25,19 @@ Serializer<AllocatorT>::Serializer(Isolate* isolate)
if (FLAG_serialization_statistics) { if (FLAG_serialization_statistics) {
instance_type_count_ = NewArray<int>(kInstanceTypes); instance_type_count_ = NewArray<int>(kInstanceTypes);
instance_type_size_ = NewArray<size_t>(kInstanceTypes); instance_type_size_ = NewArray<size_t>(kInstanceTypes);
read_only_instance_type_count_ = NewArray<int>(kInstanceTypes);
read_only_instance_type_size_ = NewArray<size_t>(kInstanceTypes);
for (int i = 0; i < kInstanceTypes; i++) { for (int i = 0; i < kInstanceTypes; i++) {
instance_type_count_[i] = 0; instance_type_count_[i] = 0;
instance_type_size_[i] = 0; instance_type_size_[i] = 0;
read_only_instance_type_count_[i] = 0;
read_only_instance_type_size_[i] = 0;
} }
} else { } else {
instance_type_count_ = nullptr; instance_type_count_ = nullptr;
instance_type_size_ = nullptr; instance_type_size_ = nullptr;
read_only_instance_type_count_ = nullptr;
read_only_instance_type_size_ = nullptr;
} }
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
} }
...@@ -43,16 +49,24 @@ Serializer<AllocatorT>::~Serializer() { ...@@ -43,16 +49,24 @@ Serializer<AllocatorT>::~Serializer() {
if (instance_type_count_ != nullptr) { if (instance_type_count_ != nullptr) {
DeleteArray(instance_type_count_); DeleteArray(instance_type_count_);
DeleteArray(instance_type_size_); DeleteArray(instance_type_size_);
DeleteArray(read_only_instance_type_count_);
DeleteArray(read_only_instance_type_size_);
} }
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
} }
#ifdef OBJECT_PRINT #ifdef OBJECT_PRINT
template <class AllocatorT> template <class AllocatorT>
void Serializer<AllocatorT>::CountInstanceType(Map* map, int size) { void Serializer<AllocatorT>::CountInstanceType(Map* map, int size,
AllocationSpace space) {
int instance_type = map->instance_type(); int instance_type = map->instance_type();
if (space != RO_SPACE) {
instance_type_count_[instance_type]++; instance_type_count_[instance_type]++;
instance_type_size_[instance_type] += size; instance_type_size_[instance_type] += size;
} else {
read_only_instance_type_count_[instance_type]++;
read_only_instance_type_size_[instance_type] += size;
}
} }
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
...@@ -72,6 +86,21 @@ void Serializer<AllocatorT>::OutputStatistics(const char* name) { ...@@ -72,6 +86,21 @@ void Serializer<AllocatorT>::OutputStatistics(const char* name) {
} }
INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE) INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
#undef PRINT_INSTANCE_TYPE #undef PRINT_INSTANCE_TYPE
size_t read_only_total = 0;
#define UPDATE_TOTAL(Name) \
read_only_total += read_only_instance_type_size_[Name];
INSTANCE_TYPE_LIST(UPDATE_TOTAL)
#undef UPDATE_TOTAL
if (read_only_total > 0) {
PrintF("\n Read Only Instance types (count and bytes):\n");
#define PRINT_INSTANCE_TYPE(Name) \
if (read_only_instance_type_count_[Name]) { \
PrintF("%10d %10" PRIuS " %s\n", read_only_instance_type_count_[Name], \
read_only_instance_type_size_[Name], #Name); \
}
INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
#undef PRINT_INSTANCE_TYPE
}
PrintF("\n"); PrintF("\n");
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
} }
...@@ -360,7 +389,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue( ...@@ -360,7 +389,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
#ifdef OBJECT_PRINT #ifdef OBJECT_PRINT
if (FLAG_serialization_statistics) { if (FLAG_serialization_statistics) {
serializer_->CountInstanceType(map, size); serializer_->CountInstanceType(map, size, space);
} }
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
......
...@@ -225,7 +225,7 @@ class Serializer : public SerializerDeserializer { ...@@ -225,7 +225,7 @@ class Serializer : public SerializerDeserializer {
void OutputStatistics(const char* name); void OutputStatistics(const char* name);
#ifdef OBJECT_PRINT #ifdef OBJECT_PRINT
void CountInstanceType(Map* map, int size); void CountInstanceType(Map* map, int size, AllocationSpace space);
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
#ifdef DEBUG #ifdef DEBUG
...@@ -255,6 +255,8 @@ class Serializer : public SerializerDeserializer { ...@@ -255,6 +255,8 @@ class Serializer : public SerializerDeserializer {
static const int kInstanceTypes = LAST_TYPE + 1; static const int kInstanceTypes = LAST_TYPE + 1;
int* instance_type_count_; int* instance_type_count_;
size_t* instance_type_size_; size_t* instance_type_size_;
int* read_only_instance_type_count_;
size_t* read_only_instance_type_size_;
#endif // OBJECT_PRINT #endif // OBJECT_PRINT
#ifdef DEBUG #ifdef DEBUG
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment