Commit 8f9721bb authored by erik.corry@gmail.com

Shave 39% from snapshot size.

Review URL: http://codereview.chromium.org/8344079

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9722 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 5d72dde9
......@@ -1027,9 +1027,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Set up the roots register.
ExternalReference roots_address =
ExternalReference::roots_address(masm->isolate());
__ mov(r10, Operand(roots_address));
ExternalReference roots_array_start =
ExternalReference::roots_array_start(masm->isolate());
__ mov(r10, Operand(roots_array_start));
// Push the function and the receiver onto the stack.
__ push(r1);
......
......@@ -756,8 +756,9 @@ void Deoptimizer::EntryGenerator::Generate() {
__ pop(ip); // remove lr
// Set up the roots register.
ExternalReference roots_address = ExternalReference::roots_address(isolate);
__ mov(r10, Operand(roots_address));
ExternalReference roots_array_start =
ExternalReference::roots_array_start(isolate);
__ mov(r10, Operand(roots_array_start));
__ pop(ip); // remove pc
__ pop(r7); // get continuation, leave pc on stack
......
......@@ -834,8 +834,8 @@ ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
}
ExternalReference ExternalReference::roots_address(Isolate* isolate) {
return ExternalReference(isolate->heap()->roots_address());
ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
return ExternalReference(isolate->heap()->roots_array_start());
}
......
......@@ -593,8 +593,8 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference keyed_lookup_cache_keys(Isolate* isolate);
static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate);
// Static variable Heap::roots_address()
static ExternalReference roots_address(Isolate* isolate);
// Static variable Heap::roots_array_start()
static ExternalReference roots_array_start(Isolate* isolate);
// Static variable StackGuard::address_of_jslimit()
static ExternalReference address_of_stack_limit(Isolate* isolate);
......
......@@ -64,18 +64,27 @@ inline Heap* _inline_get_heap_();
V(Oddball, null_value, NullValue) \
V(Oddball, true_value, TrueValue) \
V(Oddball, false_value, FalseValue) \
V(Oddball, arguments_marker, ArgumentsMarker) \
V(Oddball, frame_alignment_marker, FrameAlignmentMarker) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \
V(Map, shared_function_info_map, SharedFunctionInfoMap) \
V(Map, meta_map, MetaMap) \
V(Map, ascii_symbol_map, AsciiSymbolMap) \
V(Map, heap_number_map, HeapNumberMap) \
V(Map, global_context_map, GlobalContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, code_map, CodeMap) \
V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Map, meta_map, MetaMap) \
V(Map, hash_table_map, HashTableMap) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray) \
V(String, empty_string, EmptyString) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
V(Smi, stack_limit, StackLimit) \
V(Oddball, frame_alignment_marker, FrameAlignmentMarker) \
V(Oddball, arguments_marker, ArgumentsMarker) \
V(FixedArray, number_string_cache, NumberStringCache) \
V(Object, instanceof_cache_function, InstanceofCacheFunction) \
V(Object, instanceof_cache_map, InstanceofCacheMap) \
......@@ -83,11 +92,6 @@ inline Heap* _inline_get_heap_();
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, string_split_cache, StringSplitCache) \
V(Object, termination_exception, TerminationException) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray) \
V(String, empty_string, EmptyString) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
V(Map, string_map, StringMap) \
V(Map, ascii_string_map, AsciiStringMap) \
V(Map, symbol_map, SymbolMap) \
......@@ -95,7 +99,6 @@ inline Heap* _inline_get_heap_();
V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
V(Map, sliced_string_map, SlicedStringMap) \
V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
V(Map, ascii_symbol_map, AsciiSymbolMap) \
V(Map, cons_symbol_map, ConsSymbolMap) \
V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
V(Map, external_symbol_map, ExternalSymbolMap) \
......@@ -120,10 +123,7 @@ inline Heap* _inline_get_heap_();
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
V(Map, block_context_map, BlockContextMap) \
V(Map, code_map, CodeMap) \
V(Map, oddball_map, OddballMap) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \
V(Map, shared_function_info_map, SharedFunctionInfoMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, foreign_map, ForeignMap) \
V(HeapNumber, nan_value, NanValue) \
......@@ -1097,7 +1097,7 @@ class Heap {
inline void SetLastScriptId(Object* last_script_id);
// Generated code can embed this address to get access to the roots.
Object** roots_address() { return roots_; }
Object** roots_array_start() { return roots_; }
Address* store_buffer_top_address() {
return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
......@@ -1474,7 +1474,7 @@ class Heap {
int unflattened_strings_length_;
#define ROOT_ACCESSOR(type, name, camel_name) \
inline void set_##name(type* value) { \
inline void set_##name(type* value) { \
roots_[k##camel_name##RootIndex] = value; \
}
ROOT_LIST(ROOT_ACCESSOR)
......
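Note on the heap.h hunks above: the root list is reordered so that the most commonly referenced constants (empty containers, frequently used maps, the oddball markers) land in the first 32 strong-root slots. Only those indices can be expressed by the new one-byte kRootArrayLowConstants/kRootArrayHighConstants tags added in serialize.h further down, which is presumably where much of the snapshot-size saving comes from. A minimal editorial sketch of that encodability test, assuming the constants introduced later in this commit:

// Sketch only (not part of the commit): a root reference can collapse to a
// single tag byte iff its index fits in the 32 reserved constant encodings.
static const int kRootArrayNumberOfConstantEncodings = 0x20;  // 32 roots
static inline bool IsSingleByteEncodableRoot(int root_index) {
  return root_index >= 0 && root_index < kRootArrayNumberOfConstantEncodings;
}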
......@@ -3882,11 +3882,11 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
Register scratch = scratch2;
// Load the number string cache.
ExternalReference roots_address =
ExternalReference::roots_address(masm->isolate());
ExternalReference roots_array_start =
ExternalReference::roots_array_start(masm->isolate());
__ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
__ mov(number_string_cache,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
// Make the hash mask from the length of the number string cache. It
// contains two elements (number and string) for each cache entry.
__ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
......@@ -4854,8 +4854,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
ExternalReference roots_address =
ExternalReference::roots_address(masm->isolate());
ExternalReference roots_array_start =
ExternalReference::roots_array_start(masm->isolate());
ASSERT_EQ(object.code(), InstanceofStub::left().code());
ASSERT_EQ(function.code(), InstanceofStub::right().code());
......@@ -4877,16 +4877,17 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Look up the function and the map in the instanceof cache.
Label miss;
__ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ cmp(function,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
__ cmp(function, Operand::StaticArray(scratch,
times_pointer_size,
roots_array_start));
__ j(not_equal, &miss, Label::kNear);
__ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
__ cmp(map, Operand::StaticArray(
scratch, times_pointer_size, roots_address));
scratch, times_pointer_size, roots_array_start));
__ j(not_equal, &miss, Label::kNear);
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(eax, Operand::StaticArray(
scratch, times_pointer_size, roots_address));
scratch, times_pointer_size, roots_array_start));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&miss);
}
......@@ -4902,9 +4903,10 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
map);
__ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
function);
} else {
// The constants for the code patching are based on no push instructions
......@@ -4941,7 +4943,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Set(eax, Immediate(0));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(scratch,
times_pointer_size, roots_address), eax);
times_pointer_size, roots_array_start), eax);
} else {
// Get return address and delta to inlined map check.
__ mov(eax, factory->true_value());
......@@ -4963,7 +4965,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Set(eax, Immediate(Smi::FromInt(1)));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(
scratch, times_pointer_size, roots_address), eax);
scratch, times_pointer_size, roots_array_start), eax);
} else {
// Get return address and delta to inlined map check.
__ mov(eax, factory->false_value());
......@@ -5752,11 +5754,11 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// Load the symbol table.
Register symbol_table = c2;
ExternalReference roots_address =
ExternalReference::roots_address(masm->isolate());
ExternalReference roots_array_start =
ExternalReference::roots_array_start(masm->isolate());
__ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
__ mov(symbol_table,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
// Calculate capacity mask from the symbol table capacity.
Register mask = scratch2;
......
......@@ -352,7 +352,7 @@ void MacroAssembler::SafePush(const Immediate& x) {
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
// see ROOT_ACCESSOR macro in factory.h
Handle<Object> value(&isolate()->heap()->roots_address()[index]);
Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
cmp(with, value);
}
......
......@@ -1053,9 +1053,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Set up the roots register.
ExternalReference roots_address =
ExternalReference::roots_address(masm->isolate());
__ li(s6, Operand(roots_address));
ExternalReference roots_array_start =
ExternalReference::roots_array_start(masm->isolate());
__ li(s6, Operand(roots_array_start));
// Push the function and the receiver onto the stack.
__ Push(a1, a2);
......
......@@ -312,7 +312,6 @@ int main(int argc, char** argv) {
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of the context.
// TODO(gc): request full compaction?
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
i::Object* raw_context = *(v8::Utils::OpenHandle(*context));
context.Dispose();
......
......@@ -318,10 +318,10 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
// Miscellaneous
Add(ExternalReference::roots_address(isolate).address(),
Add(ExternalReference::roots_array_start(isolate).address(),
UNCLASSIFIED,
3,
"Heap::roots_address()");
"Heap::roots_array_start()");
Add(ExternalReference::address_of_stack_limit(isolate).address(),
UNCLASSIFIED,
4,
......@@ -784,7 +784,7 @@ void Deserializer::ReadChunk(Object** current,
ReadObject(space_number, dest_space, &new_object); \
} else if (where == kRootArray) { \
int root_id = source_->GetInt(); \
new_object = isolate->heap()->roots_address()[root_id]; \
new_object = isolate->heap()->roots_array_start()[root_id]; \
} else if (where == kPartialSnapshotCache) { \
int cache_index = source_->GetInt(); \
new_object = isolate->serialize_partial_snapshot_cache() \
......@@ -883,11 +883,17 @@ void Deserializer::ReadChunk(Object** current,
CASE_STATEMENT(where, how, within, kLargeCode) \
CASE_BODY(where, how, within, kLargeCode, kUnknownOffsetFromStart)
#define EMIT_COMMON_REFERENCE_PATTERNS(pseudo_space_number, \
space_number, \
offset_from_start) \
CASE_STATEMENT(kFromStart, kPlain, kStartOfObject, pseudo_space_number) \
CASE_BODY(kFromStart, kPlain, kStartOfObject, space_number, offset_from_start)
#define FOUR_CASES(byte_code) \
case byte_code: \
case byte_code + 1: \
case byte_code + 2: \
case byte_code + 3:
#define SIXTEEN_CASES(byte_code) \
FOUR_CASES(byte_code) \
FOUR_CASES(byte_code + 4) \
FOUR_CASES(byte_code + 8) \
FOUR_CASES(byte_code + 12)
// We generate 15 cases and bodies that process special tags that combine
// the raw data tag and the length into one byte.
......@@ -911,6 +917,32 @@ void Deserializer::ReadChunk(Object** current,
break;
}
SIXTEEN_CASES(kRootArrayLowConstants)
SIXTEEN_CASES(kRootArrayHighConstants) {
int root_id = RootArrayConstantFromByteCode(data);
*current++ = isolate->heap()->roots_array_start()[root_id];
break;
}
case kRepeat: {
int repeats = source_->GetInt();
Object* object = current[-1];
for (int i = 0; i < repeats; i++) current[i] = object;
current += repeats;
break;
}
STATIC_ASSERT(kMaxRepeats == 12);
FOUR_CASES(kConstantRepeat)
FOUR_CASES(kConstantRepeat + 4)
FOUR_CASES(kConstantRepeat + 8) {
int repeats = RepeatsForCode(data);
Object* object = current[-1];
for (int i = 0; i < repeats; i++) current[i] = object;
current += repeats;
break;
}
// Deserialize a new object and write a pointer to it to the current
// object.
ONE_PER_SPACE(kNewObject, kPlain, kStartOfObject)
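The two new case groups above add one-byte decoding paths: a root-array constant tag writes a root pointer straight into the output, and a repeat tag duplicates the word that was just written. For illustration, a hedged sketch of how a constant-repeat tag expands; the byte value follows the kConstantRepeat constant defined in serialize.h below, while the helper name and the use of the surrounding Object type are illustrative only:

// Illustrative sketch, not from the commit: expanding a repeat tag into the
// output buffer. 'data' is the tag byte, current[-1] the word just written.
static inline Object** ExpandRepeat(int data, Object** current) {
  const int kConstantRepeat = 0x74;             // from serialize.h below
  int repeats = data - (kConstantRepeat - 1);   // same as RepeatsForCode(data)
  Object* object = current[-1];
  for (int i = 0; i < repeats; i++) current[i] = object;
  return current + repeats;
}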
......@@ -936,9 +968,6 @@ void Deserializer::ReadChunk(Object** current,
// start and write a pointer to its first instruction to the current code
// object.
ALL_SPACES(kFromStart, kFromCode, kFirstInstruction)
// Find an already deserialized object at one of the predetermined popular
// offsets from the start and write a pointer to it in the current object.
COMMON_REFERENCE_PATTERNS(EMIT_COMMON_REFERENCE_PATTERNS)
// Find an object in the roots array and write a pointer to it to the
// current object.
CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
......@@ -980,7 +1009,6 @@ void Deserializer::ReadChunk(Object** current,
#undef CASE_BODY
#undef ONE_PER_SPACE
#undef ALL_SPACES
#undef EMIT_COMMON_REFERENCE_PATTERNS
#undef ASSIGN_DEST_SPACE
case kNewPage: {
......@@ -1067,7 +1095,8 @@ Serializer::Serializer(SnapshotByteSink* sink)
: sink_(sink),
current_root_index_(0),
external_reference_encoder_(new ExternalReferenceEncoder),
large_object_total_(0) {
large_object_total_(0),
root_index_wave_front_(0) {
// The serializer is meant to be used only to generate initial heap images
// from a context in which there is only one isolate.
ASSERT(Isolate::Current()->IsDefaultIsolate());
......@@ -1124,6 +1153,9 @@ void Serializer::VisitPointers(Object** start, Object** end) {
Isolate* isolate = Isolate::Current();
for (Object** current = start; current < end; current++) {
if (start == isolate->heap()->roots_array_start()) {
root_index_wave_front_ = Max(root_index_wave_front_, current - start);
}
if (reinterpret_cast<Address>(current) ==
isolate->heap()->store_buffer()->TopAddress()) {
sink_->Put(kSkip, "Skip");
......@@ -1191,10 +1223,12 @@ int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
}
int PartialSerializer::RootIndex(HeapObject* heap_object) {
for (int i = 0; i < Heap::kRootListLength; i++) {
Object* root = HEAP->roots_address()[i];
if (root == heap_object) return i;
int Serializer::RootIndex(HeapObject* heap_object) {
Heap* heap = HEAP;
if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
for (int i = 0; i < root_index_wave_front_; i++) {
Object* root = heap->roots_array_start()[i];
if (!root->IsSmi() && root == heap_object) return i;
}
return kInvalidRootIndex;
}
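The RootIndex change above folds the partial serializer's root lookup into the base Serializer and bounds it by root_index_wave_front_, which VisitPointers advances as the root array itself is serialized (PartialSerializer simply sets it to Heap::kStrongRootListLength up front). The apparent intent, an inference rather than something stated in the commit, is that a root may only be referred to by index once its slot has already been emitted, since the deserializer fills the root array front to back. A minimal sketch of that guard, assuming those semantics:

// Sketch only: whether a candidate root slot is safe to reference by index.
static inline bool CanReferenceRootByIndex(intptr_t candidate_index,
                                           intptr_t wave_front) {
  return candidate_index >= 0 && candidate_index < wave_front;
}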
......@@ -1230,18 +1264,8 @@ void Serializer::SerializeReferenceToPreviousObject(
// all objects) then we should shift out the bits that are always 0.
if (!SpaceIsLarge(space)) address >>= kObjectAlignmentBits;
if (from_start) {
#define COMMON_REFS_CASE(pseudo_space, actual_space, offset) \
if (space == actual_space && address == offset && \
how_to_code == kPlain && where_to_point == kStartOfObject) { \
sink_->Put(kFromStart + how_to_code + where_to_point + \
pseudo_space, "RefSer"); \
} else /* NOLINT */
COMMON_REFERENCE_PATTERNS(COMMON_REFS_CASE)
#undef COMMON_REFS_CASE
{ /* NOLINT */
sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
sink_->PutInt(address, "address");
}
sink_->Put(kFromStart + how_to_code + where_to_point + space, "RefSer");
sink_->PutInt(address, "address");
} else {
sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
sink_->PutInt(address, "address");
......@@ -1256,6 +1280,12 @@ void StartupSerializer::SerializeObject(
CHECK(o->IsHeapObject());
HeapObject* heap_object = HeapObject::cast(o);
int root_index;
if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
if (address_mapper_.IsMapped(heap_object)) {
int space = SpaceOfAlreadySerializedObject(heap_object);
int address = address_mapper_.MappedTo(heap_object);
......@@ -1286,6 +1316,28 @@ void StartupSerializer::SerializeWeakReferences() {
}
void Serializer::PutRoot(int root_index,
HeapObject* object,
SerializerDeserializer::HowToCode how_to_code,
SerializerDeserializer::WhereToPoint where_to_point) {
if (how_to_code == kPlain &&
where_to_point == kStartOfObject &&
root_index < kRootArrayNumberOfConstantEncodings &&
!HEAP->InNewSpace(object)) {
if (root_index < kRootArrayNumberOfLowConstantEncodings) {
sink_->Put(kRootArrayLowConstants + root_index, "RootLoConstant");
} else {
sink_->Put(kRootArrayHighConstants + root_index -
kRootArrayNumberOfLowConstantEncodings,
"RootHiConstant");
}
} else {
sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
sink_->PutInt(root_index, "root_index");
}
}
void PartialSerializer::SerializeObject(
Object* o,
HowToCode how_to_code,
......@@ -1295,8 +1347,7 @@ void PartialSerializer::SerializeObject(
int root_index;
if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
sink_->PutInt(root_index, "root_index");
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
......@@ -1374,9 +1425,26 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
if (current < end) OutputRawData(reinterpret_cast<Address>(current));
while (current < end && !(*current)->IsSmi()) {
serializer_->SerializeObject(*current, kPlain, kStartOfObject);
bytes_processed_so_far_ += kPointerSize;
current++;
if (current != start &&
current[0] == current[-1] &&
!HEAP->InNewSpace(*current)) {
int repeat_count = 1;
while (current < end - 1 && current[repeat_count] == current[0]) {
repeat_count++;
}
current += repeat_count;
bytes_processed_so_far_ += repeat_count * kPointerSize;
if (repeat_count > kMaxRepeats) {
sink_->Put(kRepeat, "SerializeRepeats");
sink_->PutInt(repeat_count, "SerializeRepeats");
} else {
sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
}
} else {
serializer_->SerializeObject(*current, kPlain, kStartOfObject);
bytes_processed_so_far_ += kPointerSize;
current++;
}
}
}
}
......
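The VisitPointers hunk above is the encoding side of the repeat tags: when the next word to serialize equals the one just emitted (and is not in new space), the serializer counts the run and emits a single kRepeat or kConstantRepeat tag instead of one reference per slot, so an array full of the same filler pointer costs a few bytes rather than a few bytes per element. A simplified, hedged sketch of the run detection; the real code above also honours kMaxRepeats and the new-space check, and the helper name plus the use of the surrounding Object type are illustrative:

// Sketch only: count how many consecutive words repeat the word at current[0].
static inline int CountRepeatRun(Object** current, Object** end) {
  int run = 1;
  while (current + run < end && current[run] == current[0]) run++;
  return run;
}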
......@@ -187,24 +187,6 @@ class SnapshotByteSource {
};
// It is very common to have a reference to objects at certain offsets in the
// heap. These offsets have been determined experimentally. We code
// references to such objects in a single byte that encodes the way the pointer
// is written (only plain pointers allowed), the space number and the offset.
// This only works for objects in the first page of a space. Don't use this for
// things in newspace since it bypasses the write barrier.
static const int k64 = (sizeof(uintptr_t) - 4) / 4;
#define COMMON_REFERENCE_PATTERNS(f) \
f(kNumberOfSpaces, 2, (11 - k64)) \
f((kNumberOfSpaces + 1), 2, 0) \
f((kNumberOfSpaces + 2), 2, (142 - 16 * k64)) \
f((kNumberOfSpaces + 3), 2, (74 - 15 * k64)) \
f((kNumberOfSpaces + 4), 2, 5) \
f((kNumberOfSpaces + 5), 1, 135) \
f((kNumberOfSpaces + 6), 2, (228 - 39 * k64))
#define COMMON_RAW_LENGTHS(f) \
f(1, 1) \
f(2, 2) \
......@@ -242,7 +224,7 @@ class SerializerDeserializer: public ObjectVisitor {
// 0xd-0xf Free.
kBackref = 0x10, // Object is described relative to end.
// 0x11-0x18 One per space.
// 0x19-0x1f Common backref offsets.
// 0x19-0x1f Free.
kFromStart = 0x20, // Object is described relative to start.
// 0x21-0x28 One per space.
// 0x29-0x2f Free.
......@@ -279,9 +261,29 @@ class SerializerDeserializer: public ObjectVisitor {
// is referred to from external strings in the snapshot.
static const int kNativesStringResource = 0x71;
static const int kNewPage = 0x72;
// 0x73-0x7f Free.
// 0xb0-0xbf Free.
// 0xf0-0xff Free.
static const int kRepeat = 0x73;
static const int kConstantRepeat = 0x74;
// 0x74-0x7f Repeat last word (subtract 0x73 to get the count).
static const int kMaxRepeats = 0x7f - 0x73;
static int CodeForRepeats(int repeats) {
ASSERT(repeats >= 1 && repeats <= kMaxRepeats);
return 0x73 + repeats;
}
static int RepeatsForCode(int byte_code) {
ASSERT(byte_code >= kConstantRepeat && byte_code <= 0x7f);
return byte_code - 0x73;
}
static const int kRootArrayLowConstants = 0xb0;
// 0xb0-0xbf Things from the first 16 elements of the root array.
static const int kRootArrayHighConstants = 0xf0;
// 0xf0-0xff Things from the next 16 elements of the root array.
static const int kRootArrayNumberOfConstantEncodings = 0x20;
static const int kRootArrayNumberOfLowConstantEncodings = 0x10;
static int RootArrayConstantFromByteCode(int byte_code) {
int constant = (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
ASSERT(constant >= 0 && constant < kRootArrayNumberOfConstantEncodings);
return constant;
}
static const int kLargeData = LAST_SPACE;
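The tag-space bookkeeping above packs the 32 root-array constants into two 16-byte ranges, 0xb0-0xbf and 0xf0-0xff, and RootArrayConstantFromByteCode recovers the index by combining the low nibble with bit 0x40 (which distinguishes the two ranges) shifted down into bit 4. A small self-contained round-trip check, using only the formula shown above:

// Illustrative round-trip check (not part of the commit).
#include <cassert>

static int RootArrayConstantFromByteCode(int byte_code) {
  return (byte_code & 0xf) | ((byte_code & 0x40) >> 2);
}

int main() {
  assert(RootArrayConstantFromByteCode(0xb0) == 0);   // first low constant
  assert(RootArrayConstantFromByteCode(0xbf) == 15);  // last low constant
  assert(RootArrayConstantFromByteCode(0xf0) == 16);  // first high constant
  assert(RootArrayConstantFromByteCode(0xff) == 31);  // last encodable root
  return 0;
}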
......@@ -475,14 +477,22 @@ class Serializer : public SerializerDeserializer {
static void TooLateToEnableNow() { too_late_to_enable_now_ = true; }
static bool enabled() { return serialization_enabled_; }
SerializationAddressMapper* address_mapper() { return &address_mapper_; }
void PutRoot(
int index, HeapObject* object, HowToCode how, WhereToPoint where);
#ifdef DEBUG
virtual void Synchronize(const char* tag);
#endif
protected:
static const int kInvalidRootIndex = -1;
virtual int RootIndex(HeapObject* heap_object) = 0;
int RootIndex(HeapObject* heap_object);
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0;
intptr_t root_index_wave_front() { return root_index_wave_front_; }
void set_root_index_wave_front(intptr_t value) {
ASSERT(value >= root_index_wave_front_);
root_index_wave_front_ = value;
}
class ObjectSerializer : public ObjectVisitor {
public:
......@@ -558,6 +568,7 @@ class Serializer : public SerializerDeserializer {
static bool too_late_to_enable_now_;
int large_object_total_;
SerializationAddressMapper address_mapper_;
intptr_t root_index_wave_front_;
friend class ObjectSerializer;
friend class Deserializer;
......@@ -572,6 +583,7 @@ class PartialSerializer : public Serializer {
SnapshotByteSink* sink)
: Serializer(sink),
startup_serializer_(startup_snapshot_serializer) {
set_root_index_wave_front(Heap::kStrongRootListLength);
}
// Serialize the objects reachable from a single object pointer.
......@@ -581,7 +593,6 @@ class PartialSerializer : public Serializer {
WhereToPoint where_to_point);
protected:
virtual int RootIndex(HeapObject* o);
virtual int PartialSnapshotCacheIndex(HeapObject* o);
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) {
// Scripts should be referred only through shared function infos. We can't
......@@ -606,7 +617,7 @@ class StartupSerializer : public Serializer {
explicit StartupSerializer(SnapshotByteSink* sink) : Serializer(sink) {
// Clear the cache of objects used by the partial snapshot. After the
// strong roots have been serialized we can create a partial snapshot
// which will repopulate the cache with objects neede by that partial
// which will repopulate the cache with objects needed by that partial
// snapshot.
Isolate::Current()->set_serialize_partial_snapshot_cache_length(0);
}
......@@ -625,7 +636,6 @@ class StartupSerializer : public Serializer {
}
private:
virtual int RootIndex(HeapObject* o) { return kInvalidRootIndex; }
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) {
return false;
}
......
......@@ -55,7 +55,7 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
Address roots_register_value = kRootRegisterBias +
reinterpret_cast<Address>(isolate->heap()->roots_address());
reinterpret_cast<Address>(isolate->heap()->roots_array_start());
intptr_t delta = other.address() - roots_register_value;
return delta;
}
......
......@@ -319,9 +319,9 @@ class MacroAssembler: public Assembler {
void LoadFromSafepointRegisterSlot(Register dst, Register src);
void InitializeRootRegister() {
ExternalReference roots_address =
ExternalReference::roots_address(isolate());
movq(kRootRegister, roots_address);
ExternalReference roots_array_start =
ExternalReference::roots_array_start(isolate());
movq(kRootRegister, roots_array_start);
addq(kRootRegister, Immediate(kRootRegisterBias));
}
......
......@@ -130,7 +130,8 @@ TEST(ExternalReferenceEncoder) {
encoder.Encode(
ExternalReference::new_space_start(isolate).address()));
CHECK_EQ(make_code(UNCLASSIFIED, 3),
encoder.Encode(ExternalReference::roots_address(isolate).address()));
encoder.Encode(
ExternalReference::roots_array_start(isolate).address()));
}
......