Commit 735fa0c4 authored by yangguo's avatar yangguo Committed by Commit bot

[serializer] prepare attached references for general use.

Currently, attached references are only used for the global proxy, the source
string, and code stubs. In the mid-term future we want to use attached
references for arbitrary objects (in fixed order) provided from outside.

This change renames BackReference to SerializerReference to include both
back references and attached references.

R=mtrofin@chromium.org, vogelheim@chromium.org

Review-Url: https://codereview.chromium.org/1992723002
Cr-Commit-Position: refs/heads/master@{#36318}
parent e83ac234
......@@ -40,7 +40,6 @@ class AddressMapBase {
}
};
class RootIndexMap : public AddressMapBase {
public:
explicit RootIndexMap(Isolate* isolate);
......@@ -59,123 +58,151 @@ class RootIndexMap : public AddressMapBase {
DISALLOW_COPY_AND_ASSIGN(RootIndexMap);
};
class BackReference {
class SerializerReference {
public:
explicit BackReference(uint32_t bitfield) : bitfield_(bitfield) {}
explicit SerializerReference(uint32_t bitfield) : bitfield_(bitfield) {}
SerializerReference() : bitfield_(Special(kInvalidValue)) {}
BackReference() : bitfield_(kInvalidValue) {}
static SerializerReference BackReference(AllocationSpace space,
uint32_t chunk_index,
uint32_t chunk_offset) {
DCHECK(IsAligned(chunk_offset, kObjectAlignment));
DCHECK_NE(LO_SPACE, space);
return SerializerReference(
SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
}
static BackReference SourceReference() { return BackReference(kSourceValue); }
static SerializerReference LargeObjectReference(uint32_t index) {
return SerializerReference(SpaceBits::encode(LO_SPACE) |
ValueIndexBits::encode(index));
}
static BackReference GlobalProxyReference() {
return BackReference(kGlobalProxyValue);
static SerializerReference AttachedReference(uint32_t index) {
return SerializerReference(SpaceBits::encode(kAttachedReferenceSpace) |
ValueIndexBits::encode(index));
}
static BackReference LargeObjectReference(uint32_t index) {
return BackReference(SpaceBits::encode(LO_SPACE) |
ChunkOffsetBits::encode(index));
static SerializerReference DummyReference() {
return SerializerReference(Special(kDummyValue));
}
static BackReference DummyReference() { return BackReference(kDummyValue); }
bool is_valid() const { return bitfield_ != Special(kInvalidValue); }
static BackReference Reference(AllocationSpace space, uint32_t chunk_index,
uint32_t chunk_offset) {
DCHECK(IsAligned(chunk_offset, kObjectAlignment));
DCHECK_NE(LO_SPACE, space);
return BackReference(
SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
bool is_back_reference() const {
return SpaceBits::decode(bitfield_) <= LAST_SPACE;
}
bool is_valid() const { return bitfield_ != kInvalidValue; }
bool is_source() const { return bitfield_ == kSourceValue; }
bool is_global_proxy() const { return bitfield_ == kGlobalProxyValue; }
AllocationSpace space() const {
DCHECK(is_valid());
return SpaceBits::decode(bitfield_);
DCHECK(is_back_reference());
return static_cast<AllocationSpace>(SpaceBits::decode(bitfield_));
}
uint32_t chunk_offset() const {
DCHECK(is_valid());
DCHECK(is_back_reference());
return ChunkOffsetBits::decode(bitfield_) << kObjectAlignmentBits;
}
uint32_t large_object_index() const {
DCHECK(is_valid());
DCHECK(is_back_reference());
DCHECK(chunk_index() == 0);
return ChunkOffsetBits::decode(bitfield_);
}
uint32_t chunk_index() const {
DCHECK(is_valid());
DCHECK(is_back_reference());
return ChunkIndexBits::decode(bitfield_);
}
uint32_t reference() const {
DCHECK(is_valid());
uint32_t back_reference() const {
DCHECK(is_back_reference());
return bitfield_ & (ChunkOffsetBits::kMask | ChunkIndexBits::kMask);
}
uint32_t bitfield() const { return bitfield_; }
bool is_attached_reference() const {
return SpaceBits::decode(bitfield_) == kAttachedReferenceSpace;
}
int attached_reference_index() const {
DCHECK(is_attached_reference());
return ValueIndexBits::decode(bitfield_);
}
private:
static const uint32_t kInvalidValue = 0xFFFFFFFF;
static const uint32_t kSourceValue = 0xFFFFFFFE;
static const uint32_t kGlobalProxyValue = 0xFFFFFFFD;
static const uint32_t kDummyValue = 0xFFFFFFFC;
inline static uint32_t Special(int value) {
return SpaceBits::encode(kSpecialValueSpace) |
ValueIndexBits::encode(value);
}
// We use the 32-bit bitfield to encode either a back reference, a special
// value, or an attached reference index.
// Back reference:
// [ Space index ] [ Chunk index ] [ Chunk offset ]
// [ LO_SPACE ] [ large object index ]
// Special value
// [ kSpecialValueSpace ] [ Special value index ]
// Attached reference
// [ kAttachedReferenceSpace ] [ Attached reference index ]
static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;
static const int kValueIndexSize = kChunkOffsetSize + kChunkIndexSize;
public:
static const int kMaxChunkIndex = (1 << kChunkIndexSize) - 1;
static const int kSpecialValueSpace = LAST_SPACE + 1;
static const int kAttachedReferenceSpace = kSpecialValueSpace + 1;
STATIC_ASSERT(kAttachedReferenceSpace < (1 << kSpaceTagSize));
private:
static const int kInvalidValue = 0;
static const int kDummyValue = 1;
// The chunk offset can also be used to encode the index of special values.
class ChunkOffsetBits : public BitField<uint32_t, 0, kChunkOffsetSize> {};
class ChunkIndexBits
: public BitField<uint32_t, ChunkOffsetBits::kNext, kChunkIndexSize> {};
class SpaceBits
: public BitField<AllocationSpace, ChunkIndexBits::kNext, kSpaceTagSize> {
};
class ValueIndexBits : public BitField<uint32_t, 0, kValueIndexSize> {};
class SpaceBits : public BitField<int, kValueIndexSize, kSpaceTagSize> {};
STATIC_ASSERT(SpaceBits::kNext == 32);
uint32_t bitfield_;
};
friend class SerializerReferenceMap;
};
// Mapping objects to their location after deserialization.
// This is used during building, but not at runtime by V8.
class BackReferenceMap : public AddressMapBase {
class SerializerReferenceMap : public AddressMapBase {
public:
BackReferenceMap()
: no_allocation_(), map_(new HashMap(HashMap::PointersMatch)) {}
SerializerReferenceMap()
: no_allocation_(),
map_(new HashMap(HashMap::PointersMatch)),
attached_reference_index_(0) {}
~BackReferenceMap() { delete map_; }
~SerializerReferenceMap() { delete map_; }
BackReference Lookup(HeapObject* obj) {
SerializerReference Lookup(HeapObject* obj) {
HashMap::Entry* entry = LookupEntry(map_, obj, false);
return entry ? BackReference(GetValue(entry)) : BackReference();
return entry ? SerializerReference(GetValue(entry)) : SerializerReference();
}
void Add(HeapObject* obj, BackReference b) {
void Add(HeapObject* obj, SerializerReference b) {
DCHECK(b.is_valid());
DCHECK_NULL(LookupEntry(map_, obj, false));
HashMap::Entry* entry = LookupEntry(map_, obj, true);
SetValue(entry, b.bitfield());
}
void AddSourceString(String* string) {
Add(string, BackReference::SourceReference());
SetValue(entry, b.bitfield_);
}
void AddGlobalProxy(HeapObject* global_proxy) {
Add(global_proxy, BackReference::GlobalProxyReference());
// Registers |attached_reference| under the next sequential attached-reference
// index and returns the resulting SerializerReference. The deserializer must
// later provide the backing objects in exactly this order (see
// Deserializer::AddAttachedObject), since references are resolved by index.
SerializerReference AddAttachedReference(HeapObject* attached_reference) {
SerializerReference reference =
SerializerReference::AttachedReference(attached_reference_index_++);
Add(attached_reference, reference);
return reference;
}
private:
DisallowHeapAllocation no_allocation_;
HashMap* map_;
DISALLOW_COPY_AND_ASSIGN(BackReferenceMap);
int attached_reference_index_;
DISALLOW_COPY_AND_ASSIGN(SerializerReferenceMap);
};
} // namespace internal
......
......@@ -37,10 +37,10 @@ bool ContextMeasure::IsShared(HeapObject* object) {
void ContextMeasure::MeasureObject(HeapObject* object) {
if (back_reference_map_.Lookup(object).is_valid()) return;
if (reference_map_.Lookup(object).is_valid()) return;
if (root_index_map_.Lookup(object) != RootIndexMap::kInvalidRootIndex) return;
if (IsShared(object)) return;
back_reference_map_.Add(object, BackReference::DummyReference());
reference_map_.Add(object, SerializerReference::DummyReference());
recursion_depth_++;
if (recursion_depth_ > kMaxRecursion) {
deferred_objects_.Add(object);
......
......@@ -29,7 +29,7 @@ class ContextMeasure : public ObjectVisitor {
Context* context_;
BackReferenceMap back_reference_map_;
SerializerReferenceMap reference_map_;
RootIndexMap root_index_map_;
static const int kMaxRecursion = 16;
......
......@@ -76,7 +76,7 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
#define IC_KIND_CASE(KIND) case Code::KIND:
IC_KIND_LIST(IC_KIND_CASE)
#undef IC_KIND_CASE
SerializeCodeStub(code_object->stub_key(), how_to_code, where_to_point);
SerializeCodeStub(code_object, how_to_code, where_to_point);
return;
case Code::FUNCTION:
DCHECK(code_object->has_reloc_info_for_serialization());
......@@ -128,34 +128,23 @@ void CodeSerializer::SerializeBuiltin(int builtin_index, HowToCode how_to_code,
sink_->PutInt(builtin_index, "builtin_index");
}
void CodeSerializer::SerializeCodeStub(uint32_t stub_key, HowToCode how_to_code,
void CodeSerializer::SerializeCodeStub(Code* code_stub, HowToCode how_to_code,
WhereToPoint where_to_point) {
DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
(how_to_code == kPlain && where_to_point == kInnerPointer) ||
(how_to_code == kFromCode && where_to_point == kInnerPointer));
// We only arrive here if we have not encountered this code stub before.
DCHECK(!reference_map()->Lookup(code_stub).is_valid());
uint32_t stub_key = code_stub->stub_key();
DCHECK(CodeStub::MajorKeyFromKey(stub_key) != CodeStub::NoCache);
DCHECK(!CodeStub::GetCode(isolate(), stub_key).is_null());
stub_keys_.Add(stub_key);
int index = AddCodeStubKey(stub_key) + kCodeStubsBaseIndex;
SerializerReference reference =
reference_map()->AddAttachedReference(code_stub);
if (FLAG_trace_serializer) {
PrintF(" Encoding code stub %s as %d\n",
CodeStub::MajorName(CodeStub::MajorKeyFromKey(stub_key)), index);
PrintF(" Encoding code stub %s as attached reference %d\n",
CodeStub::MajorName(CodeStub::MajorKeyFromKey(stub_key)),
reference.attached_reference_index());
}
sink_->Put(kAttachedReference + how_to_code + where_to_point, "CodeStub");
sink_->PutInt(index, "CodeStub key");
}
int CodeSerializer::AddCodeStubKey(uint32_t stub_key) {
// TODO(yangguo) Maybe we need a hash table for a faster lookup than O(n^2).
int index = 0;
while (index < stub_keys_.length()) {
if (stub_keys_[index] == stub_key) return index;
index++;
}
stub_keys_.Add(stub_key);
return index;
PutAttachedReference(reference, how_to_code, where_to_point);
}
MaybeHandle<SharedFunctionInfo> CodeSerializer::Deserialize(
......@@ -173,19 +162,14 @@ MaybeHandle<SharedFunctionInfo> CodeSerializer::Deserialize(
return MaybeHandle<SharedFunctionInfo>();
}
// Prepare and register list of attached objects.
Deserializer deserializer(scd.get());
deserializer.AddAttachedObject(source);
Vector<const uint32_t> code_stub_keys = scd->CodeStubKeys();
Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New(
code_stub_keys.length() + kCodeStubsBaseIndex);
attached_objects[kSourceObjectIndex] = source;
for (int i = 0; i < code_stub_keys.length(); i++) {
attached_objects[i + kCodeStubsBaseIndex] =
CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked();
deserializer.AddAttachedObject(
CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked());
}
Deserializer deserializer(scd.get());
deserializer.SetAttachedObjects(attached_objects);
// Deserialize.
Handle<SharedFunctionInfo> result;
if (!deserializer.DeserializeCode(isolate).ToHandle(&result)) {
......
......@@ -20,11 +20,6 @@ class CodeSerializer : public Serializer {
MUST_USE_RESULT static MaybeHandle<SharedFunctionInfo> Deserialize(
Isolate* isolate, ScriptData* cached_data, Handle<String> source);
static const int kSourceObjectIndex = 0;
STATIC_ASSERT(kSourceObjectReference == kSourceObjectIndex);
static const int kCodeStubsBaseIndex = 1;
String* source() const {
DCHECK(!AllowHeapAllocation::IsAllowed());
return source_;
......@@ -35,7 +30,7 @@ class CodeSerializer : public Serializer {
private:
CodeSerializer(Isolate* isolate, SnapshotByteSink* sink, String* source)
: Serializer(isolate, sink), source_(source) {
back_reference_map_.AddSourceString(source);
reference_map_.AddAttachedReference(source);
}
~CodeSerializer() override { OutputStatistics("CodeSerializer"); }
......@@ -45,11 +40,10 @@ class CodeSerializer : public Serializer {
void SerializeBuiltin(int builtin_index, HowToCode how_to_code,
WhereToPoint where_to_point);
void SerializeCodeStub(uint32_t stub_key, HowToCode how_to_code,
void SerializeCodeStub(Code* code_stub, HowToCode how_to_code,
WhereToPoint where_to_point);
void SerializeGeneric(HeapObject* heap_object, HowToCode how_to_code,
WhereToPoint where_to_point);
int AddCodeStubKey(uint32_t stub_key);
DisallowHeapAllocation no_gc_;
String* source_;
......
......@@ -119,9 +119,7 @@ MaybeHandle<Object> Deserializer::DeserializePartial(
return MaybeHandle<Object>();
}
Vector<Handle<Object> > attached_objects = Vector<Handle<Object> >::New(1);
attached_objects[kGlobalProxyReference] = global_proxy;
SetAttachedObjects(attached_objects);
AddAttachedObject(global_proxy);
DisallowHeapAllocation no_gc;
// Keep track of the code space start and end pointers in case new
......@@ -167,7 +165,6 @@ MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode(
Deserializer::~Deserializer() {
// TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed.
// DCHECK(source_.AtEOF());
attached_objects_.Dispose();
}
// This is called on the roots. It is the driver of the deserialization
......@@ -315,7 +312,7 @@ void Deserializer::CommitPostProcessedObjects(Isolate* isolate) {
HeapObject* Deserializer::GetBackReferencedObject(int space) {
HeapObject* obj;
BackReference back_reference(source_.GetInt());
SerializerReference back_reference(source_.GetInt());
if (space == LO_SPACE) {
CHECK(back_reference.chunk_index() == 0);
uint32_t index = back_reference.large_object_index();
......@@ -496,7 +493,6 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
new_object = reinterpret_cast<Object*>(address); \
} else if (where == kAttachedReference) { \
int index = source_.GetInt(); \
DCHECK(deserializing_user_code() || index == kGlobalProxyReference); \
new_object = *attached_objects_[index]; \
emit_write_barrier = isolate->heap()->InNewSpace(new_object); \
} else { \
......
......@@ -53,10 +53,10 @@ class Deserializer : public SerializerDeserializer {
// Deserialize a shared function info. Fail gracefully.
MaybeHandle<SharedFunctionInfo> DeserializeCode(Isolate* isolate);
// Pass a vector of externally-provided objects referenced by the snapshot.
// The ownership to its backing store is handed over as well.
void SetAttachedObjects(Vector<Handle<Object> > attached_objects) {
attached_objects_ = attached_objects;
// Adds an object that backs an attached reference. Objects must be added in
// exactly the same order in which the serializer registered them, because
// attached references are resolved by index into this list during
// deserialization.
void AddAttachedObject(Handle<HeapObject> attached_object) {
attached_objects_.Add(attached_object);
}
private:
......@@ -117,7 +117,7 @@ class Deserializer : public SerializerDeserializer {
Isolate* isolate_;
// Objects from the attached object descriptions in the serialized user code.
Vector<Handle<Object> > attached_objects_;
List<Handle<HeapObject> > attached_objects_;
SnapshotByteSource source_;
uint32_t magic_number_;
......
......@@ -14,7 +14,6 @@ PartialSerializer::PartialSerializer(Isolate* isolate,
SnapshotByteSink* sink)
: Serializer(isolate, sink),
startup_serializer_(startup_snapshot_serializer),
global_object_(NULL),
next_partial_cache_index_(0) {
InitializeCodeAddressMap();
}
......@@ -26,8 +25,7 @@ PartialSerializer::~PartialSerializer() {
void PartialSerializer::Serialize(Object** o) {
if ((*o)->IsContext()) {
Context* context = Context::cast(*o);
global_object_ = context->global_object();
back_reference_map()->AddGlobalProxy(context->global_proxy());
reference_map()->AddAttachedReference(context->global_proxy());
// The bootstrap snapshot has a code-stub context. When serializing the
// partial snapshot, it is chained into the weak context list on the isolate
// and it's next context pointer may point to the code-stub context. Clear
......@@ -74,7 +72,7 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
// Pointers from the partial snapshot to the objects in the startup snapshot
// should go through the root array or through the partial snapshot cache.
// If this is not the case you may have to add something to the root array.
DCHECK(!startup_serializer_->back_reference_map()->Lookup(obj).is_valid());
DCHECK(!startup_serializer_->reference_map()->Lookup(obj).is_valid());
// All the internalized strings that the partial snapshot needs should be
// either in the root table or in the partial snapshot cache.
DCHECK(!obj->IsInternalizedString());
......
......@@ -50,7 +50,6 @@ class PartialSerializer : public Serializer {
bool ShouldBeInThePartialSnapshotCache(HeapObject* o);
Serializer* startup_serializer_;
Object* global_object_;
PartialCacheIndexMap partial_cache_index_map_;
int next_partial_cache_index_;
DISALLOW_COPY_AND_ASSIGN(PartialSerializer);
......
......@@ -40,11 +40,13 @@ class HotObjectsList {
}
void Add(HeapObject* object) {
DCHECK(!AllowHeapAllocation::IsAllowed());
circular_queue_[index_] = object;
index_ = (index_ + 1) & kSizeMask;
}
HeapObject* Get(int index) {
DCHECK(!AllowHeapAllocation::IsAllowed());
DCHECK_NOT_NULL(circular_queue_[index]);
return circular_queue_[index];
}
......@@ -52,6 +54,7 @@ class HotObjectsList {
static const int kNotFound = -1;
int Find(HeapObject* object) {
DCHECK(!AllowHeapAllocation::IsAllowed());
for (int i = 0; i < kSize; i++) {
if (circular_queue_[i] == object) return i;
}
......@@ -211,12 +214,6 @@ class SerializerDeserializer : public ObjectVisitor {
// Sentinel after a new object to indicate that double alignment is needed.
static const int kDoubleAlignmentSentinel = 0;
// Used as index for the attached reference representing the source object.
static const int kSourceObjectReference = 0;
// Used as index for the attached reference representing the global proxy.
static const int kGlobalProxyReference = 0;
// ---------- member variable ----------
HotObjectsList hot_objects_;
};
......
......@@ -124,10 +124,9 @@ void Serializer::EncodeReservations(
}
#ifdef DEBUG
bool Serializer::BackReferenceIsAlreadyAllocated(BackReference reference) {
DCHECK(reference.is_valid());
DCHECK(!reference.is_source());
DCHECK(!reference.is_global_proxy());
bool Serializer::BackReferenceIsAlreadyAllocated(
SerializerReference reference) {
DCHECK(reference.is_back_reference());
AllocationSpace space = reference.space();
int chunk_index = reference.chunk_index();
if (space == LO_SPACE) {
......@@ -163,25 +162,21 @@ bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
return true;
}
}
BackReference back_reference = back_reference_map_.Lookup(obj);
if (back_reference.is_valid()) {
SerializerReference reference = reference_map_.Lookup(obj);
if (reference.is_valid()) {
// Encode the location of an already deserialized object in order to write
// its location into a later object. We can encode the location as an
// offset from the start of the deserialized objects or as an offset
// backwards from the current allocation pointer.
if (back_reference.is_source()) {
if (reference.is_attached_reference()) {
FlushSkip(skip);
if (FLAG_trace_serializer) PrintF(" Encoding source object\n");
DCHECK(how_to_code == kPlain && where_to_point == kStartOfObject);
sink_->Put(kAttachedReference + kPlain + kStartOfObject, "Source");
sink_->PutInt(kSourceObjectReference, "kSourceObjectReference");
} else if (back_reference.is_global_proxy()) {
FlushSkip(skip);
if (FLAG_trace_serializer) PrintF(" Encoding global proxy\n");
DCHECK(how_to_code == kPlain && where_to_point == kStartOfObject);
sink_->Put(kAttachedReference + kPlain + kStartOfObject, "Global Proxy");
sink_->PutInt(kGlobalProxyReference, "kGlobalProxyReference");
if (FLAG_trace_serializer) {
PrintF(" Encoding attached reference %d\n",
reference.attached_reference_index());
}
PutAttachedReference(reference, how_to_code, where_to_point);
} else {
DCHECK(reference.is_back_reference());
if (FLAG_trace_serializer) {
PrintF(" Encoding back reference to: ");
obj->ShortPrint();
......@@ -189,7 +184,7 @@ bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
}
PutAlignmentPrefix(obj);
AllocationSpace space = back_reference.space();
AllocationSpace space = reference.space();
if (skip == 0) {
sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRef");
} else {
......@@ -197,7 +192,7 @@ bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
"BackRefWithSkip");
sink_->PutInt(skip, "BackRefSkipDistance");
}
PutBackReference(obj, back_reference);
PutBackReference(obj, reference);
}
return true;
}
......@@ -236,12 +231,24 @@ void Serializer::PutSmi(Smi* smi) {
for (int i = 0; i < kPointerSize; i++) sink_->Put(bytes[i], "Byte");
}
void Serializer::PutBackReference(HeapObject* object, BackReference reference) {
void Serializer::PutBackReference(HeapObject* object,
SerializerReference reference) {
DCHECK(BackReferenceIsAlreadyAllocated(reference));
sink_->PutInt(reference.reference(), "BackRefValue");
sink_->PutInt(reference.back_reference(), "BackRefValue");
hot_objects_.Add(object);
}
// Emits an attached-reference record into the sink: the kAttachedReference
// opcode (combined with the how/where flags) followed by the index that was
// assigned by SerializerReferenceMap::AddAttachedReference. The deserializer
// resolves this index against the objects registered via AddAttachedObject.
void Serializer::PutAttachedReference(SerializerReference reference,
HowToCode how_to_code,
WhereToPoint where_to_point) {
DCHECK(reference.is_attached_reference());
// Only these opcode/pointer combinations are valid for attached references.
DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
(how_to_code == kPlain && where_to_point == kInnerPointer) ||
(how_to_code == kFromCode && where_to_point == kInnerPointer));
sink_->Put(kAttachedReference + how_to_code + where_to_point, "AttachedRef");
sink_->PutInt(reference.attached_reference_index(), "AttachedRefIndex");
}
int Serializer::PutAlignmentPrefix(HeapObject* object) {
AllocationAlignment alignment = object->RequiredAlignment();
if (alignment != kWordAligned) {
......@@ -253,14 +260,14 @@ int Serializer::PutAlignmentPrefix(HeapObject* object) {
return 0;
}
BackReference Serializer::AllocateLargeObject(int size) {
SerializerReference Serializer::AllocateLargeObject(int size) {
// Large objects are allocated one-by-one when deserializing. We do not
// have to keep track of multiple chunks.
large_objects_total_size_ += size;
return BackReference::LargeObjectReference(seen_large_objects_index_++);
return SerializerReference::LargeObjectReference(seen_large_objects_index_++);
}
BackReference Serializer::Allocate(AllocationSpace space, int size) {
SerializerReference Serializer::Allocate(AllocationSpace space, int size) {
DCHECK(space >= 0 && space < kNumberOfPreallocatedSpaces);
DCHECK(size > 0 && size <= static_cast<int>(max_chunk_size(space)));
uint32_t new_chunk_size = pending_chunk_[space] + size;
......@@ -270,14 +277,13 @@ BackReference Serializer::Allocate(AllocationSpace space, int size) {
sink_->Put(kNextChunk, "NextChunk");
sink_->Put(space, "NextChunkSpace");
completed_chunks_[space].Add(pending_chunk_[space]);
DCHECK_LE(completed_chunks_[space].length(), BackReference::kMaxChunkIndex);
pending_chunk_[space] = 0;
new_chunk_size = size;
}
uint32_t offset = pending_chunk_[space];
pending_chunk_[space] = new_chunk_size;
return BackReference::Reference(space, completed_chunks_[space].length(),
offset);
return SerializerReference::BackReference(
space, completed_chunks_[space].length(), offset);
}
void Serializer::Pad() {
......@@ -320,7 +326,7 @@ void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
CodeNameEvent(object_->address(), sink_->Position(), code_name));
}
BackReference back_reference;
SerializerReference back_reference;
if (space == LO_SPACE) {
sink_->Put(kNewObject + reference_representation_ + space,
"NewLargeObject");
......@@ -345,7 +351,7 @@ void Serializer::ObjectSerializer::SerializePrologue(AllocationSpace space,
#endif // OBJECT_PRINT
// Mark this object as already serialized.
serializer_->back_reference_map()->Add(object_, back_reference);
serializer_->reference_map()->Add(object_, back_reference);
// Serialize the map (first word of the object).
serializer_->SerializeObject(map, kPlain, kStartOfObject, 0);
......@@ -513,15 +519,17 @@ void Serializer::ObjectSerializer::SerializeDeferred() {
int size = object_->Size();
Map* map = object_->map();
BackReference reference = serializer_->back_reference_map()->Lookup(object_);
SerializerReference back_reference =
serializer_->reference_map()->Lookup(object_);
DCHECK(back_reference.is_back_reference());
// Serialize the rest of the object.
CHECK_EQ(0, bytes_processed_so_far_);
bytes_processed_so_far_ = kPointerSize;
serializer_->PutAlignmentPrefix(object_);
sink_->Put(kNewObject + reference.space(), "deferred object");
serializer_->PutBackReference(object_, reference);
sink_->Put(kNewObject + back_reference.space(), "deferred object");
serializer_->PutBackReference(object_, back_reference);
sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
UnlinkWeakNextScope unlink_weak_next(object_);
......
......@@ -128,7 +128,7 @@ class Serializer : public SerializerDeserializer {
Isolate* isolate() const { return isolate_; }
BackReferenceMap* back_reference_map() { return &back_reference_map_; }
SerializerReferenceMap* reference_map() { return &reference_map_; }
RootIndexMap* root_index_map() { return &root_index_map_; }
#ifdef OBJECT_PRINT
......@@ -162,7 +162,10 @@ class Serializer : public SerializerDeserializer {
void PutSmi(Smi* smi);
void PutBackReference(HeapObject* object, BackReference reference);
void PutBackReference(HeapObject* object, SerializerReference reference);
void PutAttachedReference(SerializerReference reference,
HowToCode how_to_code, WhereToPoint where_to_point);
// Emit alignment prefix if necessary, return required padding space in bytes.
int PutAlignmentPrefix(HeapObject* object);
......@@ -178,11 +181,11 @@ class Serializer : public SerializerDeserializer {
}
}
bool BackReferenceIsAlreadyAllocated(BackReference back_reference);
bool BackReferenceIsAlreadyAllocated(SerializerReference back_reference);
// This will return the space for an object.
BackReference AllocateLargeObject(int size);
BackReference Allocate(AllocationSpace space, int size);
SerializerReference AllocateLargeObject(int size);
SerializerReference Allocate(AllocationSpace space, int size);
int EncodeExternalReference(Address addr) {
return external_reference_encoder_.Encode(addr);
}
......@@ -207,7 +210,7 @@ class Serializer : public SerializerDeserializer {
SnapshotByteSink* sink() const { return sink_; }
void QueueDeferredObject(HeapObject* obj) {
DCHECK(back_reference_map_.Lookup(obj).is_valid());
DCHECK(reference_map_.Lookup(obj).is_back_reference());
deferred_objects_.Add(obj);
}
......@@ -218,7 +221,7 @@ class Serializer : public SerializerDeserializer {
SnapshotByteSink* sink_;
ExternalReferenceEncoder external_reference_encoder_;
BackReferenceMap back_reference_map_;
SerializerReferenceMap reference_map_;
RootIndexMap root_index_map_;
int recursion_depth_;
......
......@@ -73,8 +73,8 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
// Make sure that the immortal immovable root has been included in the first
// chunk of its reserved space, so that it is deserialized onto the first
// page of its space and stays immortal immovable.
BackReference ref = back_reference_map_.Lookup(obj);
CHECK(ref.is_valid() && ref.chunk_index() == 0);
SerializerReference ref = reference_map_.Lookup(obj);
CHECK(ref.is_back_reference() && ref.chunk_index() == 0);
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment