Commit 1da6cd9f authored by Yang Guo, committed by Commit Bot

[snapshot] allow custom chunk sizes.

Bug: v8:7887
Change-Id: I3904981f06efcb1fb83e863d0be6a16ebaaf17f2
Reviewed-on: https://chromium-review.googlesource.com/1113930
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54189}
parent c5fa1c1f
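
In short: serializer allocation chunks were previously always completed at the page area size of the target space. This change lets a smaller, custom chunk size be requested through the new --serialization_chunk_size flag (0, the default, keeps page-sized chunks), which CodeSerializer and PartialSerializer forward to DefaultSerializerAllocator. To support the resulting larger chunk counts, SerializerReference moves out of src/address-map.h into the new src/snapshot/references.h and grows from one packed 32-bit bitfield into a bitfield plus a separate 32-bit value, and back references are now written to the snapshot stream as explicit chunk-index/chunk-offset integers rather than one packed word.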
@@ -2345,6 +2345,7 @@ v8_source_set("v8_base") {
     "src/snapshot/partial-deserializer.h",
     "src/snapshot/partial-serializer.cc",
     "src/snapshot/partial-serializer.h",
+    "src/snapshot/references.h",
    "src/snapshot/serializer-common.cc",
     "src/snapshot/serializer-common.h",
     "src/snapshot/serializer.cc",
@@ -69,181 +69,6 @@ class RootIndexMap {
   DISALLOW_COPY_AND_ASSIGN(RootIndexMap);
 };
-
-class SerializerReference {
- public:
-  SerializerReference() : bitfield_(Special(kInvalidValue)) {}
-
-  static SerializerReference FromBitfield(uint32_t bitfield) {
-    return SerializerReference(bitfield);
-  }
-
-  static SerializerReference BackReference(AllocationSpace space,
-                                           uint32_t chunk_index,
-                                           uint32_t chunk_offset) {
-    DCHECK(IsAligned(chunk_offset, kObjectAlignment));
-    DCHECK_NE(LO_SPACE, space);
-    return SerializerReference(
-        SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
-        ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
-  }
-
-  static SerializerReference MapReference(uint32_t index) {
-    return SerializerReference(SpaceBits::encode(MAP_SPACE) |
-                               ValueIndexBits::encode(index));
-  }
-
-  static SerializerReference OffHeapBackingStoreReference(uint32_t index) {
-    return SerializerReference(SpaceBits::encode(kExternalSpace) |
-                               ValueIndexBits::encode(index));
-  }
-
-  static SerializerReference LargeObjectReference(uint32_t index) {
-    return SerializerReference(SpaceBits::encode(LO_SPACE) |
-                               ValueIndexBits::encode(index));
-  }
-
-  static SerializerReference AttachedReference(uint32_t index) {
-    return SerializerReference(SpaceBits::encode(kAttachedReferenceSpace) |
-                               ValueIndexBits::encode(index));
-  }
-
-  static SerializerReference DummyReference() {
-    return SerializerReference(Special(kDummyValue));
-  }
-
-  bool is_valid() const { return bitfield_ != Special(kInvalidValue); }
-
-  bool is_back_reference() const {
-    return SpaceBits::decode(bitfield_) <= LAST_SPACE;
-  }
-
-  AllocationSpace space() const {
-    DCHECK(is_back_reference());
-    return static_cast<AllocationSpace>(SpaceBits::decode(bitfield_));
-  }
-
-  uint32_t chunk_offset() const {
-    DCHECK(is_back_reference());
-    return ChunkOffsetBits::decode(bitfield_) << kObjectAlignmentBits;
-  }
-
-  uint32_t map_index() const {
-    DCHECK(is_back_reference());
-    return ValueIndexBits::decode(bitfield_);
-  }
-
-  bool is_off_heap_backing_store_reference() const {
-    return SpaceBits::decode(bitfield_) == kExternalSpace;
-  }
-
-  uint32_t off_heap_backing_store_index() const {
-    DCHECK(is_off_heap_backing_store_reference());
-    return ValueIndexBits::decode(bitfield_);
-  }
-
-  uint32_t large_object_index() const {
-    DCHECK(is_back_reference());
-    return ValueIndexBits::decode(bitfield_);
-  }
-
-  uint32_t chunk_index() const {
-    DCHECK(is_back_reference());
-    return ChunkIndexBits::decode(bitfield_);
-  }
-
-  uint32_t back_reference() const {
-    DCHECK(is_back_reference());
-    return bitfield_ & (ChunkOffsetBits::kMask | ChunkIndexBits::kMask);
-  }
-
-  bool is_attached_reference() const {
-    return SpaceBits::decode(bitfield_) == kAttachedReferenceSpace;
-  }
-
-  int attached_reference_index() const {
-    DCHECK(is_attached_reference());
-    return ValueIndexBits::decode(bitfield_);
-  }
-
- private:
-  explicit SerializerReference(uint32_t bitfield) : bitfield_(bitfield) {}
-
-  inline static uint32_t Special(int value) {
-    return SpaceBits::encode(kSpecialValueSpace) |
-           ValueIndexBits::encode(value);
-  }
-
-  // We use the 32-bit bitfield to encode either a back reference, a special
-  // value, or an attached reference index.
-  // Back reference:
-  //   [ Space index ] [ Chunk index ] [ Chunk offset ]
-  //   [ LO_SPACE    ] [ large object index           ]
-  // Special value
-  //   [ kSpecialValueSpace ] [ Special value index ]
-  // Attached reference
-  //   [ kAttachedReferenceSpace ] [ Attached reference index ]
-  // External
-  //   [ kExternalSpace ] [ External reference index ]
-
-  static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
-  static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;
-  static const int kValueIndexSize = kChunkOffsetSize + kChunkIndexSize;
-  static const int kSpecialValueSpace = LAST_SPACE + 1;
-  static const int kAttachedReferenceSpace = kSpecialValueSpace + 1;
-  static const int kExternalSpace = kAttachedReferenceSpace + 1;
-  STATIC_ASSERT(kExternalSpace < (1 << kSpaceTagSize));
-
-  static const int kInvalidValue = 0;
-  static const int kDummyValue = 1;
-
-  // The chunk offset can also be used to encode the index of special values.
-  class ChunkOffsetBits : public BitField<uint32_t, 0, kChunkOffsetSize> {};
-  class ChunkIndexBits
-      : public BitField<uint32_t, ChunkOffsetBits::kNext, kChunkIndexSize> {};
-  class ValueIndexBits : public BitField<uint32_t, 0, kValueIndexSize> {};
-  STATIC_ASSERT(ChunkIndexBits::kNext == ValueIndexBits::kNext);
-  class SpaceBits : public BitField<int, kValueIndexSize, kSpaceTagSize> {};
-  STATIC_ASSERT(SpaceBits::kNext == 32);
-
-  uint32_t bitfield_;
-
-  friend class SerializerReferenceMap;
-};
-
-// Mapping objects to their location after deserialization.
-// This is used during building, but not at runtime by V8.
-class SerializerReferenceMap {
- public:
-  SerializerReferenceMap()
-      : no_allocation_(), map_(), attached_reference_index_(0) {}
-
-  SerializerReference Lookup(void* obj) {
-    Maybe<uint32_t> maybe_index = map_.Get(obj);
-    return maybe_index.IsJust() ? SerializerReference(maybe_index.FromJust())
-                                : SerializerReference();
-  }
-
-  void Add(void* obj, SerializerReference b) {
-    DCHECK(b.is_valid());
-    DCHECK(map_.Get(obj).IsNothing());
-    map_.Set(obj, b.bitfield_);
-  }
-
-  SerializerReference AddAttachedReference(HeapObject* attached_reference) {
-    SerializerReference reference =
-        SerializerReference::AttachedReference(attached_reference_index_++);
-    Add(attached_reference, reference);
-    return reference;
-  }
-
- private:
-  DisallowHeapAllocation no_allocation_;
-  PointerToIndexHashMap<void*> map_;
-  int attached_reference_index_;
-  DISALLOW_COPY_AND_ASSIGN(SerializerReferenceMap);
-};
 }  // namespace internal
 }  // namespace v8
@@ -8,7 +8,6 @@
 #include <vector>

 #include "src/accessors.h"
-#include "src/address-map.h"
 #include "src/builtins/builtins.h"
 #include "src/external-reference.h"
@@ -1080,6 +1080,7 @@ DEFINE_BOOL(profile_deserialization, false,
             "Print the time it takes to deserialize the snapshot.")
 DEFINE_BOOL(serialization_statistics, false,
             "Collect statistics on serialized objects.")
+DEFINE_UINT(serialization_chunk_size, 0, "Custom size for serialization chunks")

 // Regexp
 DEFINE_BOOL(regexp_optimization, true, "generate optimized regexp code")
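The flag defaults to zero, which preserves the old page-sized chunks. As a hedged usage sketch (the d8 shell and the script name are just example inputs, and the dashed spelling assumes V8's usual dash/underscore flag normalization):

  d8 --serialization-chunk-size=4096 --serialization-statistics test.js

Nonzero values must not exceed the page area size of the target space; DefaultSerializerAllocator::TargetChunkSize below enforces this with a DCHECK.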
@@ -584,7 +584,7 @@ enum AllocationSpace {
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
 };
-constexpr int kSpaceTagSize = 4;
+constexpr int kSpaceTagSize = 3;
 STATIC_ASSERT(FIRST_SPACE == 0);

 enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
@@ -18,9 +18,7 @@ SerializerReference BuiltinSerializerAllocator::Allocate(AllocationSpace space,
   // system. Instead of worrying about chunk indices and offsets, we simply
   // need to generate unique offsets here.
-  const uint32_t virtual_chunk_index = 0;
-  const auto ref = SerializerReference::BackReference(
-      CODE_SPACE, virtual_chunk_index, virtual_chunk_offset_);
+  const auto ref = SerializerReference::BuiltinReference(virtual_chunk_offset_);

   virtual_chunk_size_ += size;
   virtual_chunk_offset_ += kObjectAlignment;  // Needs to be aligned.
@@ -31,11 +29,8 @@ SerializerReference BuiltinSerializerAllocator::Allocate(AllocationSpace space,
 #ifdef DEBUG
 bool BuiltinSerializerAllocator::BackReferenceIsAlreadyAllocated(
     SerializerReference reference) const {
-  DCHECK(reference.is_back_reference());
-  AllocationSpace space = reference.space();
-  DCHECK_EQ(space, CODE_SPACE);
-  DCHECK_EQ(reference.chunk_index(), 0);
-  return reference.chunk_offset() < virtual_chunk_offset_;
+  DCHECK(reference.is_builtin_reference());
+  return reference.builtin_offset() < virtual_chunk_offset_;
 }
 #endif  // DEBUG
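Builtins previously masqueraded as CODE_SPACE back references with a fixed virtual chunk index of 0. With the dedicated kBuiltinReference special type introduced in src/snapshot/references.h below, the builtin offset is stored directly, and the debug-only check reduces to comparing that offset against the allocator's high-water mark.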
@@ -31,6 +31,11 @@ ScriptData::ScriptData(const byte* data, int length)
   }
 }
+
+CodeSerializer::CodeSerializer(Isolate* isolate, uint32_t source_hash)
+    : Serializer(isolate), source_hash_(source_hash) {
+  allocator()->UseCustomChunkSize(FLAG_serialization_chunk_size);
+}

 // static
 ScriptCompiler::CachedData* CodeSerializer::Serialize(
     Handle<SharedFunctionInfo> info) {
@@ -253,7 +258,7 @@ void CodeSerializer::SerializeGeneric(HeapObject* heap_object,
 void CodeSerializer::SerializeCodeStub(Code* code_stub, HowToCode how_to_code,
                                        WhereToPoint where_to_point) {
   // We only arrive here if we have not encountered this code stub before.
-  DCHECK(!reference_map()->Lookup(code_stub).is_valid());
+  DCHECK(!reference_map()->LookupReference(code_stub).is_valid());
   uint32_t stub_key = code_stub->stub_key();
   DCHECK(CodeStub::MajorKeyFromKey(stub_key) != CodeStub::NoCache);
   DCHECK(!CodeStub::GetCode(isolate(), stub_key).is_null());
@@ -58,8 +58,7 @@ class CodeSerializer : public Serializer<> {
   uint32_t source_hash() const { return source_hash_; }

  protected:
-  explicit CodeSerializer(Isolate* isolate, uint32_t source_hash)
-      : Serializer(isolate), source_hash_(source_hash) {}
+  CodeSerializer(Isolate* isolate, uint32_t source_hash);
   ~CodeSerializer() override { OutputStatistics("CodeSerializer"); }

   virtual void SerializeCodeObject(Code* code_object, HowToCode how_to_code,
@@ -5,6 +5,7 @@
 #include "src/snapshot/default-serializer-allocator.h"

 #include "src/heap/heap-inl.h"
+#include "src/snapshot/references.h"
 #include "src/snapshot/serializer.h"
 #include "src/snapshot/snapshot-source-sink.h"
@@ -19,18 +20,34 @@ DefaultSerializerAllocator::DefaultSerializerAllocator(
   }
 }
+
+void DefaultSerializerAllocator::UseCustomChunkSize(uint32_t chunk_size) {
+  custom_chunk_size_ = chunk_size;
+}
+
+static uint32_t PageSizeOfSpace(int space) {
+  return static_cast<uint32_t>(
+      MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(space)));
+}
+
+uint32_t DefaultSerializerAllocator::TargetChunkSize(int space) {
+  if (custom_chunk_size_ == 0) return PageSizeOfSpace(space);
+  DCHECK_LE(custom_chunk_size_, PageSizeOfSpace(space));
+  return custom_chunk_size_;
+}

 SerializerReference DefaultSerializerAllocator::Allocate(AllocationSpace space,
                                                          uint32_t size) {
   DCHECK(space >= 0 && space < kNumberOfPreallocatedSpaces);
-  DCHECK(size > 0 && size <= MaxChunkSizeInSpace(space));
+  DCHECK(size > 0 && size <= PageSizeOfSpace(space));

   // Maps are allocated through AllocateMap.
   DCHECK_NE(MAP_SPACE, space);

-  uint32_t new_chunk_size = pending_chunk_[space] + size;
-  if (new_chunk_size > MaxChunkSizeInSpace(space)) {
-    // The new chunk size would not fit onto a single page. Complete the
-    // current chunk and start a new one.
+  uint32_t old_chunk_size = pending_chunk_[space];
+  uint32_t new_chunk_size = old_chunk_size + size;
+  // Start a new chunk if the new size exceeds the target chunk size.
+  // We may exceed the target chunk size if the single object size does.
+  if (new_chunk_size > TargetChunkSize(space)) {
     serializer_->PutNextChunk(space);
     completed_chunks_[space].push_back(pending_chunk_[space]);
     pending_chunk_[space] = 0;
@@ -136,13 +153,5 @@ void DefaultSerializerAllocator::OutputStatistics() {
   PrintF("%16d\n", large_objects_total_size_);
 }

-// static
-uint32_t DefaultSerializerAllocator::MaxChunkSizeInSpace(int space) {
-  DCHECK(0 <= space && space < kNumberOfPreallocatedSpaces);
-
-  return static_cast<uint32_t>(
-      MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(space)));
-}
-
 }  // namespace internal
 }  // namespace v8
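The allocation policy above is compact enough to model standalone. Below is a hedged, self-contained sketch (ChunkingModel and the single-space simplification are invented for illustration, not V8 API): objects accumulate in a pending chunk, the chunk is flushed once the next object would push it past the target, and, as the comment above notes, a lone object larger than the target simply yields an oversized chunk. The pending_ > 0 guard that avoids recording empty chunks is a small simplification over the hunk above.

#include <cstdint>
#include <utility>
#include <vector>

// Minimal model: returns {chunk_index, chunk_offset} for each allocation,
// mirroring DefaultSerializerAllocator::Allocate for a single space.
class ChunkingModel {
 public:
  explicit ChunkingModel(uint32_t target_chunk_size)
      : target_chunk_size_(target_chunk_size) {}

  std::pair<uint32_t, uint32_t> Allocate(uint32_t size) {
    // Start a new chunk if the new size would exceed the target chunk size.
    // A single object larger than the target is still placed, so a chunk may
    // exceed the target by design.
    if (pending_ + size > target_chunk_size_ && pending_ > 0) {
      completed_.push_back(pending_);  // corresponds to PutNextChunk()
      pending_ = 0;
    }
    uint32_t offset = pending_;
    pending_ += size;
    return {static_cast<uint32_t>(completed_.size()), offset};
  }

 private:
  uint32_t target_chunk_size_;
  uint32_t pending_ = 0;             // like pending_chunk_[space]
  std::vector<uint32_t> completed_;  // like completed_chunks_[space]
};

With a target of 32 bytes, three 24-byte objects land at (chunk 0, offset 0), (1, 0), (2, 0); with a 4 KB target they pack into (0, 0), (0, 24), (0, 48). This is why small --serialization_chunk_size values multiply the chunk count, which the reference encoding below must accommodate.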
@@ -23,6 +23,8 @@ class DefaultSerializerAllocator final {
   SerializerReference AllocateLargeObject(uint32_t size);
   SerializerReference AllocateOffHeapBackingStore();
+
+  void UseCustomChunkSize(uint32_t chunk_size);

 #ifdef DEBUG
   bool BackReferenceIsAlreadyAllocated(
       SerializerReference back_reference) const;
@@ -33,13 +35,13 @@ class DefaultSerializerAllocator final {
   void OutputStatistics();

  private:
+  uint32_t TargetChunkSize(int space);
+
   static constexpr int kNumberOfPreallocatedSpaces =
       SerializerDeserializer::kNumberOfPreallocatedSpaces;
   static constexpr int kNumberOfSpaces =
       SerializerDeserializer::kNumberOfSpaces;

-  static uint32_t MaxChunkSizeInSpace(int space);
-
   // Objects from the same space are put into chunks for bulk-allocation
   // when deserializing. We have to make sure that each chunk fits into a
   // page. So we track the chunk size in pending_chunk_ of a space, but
@@ -61,6 +63,8 @@ class DefaultSerializerAllocator final {
   // from index 0.
   uint32_t seen_backing_stores_index_ = 1;

+  uint32_t custom_chunk_size_ = 0;
+
   // The current serializer.
   Serializer<DefaultSerializerAllocator>* const serializer_;
@@ -276,34 +276,38 @@ int Deserializer<AllocatorT>::MaybeReplaceWithDeserializeLazy(int builtin_id) {
 template <class AllocatorT>
 HeapObject* Deserializer<AllocatorT>::GetBackReferencedObject(int space) {
   HeapObject* obj;
-  SerializerReference back_reference =
-      SerializerReference::FromBitfield(source_.GetInt());
   switch (space) {
     case LO_SPACE:
-      obj = allocator()->GetLargeObject(back_reference.large_object_index());
+      obj = allocator()->GetLargeObject(source_.GetInt());
       break;
     case MAP_SPACE:
-      obj = allocator()->GetMap(back_reference.map_index());
+      obj = allocator()->GetMap(source_.GetInt());
       break;
-    case RO_SPACE:
+    case RO_SPACE: {
+      uint32_t chunk_index = source_.GetInt();
+      uint32_t chunk_offset = source_.GetInt();
       if (isolate()->heap()->deserialization_complete()) {
         PagedSpace* read_only_space = isolate()->heap()->read_only_space();
         Page* page = read_only_space->first_page();
-        for (uint32_t i = 0; i < back_reference.chunk_index(); ++i) {
+        for (uint32_t i = 0; i < chunk_index; ++i) {
           page = page->next_page();
         }
-        Address address = page->OffsetToAddress(back_reference.chunk_offset());
+        Address address = page->OffsetToAddress(chunk_offset);
         obj = HeapObject::FromAddress(address);
-        break;
+      } else {
+        obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
+                                     chunk_index, chunk_offset);
       }
-      V8_FALLTHROUGH;
-    default:
-      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
-                                   back_reference.chunk_index(),
-                                   back_reference.chunk_offset());
       break;
+    }
+    default: {
+      uint32_t chunk_index = source_.GetInt();
+      uint32_t chunk_offset = source_.GetInt();
+      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
+                                   chunk_index, chunk_offset);
       break;
+    }
   }

   if (deserializing_user_code() && obj->IsThinString()) {
     obj = ThinString::cast(obj)->actual();
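This is the reading half of the wire-format change: a back reference used to arrive as a single packed 32-bit value decoded through SerializerReference::FromBitfield, so the chunk index was confined to whatever bits remained after the space tag and the page-sized offset field. It is now read as separate integers via source_.GetInt(): one index for MAP_SPACE and LO_SPACE, and a chunk index followed by a chunk offset for the other spaces, so the chunk count is no longer bounded by the old bit budget. The RO_SPACE case also gains an else branch, letting the same path serve both during and after deserialization of the read-only space.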
@@ -20,6 +20,7 @@ PartialSerializer::PartialSerializer(
       can_be_rehashed_(true),
       context_(nullptr) {
   InitializeCodeAddressMap();
+  allocator()->UseCustomChunkSize(FLAG_serialization_chunk_size);
 }

 PartialSerializer::~PartialSerializer() {
@@ -139,7 +140,7 @@ void PartialSerializer::SerializeEmbedderFields() {
     HandleScope scope(isolate());
     Handle<JSObject> obj(embedder_field_holders_.back(), isolate());
     embedder_field_holders_.pop_back();
-    SerializerReference reference = reference_map()->Lookup(*obj);
+    SerializerReference reference = reference_map()->LookupReference(*obj);
     DCHECK(reference.is_back_reference());
     int embedder_fields_count = obj->GetEmbedderFieldCount();
     for (int i = 0; i < embedder_fields_count; i++) {
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_SNAPSHOT_REFERENCES_H_
#define V8_SNAPSHOT_REFERENCES_H_

#include "src/assert-scope.h"
#include "src/base/hashmap.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

class SerializerReference {
 private:
  enum SpecialValueType {
    kSpecialValue,
    kAttachedReference,
    kOffHeapBackingStore,
    kBuiltinReference,
  };

  static const int kSpecialValueSpace = LAST_SPACE + 1;
  static const int kInvalidValue = 0;
  static const int kDummyValue = 1;

  SerializerReference(SpecialValueType type, uint32_t value)
      : bitfield_(SpaceBits::encode(kSpecialValueSpace) |
                  SpecialValueTypeBits::encode(type)),
        value_(value) {}

 public:
  SerializerReference() : SerializerReference(kSpecialValue, kInvalidValue) {}

  SerializerReference(uint32_t space, uint32_t chunk_index,
                      uint32_t chunk_offset)
      : bitfield_(SpaceBits::encode(space) |
                  ChunkIndexBits::encode(chunk_index)),
        value_(chunk_offset) {}

  static SerializerReference BackReference(AllocationSpace space,
                                           uint32_t chunk_index,
                                           uint32_t chunk_offset) {
    DCHECK(IsAligned(chunk_offset, kObjectAlignment));
    DCHECK_LT(space, LO_SPACE);
    return SerializerReference(space, chunk_index, chunk_offset);
  }

  static SerializerReference MapReference(uint32_t index) {
    return SerializerReference(MAP_SPACE, 0, index);
  }

  static SerializerReference OffHeapBackingStoreReference(uint32_t index) {
    return SerializerReference(kOffHeapBackingStore, index);
  }

  static SerializerReference LargeObjectReference(uint32_t index) {
    return SerializerReference(LO_SPACE, 0, index);
  }

  static SerializerReference AttachedReference(uint32_t index) {
    return SerializerReference(kAttachedReference, index);
  }

  static SerializerReference BuiltinReference(uint32_t index) {
    return SerializerReference(kBuiltinReference, index);
  }

  static SerializerReference DummyReference() {
    return SerializerReference(kSpecialValue, kDummyValue);
  }

  bool is_valid() const {
    return SpaceBits::decode(bitfield_) != kSpecialValueSpace ||
           SpecialValueTypeBits::decode(bitfield_) != kSpecialValue ||
           value_ != kInvalidValue;
  }

  bool is_back_reference() const {
    return SpaceBits::decode(bitfield_) <= LAST_SPACE;
  }

  AllocationSpace space() const {
    DCHECK(is_back_reference());
    return static_cast<AllocationSpace>(SpaceBits::decode(bitfield_));
  }

  uint32_t chunk_offset() const {
    DCHECK(is_back_reference());
    return value_;
  }

  uint32_t chunk_index() const {
    DCHECK(space() != MAP_SPACE && space() != LO_SPACE);
    return ChunkIndexBits::decode(bitfield_);
  }

  uint32_t map_index() const {
    DCHECK_EQ(MAP_SPACE, SpaceBits::decode(bitfield_));
    return value_;
  }

  bool is_off_heap_backing_store_reference() const {
    return SpaceBits::decode(bitfield_) == kSpecialValueSpace &&
           SpecialValueTypeBits::decode(bitfield_) == kOffHeapBackingStore;
  }

  uint32_t off_heap_backing_store_index() const {
    DCHECK(is_off_heap_backing_store_reference());
    return value_;
  }

  uint32_t large_object_index() const {
    DCHECK_EQ(LO_SPACE, SpaceBits::decode(bitfield_));
    return value_;
  }

  bool is_attached_reference() const {
    return SpaceBits::decode(bitfield_) == kSpecialValueSpace &&
           SpecialValueTypeBits::decode(bitfield_) == kAttachedReference;
  }

  uint32_t attached_reference_index() const {
    DCHECK(is_attached_reference());
    return value_;
  }

  bool is_builtin_reference() const {
    return SpaceBits::decode(bitfield_) == kSpecialValueSpace &&
           SpecialValueTypeBits::decode(bitfield_) == kBuiltinReference;
  }

  uint32_t builtin_offset() const {
    DCHECK(is_builtin_reference());
    return value_;
  }

 private:
  class SpaceBits : public BitField<int, 0, kSpaceTagSize> {};
  class ChunkIndexBits
      : public BitField<uint32_t, SpaceBits::kNext, 32 - kSpaceTagSize> {};
  class SpecialValueTypeBits
      : public BitField<SpecialValueType, SpaceBits::kNext,
                        32 - kSpaceTagSize> {};

  // We use two fields to store a reference.
  // In case of a normal back reference, the bitfield_ stores the space and
  // the chunk index. In case of special references, it uses a special value
  // for space and stores the special value type.
  uint32_t bitfield_;
  // value_ stores either chunk offset or special value.
  uint32_t value_;

  friend class SerializerReferenceMap;
};

class SerializerReferenceMap
    : public base::TemplateHashMapImpl<uintptr_t, SerializerReference,
                                       base::KeyEqualityMatcher<intptr_t>,
                                       base::DefaultAllocationPolicy> {
 public:
  typedef base::TemplateHashMapEntry<uintptr_t, SerializerReference> Entry;

  SerializerReferenceMap() : no_allocation_(), attached_reference_index_(0) {}

  SerializerReference LookupReference(void* value) const {
    uintptr_t key = Key(value);
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return SerializerReference();
    return entry->value;
  }

  void Add(void* obj, SerializerReference reference) {
    DCHECK(reference.is_valid());
    DCHECK(!LookupReference(obj).is_valid());
    uintptr_t key = Key(obj);
    LookupOrInsert(key, Hash(key))->value = reference;
  }

  SerializerReference AddAttachedReference(void* attached_reference) {
    SerializerReference reference =
        SerializerReference::AttachedReference(attached_reference_index_++);
    Add(attached_reference, reference);
    return reference;
  }

 private:
  static inline uintptr_t Key(void* value) {
    return reinterpret_cast<uintptr_t>(value);
  }

  static uint32_t Hash(uintptr_t key) { return static_cast<uint32_t>(key); }

  DisallowHeapAllocation no_allocation_;
  int attached_reference_index_;

  DISALLOW_COPY_AND_ASSIGN(SerializerReferenceMap);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_SNAPSHOT_REFERENCES_H_
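To make the two-field layout concrete, here is a hedged standalone sketch (RefModel and the space-tag value 2 are illustrative only; the real encoding uses the BitField helpers above):

#include <cassert>
#include <cstdint>

constexpr int kSpaceTagSize = 3;  // matches the globals.h change above

// bitfield_: [ chunk index : 29 bits ][ space tag : 3 bits ]
// value_:    chunk offset (or the special-value payload)
struct RefModel {
  uint32_t bitfield_;
  uint32_t value_;
};

RefModel BackReference(uint32_t space, uint32_t chunk_index,
                       uint32_t chunk_offset) {
  return {space | (chunk_index << kSpaceTagSize), chunk_offset};
}

int main() {
  // A back reference into space tag 2, chunk 1000, offset 0x40. Under the
  // old single-bitfield encoding a chunk index this large could overflow its
  // field; here the index gets 29 bits and the offset a full 32 bits.
  RefModel ref = BackReference(2, 1000, 0x40);
  assert((ref.bitfield_ & ((1u << kSpaceTagSize) - 1)) == 2);  // space tag
  assert((ref.bitfield_ >> kSpaceTagSize) == 1000);            // chunk index
  assert(ref.value_ == 0x40);                                  // chunk offset
  return 0;
}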
@@ -9,6 +9,7 @@
 #include "src/base/bits.h"
 #include "src/external-reference-table.h"
 #include "src/globals.h"
+#include "src/snapshot/references.h"
 #include "src/utils.h"
 #include "src/visitors.h"
@@ -177,7 +177,7 @@ bool Serializer<AllocatorT>::SerializeBackReference(HeapObject* obj,
                                                     HowToCode how_to_code,
                                                     WhereToPoint where_to_point,
                                                     int skip) {
-  SerializerReference reference = reference_map_.Lookup(obj);
+  SerializerReference reference = reference_map_.LookupReference(obj);
   if (!reference.is_valid()) return false;

   // Encode the location of an already deserialized object in order to write
   // its location into a later object. We can encode the location as an
@@ -291,7 +291,21 @@ template <class AllocatorT>
 void Serializer<AllocatorT>::PutBackReference(HeapObject* object,
                                               SerializerReference reference) {
   DCHECK(allocator()->BackReferenceIsAlreadyAllocated(reference));
-  sink_.PutInt(reference.back_reference(), "BackRefValue");
+  switch (reference.space()) {
+    case MAP_SPACE:
+      sink_.PutInt(reference.map_index(), "BackRefChunkOffset");
+      break;
+    case LO_SPACE:
+      sink_.PutInt(reference.large_object_index(), "BackRefChunkOffset");
+      break;
+    default:
+      sink_.PutInt(reference.chunk_index(), "BackRefChunkIndex");
+      sink_.PutInt(reference.chunk_offset(), "BackRefChunkOffset");
+      break;
+  }
   hot_objects_.Add(object);
 }
@@ -406,7 +420,7 @@ template <class AllocatorT>
 int32_t Serializer<AllocatorT>::ObjectSerializer::SerializeBackingStore(
     void* backing_store, int32_t byte_length) {
   SerializerReference reference =
-      serializer_->reference_map()->Lookup(backing_store);
+      serializer_->reference_map()->LookupReference(backing_store);

   // Serialize the off-heap backing store.
   if (!reference.is_valid()) {
@@ -678,7 +692,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializeDeferred() {
   int size = object_->Size();
   Map* map = object_->map();
   SerializerReference back_reference =
-      serializer_->reference_map()->Lookup(object_);
+      serializer_->reference_map()->LookupReference(object_);
   DCHECK(back_reference.is_back_reference());

   // Serialize the rest of the object.
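PutBackReference is the writing half of the format change consumed by GetBackReferencedObject above: map and large-object references serialize as a single index, while ordinary back references become a chunk index followed by a chunk offset. The BackReferenceIsAlreadyAllocated DCHECK keeps the writer and the allocator in sync in debug builds.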
@@ -141,7 +141,7 @@ class Serializer : public SerializerDeserializer {
   const std::vector<byte>* Payload() const { return sink_.data(); }

   bool ReferenceMapContains(HeapObject* o) {
-    return reference_map()->Lookup(o).is_valid();
+    return reference_map()->LookupReference(o).is_valid();
   }

   Isolate* isolate() const { return isolate_; }
@@ -219,7 +219,7 @@ class Serializer : public SerializerDeserializer {
   Code* CopyCode(Code* code);

   void QueueDeferredObject(HeapObject* obj) {
-    DCHECK(reference_map_.Lookup(obj).is_back_reference());
+    DCHECK(reference_map_.LookupReference(obj).is_back_reference());
     deferred_objects_.push_back(obj);
   }
@@ -260,8 +260,9 @@ v8::StartupData Snapshot::CreateSnapshotBlob(
       reinterpret_cast<const char*>(startup_snapshot->RawData().start()),
       payload_length);
   if (FLAG_profile_deserialization) {
-    PrintF("Snapshot blob consists of:\n%10d bytes for startup\n",
-           payload_length);
+    PrintF("Snapshot blob consists of:\n%10d bytes in %d chunks for startup\n",
+           payload_length,
+           static_cast<uint32_t>(startup_snapshot->Reservations().size()));
   }
   payload_offset += payload_length;
@@ -286,7 +287,8 @@ v8::StartupData Snapshot::CreateSnapshotBlob(
         reinterpret_cast<const char*>(context_snapshot->RawData().start()),
         payload_length);
     if (FLAG_profile_deserialization) {
-      PrintF("%10d bytes for context #%d\n", payload_length, i);
+      PrintF("%10d bytes in %d chunks for context #%d\n", payload_length,
+             static_cast<uint32_t>(context_snapshot->Reservations().size()), i);
     }
     payload_offset += payload_length;
   }
@@ -299,9 +299,7 @@ static void SanityCheck(v8::Isolate* v8_isolate) {
       isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("Empty"));
 }

-UNINITIALIZED_TEST(StartupSerializerOnce) {
-  DisableLazyDeserialization();
-  DisableAlwaysOpt();
+void TestStartupSerializerOnceImpl() {
   v8::Isolate* isolate = TestIsolate::NewInitialized();
   StartupBlobs blobs = Serialize(isolate);
   isolate->Dispose();
@@ -319,6 +317,40 @@ UNINITIALIZED_TEST(StartupSerializerOnce) {
   blobs.Dispose();
 }

+UNINITIALIZED_TEST(StartupSerializerOnce) {
+  DisableLazyDeserialization();
+  DisableAlwaysOpt();
+  TestStartupSerializerOnceImpl();
+}
+
+UNINITIALIZED_TEST(StartupSerializerOnce32) {
+  DisableLazyDeserialization();
+  DisableAlwaysOpt();
+  FLAG_serialization_chunk_size = 32;
+  TestStartupSerializerOnceImpl();
+}
+
+UNINITIALIZED_TEST(StartupSerializerOnce1K) {
+  DisableLazyDeserialization();
+  DisableAlwaysOpt();
+  FLAG_serialization_chunk_size = 1 * KB;
+  TestStartupSerializerOnceImpl();
+}
+
+UNINITIALIZED_TEST(StartupSerializerOnce4K) {
+  DisableLazyDeserialization();
+  DisableAlwaysOpt();
+  FLAG_serialization_chunk_size = 4 * KB;
+  TestStartupSerializerOnceImpl();
+}
+
+UNINITIALIZED_TEST(StartupSerializerOnce32K) {
+  DisableLazyDeserialization();
+  DisableAlwaysOpt();
+  FLAG_serialization_chunk_size = 32 * KB;
+  TestStartupSerializerOnceImpl();
+}
+
 UNINITIALIZED_TEST(StartupSerializerRootMapDependencies) {
   DisableAlwaysOpt();
   v8::SnapshotCreator snapshot_creator;
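The added tests rerun the shared startup-serializer test body with 32-byte, 1 KB, 4 KB, and 32 KB chunks. The 32-byte case is the aggressive one: most objects exceed the target on their own, exercising the oversized-chunk path in DefaultSerializerAllocator::Allocate, while the larger sizes exercise frequent chunk rollover and the widened chunk-index encoding.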