Commit 49e3bfd5 authored by Jakob Gruber, committed by Commit Bot

[snapshot] Move builtins to dedicated snapshot area

As a first step towards lazy builtin deserialization, this CL moves
builtins to their own dedicated area in the snapshot blob, physically
located after startup data and before context-specific data.

The startup- and partial serializers now serialize all seen builtins as
references, i.e. they only encode the relevant builtin id (taking care
to preserve special behavior around the interpreter trampoline and
CompileLazy). Builtins are later fully serialized by the
BuiltinSerializer. The separate blobs are finally glued together by
CreateSnapshotBlob.

Deserialization takes the same steps: when we see builtin reference
bytecodes before builtins have been deserialized, we push to a list of
deferred builtin references. After builtin deserialization, this list is
iterated and all builtin references are fixed up.

Bug: v8:6624
Cq-Include-Trybots: master.tryserver.chromium.linux:linux_chromium_rel_ng
Change-Id: Idee42fa9c92bdbe8d5b8c4b8bf3ca9dd39634004
Reviewed-on: https://chromium-review.googlesource.com/610225
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47596}
parent 7571de3a
......@@ -1936,6 +1936,10 @@ v8_source_set("v8_base") {
"src/signature.h",
"src/simulator.h",
"src/small-pointer-list.h",
"src/snapshot/builtin-deserializer.cc",
"src/snapshot/builtin-deserializer.h",
"src/snapshot/builtin-serializer.cc",
"src/snapshot/builtin-serializer.h",
"src/snapshot/code-serializer.cc",
"src/snapshot/code-serializer.h",
"src/snapshot/deserializer.cc",
......
......@@ -62,6 +62,7 @@
#include "src/runtime-profiler.h"
#include "src/runtime/runtime.h"
#include "src/simulator.h"
#include "src/snapshot/builtin-serializer.h"
#include "src/snapshot/code-serializer.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"
......@@ -733,6 +734,12 @@ StartupData SnapshotCreator::CreateBlob(
context_snapshots.push_back(new i::SnapshotData(&partial_serializer));
}
// Builtin serialization places additional objects into the partial snapshot
// cache and thus needs to happen before SerializeWeakReferencesAndDeferred
// is called below.
i::BuiltinSerializer builtin_serializer(isolate, &startup_serializer);
builtin_serializer.SerializeBuiltins();
startup_serializer.SerializeWeakReferencesAndDeferred();
can_be_rehashed = can_be_rehashed && startup_serializer.can_be_rehashed();
......@@ -743,8 +750,9 @@ StartupData SnapshotCreator::CreateBlob(
#endif // DEBUG
i::SnapshotData startup_snapshot(&startup_serializer);
i::SnapshotData builtin_snapshot(&builtin_serializer);
StartupData result = i::Snapshot::CreateSnapshotBlob(
&startup_snapshot, context_snapshots, can_be_rehashed);
&startup_snapshot, &builtin_snapshot, context_snapshots, can_be_rehashed);
// Delete heap-allocated context snapshot instances.
for (const auto context_snapshot : context_snapshots) {
......
......@@ -1339,8 +1339,11 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
if (space == MAP_SPACE) {
// We allocate each map individually to avoid fragmentation.
maps->clear();
DCHECK_EQ(1, reservation->size());
int num_maps = reservation->at(0).size / Map::kSize;
DCHECK_LE(reservation->size(), 2);
int reserved_size = 0;
for (const Chunk& c : *reservation) reserved_size += c.size;
DCHECK_EQ(0, reserved_size % Map::kSize);
int num_maps = reserved_size / Map::kSize;
for (int i = 0; i < num_maps; i++) {
// The deserializer will update the skip list.
AllocationResult allocation = map_space()->AllocateRawUnaligned(
......@@ -1360,8 +1363,10 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
}
} else if (space == LO_SPACE) {
// Just check that we can allocate during deserialization.
DCHECK_EQ(1, reservation->size());
perform_gc = !CanExpandOldGeneration(reservation->at(0).size);
DCHECK_LE(reservation->size(), 2);
int reserved_size = 0;
for (const Chunk& c : *reservation) reserved_size += c.size;
perform_gc = !CanExpandOldGeneration(reserved_size);
} else {
for (auto& chunk : *reservation) {
AllocationResult allocation;
......
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/snapshot.h"
namespace v8 {
namespace internal {
// Eagerly deserializes every builtin from the builtin snapshot area by
// visiting each slot of the isolate's builtins list. Must run with heap
// allocation disallowed: references to builtins recorded earlier (during
// startup deserialization) are only patched at the end of this function,
// and a GC in-between would observe the unpatched placeholder pointers.
void BuiltinDeserializer::DeserializeAllBuiltins() {
  DCHECK(!AllowHeapAllocation::IsAllowed());
  // Visit each builtin slot; the deserializer fills it from the blob.
  isolate()->builtins()->IterateBuiltins(this);
  // Now that all builtin code objects exist, fix up the deferred
  // builtin references collected during earlier deserialization.
  PostProcessDeferredBuiltinReferences();
}
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_SNAPSHOT_BUILTIN_DESERIALIZER_H_
#define V8_SNAPSHOT_BUILTIN_DESERIALIZER_H_
#include "src/snapshot/deserializer.h"
namespace v8 {
namespace internal {
class SnapshotData;
// Deserializes the builtins blob, i.e. the dedicated snapshot area located
// between the startup data and the context-specific data.
class BuiltinDeserializer final : public Deserializer {
 public:
  // NOTE(review): the 'false' presumably selects non-user-code
  // deserialization mode — confirm against Deserializer's constructor.
  explicit BuiltinDeserializer(const SnapshotData* data)
      : Deserializer(data, false) {}

  // Expose Deserializer::Initialize.
  using Deserializer::Initialize;

  // Builtins deserialization is tightly integrated with deserialization of the
  // startup blob. In particular, we need to ensure that no GC can occur
  // between startup- and builtins deserialization, as all existing builtin
  // references need to be fixed up after builtins have been deserialized.
  // Thus this quirky two-sided API: required memory needs to be reserved
  // pre-startup deserialization (see Deserializer::ReserveSpace overload
  // taking both deserializers), and builtins must be deserialized at exactly
  // the right point during startup deserialization.
  void DeserializeAllBuiltins();
};
} // namespace internal
} // namespace v8
#endif // V8_SNAPSHOT_BUILTIN_DESERIALIZER_H_
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/snapshot/builtin-serializer.h"
#include "src/objects-inl.h"
#include "src/snapshot/startup-serializer.h"
namespace v8 {
namespace internal {
// Constructs a serializer for the builtins snapshot area. The startup
// serializer is retained so embedded objects can be routed through its
// partial snapshot cache and so root-serialization state can be queried
// (see SerializeObject below).
BuiltinSerializer::BuiltinSerializer(Isolate* isolate,
                                     StartupSerializer* startup_serializer)
    : Serializer(isolate), startup_serializer_(startup_serializer) {}
// Emits serialization statistics for this serializer instance.
BuiltinSerializer::~BuiltinSerializer() {
  OutputStatistics("BuiltinSerializer");
}
// Serializes all builtins by iterating the isolate's builtins list; each
// visited slot is handled in VisitRootPointers below.
void BuiltinSerializer::SerializeBuiltins() {
  isolate()->builtins()->IterateBuiltins(this);
}
// Visits the builtins-list root slots handed out by IterateBuiltins. Every
// slot holds a builtin Code object, each of which is serialized in full.
void BuiltinSerializer::VisitRootPointers(Root root, Object** start,
                                          Object** end) {
  Object** slot = start;
  while (slot < end) {
    SerializeBuiltin(Code::cast(*slot));
    ++slot;
  }
  // Pad with kNop since GetInt() might read too far.
  Pad();
}
// Serializes a single builtin code object in full (object contents, not a
// reference).
void BuiltinSerializer::SerializeBuiltin(Code* code) {
  DCHECK_GE(code->builtin_index(), 0);

  // All builtins are serialized unconditionally when the respective builtin
  // is reached while iterating the builtins list. A builtin seen at any
  // other time (e.g. startup snapshot creation, or while iterating a builtin
  // code object during builtin serialization) is serialized by reference -
  // see BuiltinSerializer::SerializeObject below.
  ObjectSerializer serializer(this, code, &sink_, kPlain, kStartOfObject);
  serializer.Serialize();
}
// Serializes an object reachable from a builtin's body. Roots are emitted as
// root references, builtin code objects as builtin references, and all other
// embedded objects go through the startup serializer's partial snapshot
// cache.
void BuiltinSerializer::SerializeObject(HeapObject* o, HowToCode how_to_code,
                                        WhereToPoint where_to_point, int skip) {
  DCHECK(!o->IsSmi());

  // Roots can simply be serialized as root references.
  int root_index = root_index_map_.Lookup(o);
  if (root_index != RootIndexMap::kInvalidRootIndex) {
    // Builtins are serialized after startup data, so every root must
    // already be in the startup snapshot.
    DCHECK(startup_serializer_->root_has_been_serialized(root_index));
    PutRoot(root_index, o, how_to_code, where_to_point, skip);
    return;
  }

  // Builtins are serialized using a dedicated bytecode. We only reach this
  // point if encountering a Builtin e.g. while iterating the body of another
  // builtin.
  if (SerializeBuiltinReference(o, how_to_code, where_to_point, skip)) return;

  // Embedded objects are serialized as part of the partial snapshot cache.
  // Currently we expect to see:
  // * Code: Jump targets.
  // * ByteArrays: Relocation infos.
  // * FixedArrays: Handler tables.
  // * Strings: CSA_ASSERTs in debug builds, various other string constants.
  // * HeapNumbers: Embedded constants.
  // TODO(6624): Jump targets should never trigger content serialization, it
  // should always result in a reference instead. Reloc infos and handler
  // tables should not end up in the partial snapshot cache.
  FlushSkip(skip);

  int cache_index = startup_serializer_->PartialSnapshotCacheIndex(o);
  sink_.Put(kPartialSnapshotCache + how_to_code + where_to_point,
            "PartialSnapshotCache");
  sink_.PutInt(cache_index, "partial_snapshot_cache_index");
}
} // namespace internal
} // namespace v8
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_SNAPSHOT_BUILTIN_SERIALIZER_H_
#define V8_SNAPSHOT_BUILTIN_SERIALIZER_H_
#include "src/snapshot/serializer.h"
namespace v8 {
namespace internal {
class StartupSerializer;
// Responsible for serializing all builtin objects during startup snapshot
// creation. Builtins are serialized into a dedicated area of the snapshot.
// See snapshot.h for documentation of the snapshot layout.
class BuiltinSerializer : public Serializer {
 public:
  BuiltinSerializer(Isolate* isolate, StartupSerializer* startup_serializer);
  ~BuiltinSerializer() override;

  // Serializes the builtins list by iterating it with this serializer as the
  // visitor.
  void SerializeBuiltins();

 private:
  // Receives the builtins-list root slots and serializes each builtin.
  void VisitRootPointers(Root root, Object** start, Object** end) override;

  // Serializes a single builtin code object in full.
  void SerializeBuiltin(Code* code);

  // Serializes objects embedded in builtin bodies (roots, builtin
  // references, partial-snapshot-cache entries).
  void SerializeObject(HeapObject* o, HowToCode how_to_code,
                       WhereToPoint where_to_point, int skip) override;

  // Not owned; used to access the partial snapshot cache and
  // root-serialization state.
  StartupSerializer* startup_serializer_;

  DISALLOW_COPY_AND_ASSIGN(BuiltinSerializer);
};
} // namespace internal
} // namespace v8
#endif // V8_SNAPSHOT_BUILTIN_SERIALIZER_H_
......@@ -84,8 +84,7 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
case Code::BYTECODE_HANDLER: // No direct references to handlers.
CHECK(false);
case Code::BUILTIN:
SerializeBuiltin(code_object->builtin_index(), how_to_code,
where_to_point);
SerializeBuiltinReference(code_object, how_to_code, where_to_point, 0);
return;
case Code::STUB:
#define IC_KIND_CASE(KIND) case Code::KIND:
......@@ -94,8 +93,8 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
if (code_object->builtin_index() == -1) {
SerializeCodeStub(code_object, how_to_code, where_to_point);
} else {
SerializeBuiltin(code_object->builtin_index(), how_to_code,
where_to_point);
SerializeBuiltinReference(code_object, how_to_code, where_to_point,
0);
}
return;
case Code::FUNCTION:
......@@ -139,22 +138,6 @@ void CodeSerializer::SerializeGeneric(HeapObject* heap_object,
serializer.Serialize();
}
void CodeSerializer::SerializeBuiltin(int builtin_index, HowToCode how_to_code,
WhereToPoint where_to_point) {
DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
(how_to_code == kFromCode && where_to_point == kInnerPointer));
DCHECK_LT(builtin_index, Builtins::builtin_count);
DCHECK_LE(0, builtin_index);
if (FLAG_trace_serializer) {
PrintF(" Encoding builtin: %s\n",
isolate()->builtins()->name(builtin_index));
}
sink_.Put(kBuiltin + how_to_code + where_to_point, "Builtin");
sink_.PutInt(builtin_index, "builtin_index");
}
void CodeSerializer::SerializeCodeStub(Code* code_stub, HowToCode how_to_code,
WhereToPoint where_to_point) {
// We only arrive here if we have not encountered this code stub before.
......@@ -283,7 +266,8 @@ void WasmCompiledModuleSerializer::SerializeCodeObject(
case Code::WASM_TO_JS_FUNCTION:
// Serialize the illegal builtin instead. On instantiation of a
// deserialized module, these will be replaced again.
SerializeBuiltin(Builtins::kIllegal, how_to_code, where_to_point);
SerializeBuiltinReference(*BUILTIN_CODE(isolate(), Illegal), how_to_code,
where_to_point, 0);
break;
default:
UNREACHABLE();
......
......@@ -39,8 +39,6 @@ class CodeSerializer : public Serializer {
virtual bool ElideObject(Object* obj) { return false; }
void SerializeGeneric(HeapObject* heap_object, HowToCode how_to_code,
WhereToPoint where_to_point);
void SerializeBuiltin(int builtin_index, HowToCode how_to_code,
WhereToPoint where_to_point);
private:
void SerializeObject(HeapObject* o, HowToCode how_to_code,
......
......@@ -13,7 +13,9 @@
#include "src/isolate.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/v8.h"
#include "src/v8threads.h"
......@@ -53,6 +55,77 @@ bool Deserializer::ReserveSpace() {
return true;
}
// static
// Atomically reserves heap space for both the startup (lhs) and builtin
// (rhs) deserializers. All reservations are merged and handed to the heap in
// a single ReserveSpace call, guaranteeing that no GC can occur between the
// two deserialization phases. Afterwards, the successful allocations are
// distributed back to the two deserializers. Returns false if the heap could
// not satisfy the merged reservations.
bool Deserializer::ReserveSpace(StartupDeserializer* lhs,
                                BuiltinDeserializer* rhs) {
  const int first_space = NEW_SPACE;
  const int last_space = SerializerDeserializer::kNumberOfSpaces;
  Isolate* isolate = lhs->isolate();

  // Merge reservations to reserve space in one go. Per space, lhs's chunks
  // are placed before rhs's chunks so they can be split apart again below.
  Heap::Reservation merged_reservations[kNumberOfSpaces];
  for (int i = first_space; i < last_space; i++) {
    Heap::Reservation& r = merged_reservations[i];
    Heap::Reservation& lhs_r = lhs->reservations_[i];
    Heap::Reservation& rhs_r = rhs->reservations_[i];
    DCHECK(!lhs_r.empty());
    DCHECK(!rhs_r.empty());
    r.insert(r.end(), lhs_r.begin(), lhs_r.end());
    r.insert(r.end(), rhs_r.begin(), rhs_r.end());
  }

  std::vector<Address> merged_allocated_maps;
  if (!isolate->heap()->ReserveSpace(merged_reservations,
                                     &merged_allocated_maps)) {
    return false;
  }

  // Distribute the successful allocations between both deserializers.
  // There's nothing to be done here except for map space: maps are allocated
  // individually by the heap, so the resulting address list must be split
  // proportionally to each deserializer's requested map count.
  {
    Heap::Reservation& lhs_r = lhs->reservations_[MAP_SPACE];
    Heap::Reservation& rhs_r = rhs->reservations_[MAP_SPACE];
    DCHECK_EQ(1, lhs_r.size());
    DCHECK_EQ(1, rhs_r.size());
    const int lhs_num_maps = lhs_r[0].size / Map::kSize;
    const int rhs_num_maps = rhs_r[0].size / Map::kSize;
    DCHECK_EQ(merged_allocated_maps.size(), lhs_num_maps + rhs_num_maps);
    {
      // First lhs_num_maps addresses belong to the startup deserializer.
      std::vector<Address>& dst = lhs->allocated_maps_;
      DCHECK(dst.empty());
      auto it = merged_allocated_maps.begin();
      dst.insert(dst.end(), it, it + lhs_num_maps);
    }
    {
      // The remainder belongs to the builtin deserializer.
      std::vector<Address>& dst = rhs->allocated_maps_;
      DCHECK(dst.empty());
      auto it = merged_allocated_maps.begin() + lhs_num_maps;
      dst.insert(dst.end(), it, it + rhs_num_maps);
    }
  }

  // Split the (now possibly updated) merged reservations back into each
  // deserializer's per-space reservation lists, preserving the lhs-first
  // ordering established above.
  for (int i = first_space; i < last_space; i++) {
    Heap::Reservation& r = merged_reservations[i];
    Heap::Reservation& lhs_r = lhs->reservations_[i];
    Heap::Reservation& rhs_r = rhs->reservations_[i];
    const int lhs_num_reservations = static_cast<int>(lhs_r.size());
    lhs_r.clear();
    lhs_r.insert(lhs_r.end(), r.begin(), r.begin() + lhs_num_reservations);
    rhs_r.clear();
    rhs_r.insert(rhs_r.end(), r.begin() + lhs_num_reservations, r.end());
  }

  // Initialize allocation cursors to the start of each deserializer's first
  // chunk in the preallocated spaces.
  for (int i = first_space; i < kNumberOfPreallocatedSpaces; i++) {
    lhs->high_water_[i] = lhs->reservations_[i][0].start;
    rhs->high_water_[i] = rhs->reservations_[i][0].start;
  }

  return true;
}
void Deserializer::Initialize(Isolate* isolate) {
DCHECK_NULL(isolate_);
DCHECK_NOT_NULL(isolate);
......@@ -153,6 +226,35 @@ uint32_t StringTableInsertionKey::ComputeHashField(String* string) {
return string->hash_field();
}
// Fixes up all builtin references recorded during deserialization (i.e.
// kBuiltin bytecodes read before the builtins themselves existed). For each
// deferred reference, looks up the now-deserialized builtin and writes its
// address — or its inner instruction pointer, for kInnerPointer — into the
// recorded slot.
void Deserializer::PostProcessDeferredBuiltinReferences() {
  for (const DeferredBuiltinReference& ref : builtin_references_) {
    // Only kBuiltin bytecodes are recorded as deferred references.
    DCHECK((ref.bytecode & kWhereMask) == kBuiltin);
    const byte how = ref.bytecode & kHowToCodeMask;
    const byte within = ref.bytecode & kWhereToPointMask;

    Object* new_object = isolate()->builtins()->builtin(ref.builtin_name);
    DCHECK(new_object->IsCode());

    if (within == kInnerPointer) {
      // Point at the first instruction, not at the code object itself.
      DCHECK(how == kFromCode);
      Code* new_code_object = Code::cast(new_object);
      new_object =
          reinterpret_cast<Object*>(new_code_object->instruction_start());
    }

    if (how == kFromCode) {
      // The slot lives inside a code object's instruction stream; patch it
      // through the assembler helper rather than with a plain store.
      Code* code = Code::cast(HeapObject::FromAddress(ref.current_object));
      Assembler::deserialization_set_special_target_at(
          isolate(), reinterpret_cast<Address>(ref.target_addr), code,
          reinterpret_cast<Address>(new_object));
    } else {
      // TODO(jgruber): We could save one ptr-size per kPlain entry by using
      // a separate struct for kPlain and kFromCode deferred references.
      UnalignedCopy(ref.target_addr, &new_object);
    }
  }
}
HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
if (deserializing_user_code()) {
if (obj->IsString()) {
......@@ -457,6 +559,7 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
// Find an object in the partial snapshots cache and write a pointer to it
// to the current object.
SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
SINGLE_CASE(kPartialSnapshotCache, kFromCode, kInnerPointer, 0)
// Find an external reference and write a pointer to it to the current
// object.
SINGLE_CASE(kExternalReference, kPlain, kStartOfObject, 0)
......@@ -647,6 +750,15 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
return true;
}
namespace {

// Returns a pointer to this static variable to mark builtin references that
// have not yet been post-processed (see
// Deserializer::PostProcessDeferredBuiltinReferences). The address of the
// static is what makes the marker unique; the 0xfe.. fill value only aids
// debugging.
Object* MagicPointer() {
  static uint64_t magic = 0xfefefefefefefefe;
  return reinterpret_cast<Object*>(&magic);
}

}  // namespace
template <int where, int how, int within, int space_number_if_any>
Object** Deserializer::ReadDataCase(Isolate* isolate, Object** current,
Address current_object_address, byte data,
......@@ -694,17 +806,23 @@ Object** Deserializer::ReadDataCase(Isolate* isolate, Object** current,
emit_write_barrier = isolate->heap()->InNewSpace(new_object);
} else {
DCHECK(where == kBuiltin);
DCHECK(deserializing_user_code());
int builtin_id = source_.GetInt();
DCHECK_LE(0, builtin_id);
DCHECK_LT(builtin_id, Builtins::builtin_count);
Builtins::Name name = static_cast<Builtins::Name>(builtin_id);
new_object = isolate->builtins()->builtin(name);
// Record the builtin reference for post-processing after builtin
// deserialization, and replace new_object with a magic byte marker.
builtin_references_.emplace_back(data, name, current,
current_object_address);
if (new_object == nullptr) new_object = MagicPointer();
emit_write_barrier = false;
}
if (within == kInnerPointer) {
DCHECK(how == kFromCode);
if (new_object->IsCode()) {
if (new_object == MagicPointer()) {
DCHECK(where == kBuiltin);
} else if (new_object->IsCode()) {
Code* new_code_object = Code::cast(new_object);
new_object =
reinterpret_cast<Object*>(new_code_object->instruction_start());
......
......@@ -25,7 +25,9 @@ namespace internal {
#define V8_CODE_EMBEDS_OBJECT_POINTER 0
#endif
class BuiltinDeserializer;
class Heap;
class StartupDeserializer;
// A Deserializer reads a snapshot and reconstructs the Object graph it defines.
class Deserializer : public SerializerDeserializer {
......@@ -60,8 +62,14 @@ class Deserializer : public SerializerDeserializer {
off_heap_backing_stores_.push_back(nullptr);
}
void Initialize(Isolate* isolate);
bool ReserveSpace();
// Atomically reserves space for the two given deserializers. Guarantees
// reservation for both without garbage collection in-between.
// TODO(jgruber): Replace this with advance builtin handle allocation.
static bool ReserveSpace(StartupDeserializer* lhs, BuiltinDeserializer* rhs);
void Initialize(Isolate* isolate);
void DeserializeDeferredObjects();
void RegisterDeserializedObjectsForBlackAllocation();
......@@ -72,6 +80,10 @@ class Deserializer : public SerializerDeserializer {
// Sort descriptors of deserialized maps using new string hashes.
void SortMapDescriptors();
// Fix up all recorded pointers to builtins once builtins have been
// deserialized.
void PostProcessDeferredBuiltinReferences();
Isolate* isolate() const { return isolate_; }
SnapshotByteSource* source() { return &source_; }
const std::vector<Code*>& new_code_objects() const {
......@@ -111,6 +123,23 @@ class Deserializer : public SerializerDeserializer {
next_alignment_ = static_cast<AllocationAlignment>(alignment);
}
  // Builtin references are collected during initial deserialization and later
  // iterated and filled in with the correct addresses after builtins have
  // themselves been deserialized (see PostProcessDeferredBuiltinReferences).
  struct DeferredBuiltinReference {
    // The kBuiltin serializer bytecode, including how/where bits.
    byte bytecode;
    // The builtin the slot must end up referencing.
    Builtins::Name builtin_name;
    // Address of the slot to patch.
    Object** target_addr;
    // Start address of the object containing the slot; only consulted for
    // kFromCode references.
    Address current_object;

    DeferredBuiltinReference(byte bytecode, Builtins::Name builtin_name,
                             Object** target_addr, Address current_object)
        : bytecode(bytecode),
          builtin_name(builtin_name),
          target_addr(target_addr),
          current_object(current_object) {}
  };
// Fills in some heap data in an area from start to end (non-inclusive). The
// space id is used for the write barrier. The object_address is the address
// of the object we are writing into, or NULL if we are not writing into an
......@@ -161,6 +190,7 @@ class Deserializer : public SerializerDeserializer {
std::vector<Handle<Script>> new_scripts_;
std::vector<TransitionArray*> transition_arrays_;
std::vector<byte*> off_heap_backing_stores_;
std::vector<DeferredBuiltinReference> builtin_references_;
const bool deserializing_user_code_;
......
......@@ -35,15 +35,13 @@ void PartialSerializer::Serialize(Object** o, bool include_global_proxy) {
// and its next context pointer may point to the code-stub context. Clear
// it before serializing, it will get re-added to the context list
// explicitly when it's loaded.
context->set(Context::NEXT_CONTEXT_LINK,
isolate_->heap()->undefined_value());
context->set(Context::NEXT_CONTEXT_LINK, isolate_->heap()->undefined_value());
DCHECK(!context->global_object()->IsUndefined(context->GetIsolate()));
// Reset math random cache to get fresh random numbers.
context->set_math_random_index(Smi::kZero);
context->set_math_random_cache(isolate_->heap()->undefined_value());
DCHECK_NULL(rehashable_global_dictionary_);
rehashable_global_dictionary_ =
context->global_object()->global_dictionary();
rehashable_global_dictionary_ = context->global_object()->global_dictionary();
VisitRootPointer(Root::kPartialSnapshotCache, o);
SerializeDeferredObjects();
......@@ -59,6 +57,12 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
DCHECK(Map::cast(obj)->code_cache() == obj->GetHeap()->empty_fixed_array());
}
BuiltinReferenceSerializationMode mode =
startup_serializer_->clear_function_code() ? kCanonicalizeCompileLazy
: kDefault;
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip, mode)) {
return;
}
if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;
int root_index = root_index_map_.Lookup(obj);
......
......@@ -217,6 +217,38 @@ bool Serializer::SerializeBackReference(HeapObject* obj, HowToCode how_to_code,
return true;
}
// Attempts to serialize |obj| as a builtin reference, i.e. the kBuiltin
// bytecode followed by the builtin id. Returns false if |obj| is not builtin
// code, in which case the caller must serialize it by other means. With
// kCanonicalizeCompileLazy, interpreter trampoline builtins are encoded as
// CompileLazy instead (used when function code is being cleared, see
// PartialSerializer::SerializeObject).
bool Serializer::SerializeBuiltinReference(
    HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
    int skip, BuiltinReferenceSerializationMode mode) {
  DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
         (how_to_code == kFromCode && where_to_point == kInnerPointer));

  // Only code objects carrying a valid builtin index qualify.
  if (!obj->IsCode()) return false;
  Code* code = Code::cast(obj);
  int builtin_index = code->builtin_index();
  if (builtin_index < 0) return false;

  DCHECK_LT(builtin_index, Builtins::builtin_count);
  DCHECK_LE(0, builtin_index);

  if (mode == kCanonicalizeCompileLazy &&
      code->is_interpreter_trampoline_builtin()) {
    builtin_index = static_cast<int>(Builtins::kCompileLazy);
  }

  if (FLAG_trace_serializer) {
    PrintF(" Encoding builtin reference: %s\n",
           isolate()->builtins()->name(builtin_index));
  }

  // Emit any pending skip first so the reference lands at the right offset.
  FlushSkip(skip);
  sink_.Put(kBuiltin + how_to_code + where_to_point, "Builtin");
  sink_.PutInt(builtin_index, "builtin_index");

  return true;
}
void Serializer::PutRoot(int root_index, HeapObject* object,
SerializerDeserializer::HowToCode how_to_code,
SerializerDeserializer::WhereToPoint where_to_point,
......
......@@ -183,6 +183,18 @@ class Serializer : public SerializerDeserializer {
bool SerializeBackReference(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
// Determines whether the interpreter trampoline is replaced by CompileLazy.
enum BuiltinReferenceSerializationMode {
kDefault,
kCanonicalizeCompileLazy,
};
// Returns true if the object was successfully serialized as a builtin
// reference.
bool SerializeBuiltinReference(
HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
int skip, BuiltinReferenceSerializationMode mode = kDefault);
inline void FlushSkip(int skip) {
if (skip != 0) {
sink_.Put(kSkip, "SkipFromSerializeObject");
......
......@@ -19,7 +19,7 @@ namespace v8 {
namespace internal {
#ifdef DEBUG
bool Snapshot::SnapshotIsValid(v8::StartupData* snapshot_blob) {
bool Snapshot::SnapshotIsValid(const v8::StartupData* snapshot_blob) {
return Snapshot::ExtractNumContexts(snapshot_blob) > 0;
}
#endif // DEBUG
......@@ -40,8 +40,11 @@ bool Snapshot::Initialize(Isolate* isolate) {
const v8::StartupData* blob = isolate->snapshot_blob();
Vector<const byte> startup_data = ExtractStartupData(blob);
SnapshotData snapshot_data(startup_data);
StartupDeserializer deserializer(&snapshot_data);
SnapshotData startup_snapshot_data(startup_data);
Vector<const byte> builtin_data = ExtractBuiltinData(blob);
SnapshotData builtin_snapshot_data(builtin_data);
StartupDeserializer deserializer(&startup_snapshot_data,
&builtin_snapshot_data);
deserializer.SetRehashability(ExtractRehashability(blob));
bool success = isolate->Init(&deserializer);
if (FLAG_profile_deserialization) {
......@@ -82,7 +85,7 @@ MaybeHandle<Context> Snapshot::NewContextFromSnapshot(
}
void ProfileDeserialization(
const SnapshotData* startup_snapshot,
const SnapshotData* startup_snapshot, const SnapshotData* builtin_snapshot,
const std::vector<SnapshotData*>& context_snapshots) {
if (FLAG_profile_deserialization) {
int startup_total = 0;
......@@ -90,6 +93,9 @@ void ProfileDeserialization(
for (const auto& reservation : startup_snapshot->Reservations()) {
startup_total += reservation.chunk_size();
}
for (const auto& reservation : builtin_snapshot->Reservations()) {
startup_total += reservation.chunk_size();
}
PrintF("%10d bytes per isolate\n", startup_total);
for (size_t i = 0; i < context_snapshots.size(); i++) {
int context_total = 0;
......@@ -102,22 +108,25 @@ void ProfileDeserialization(
}
v8::StartupData Snapshot::CreateSnapshotBlob(
const SnapshotData* startup_snapshot,
const SnapshotData* startup_snapshot, const SnapshotData* builtin_snapshot,
const std::vector<SnapshotData*>& context_snapshots, bool can_be_rehashed) {
uint32_t num_contexts = static_cast<uint32_t>(context_snapshots.size());
uint32_t startup_snapshot_offset = StartupSnapshotOffset(num_contexts);
uint32_t total_length = startup_snapshot_offset;
total_length += static_cast<uint32_t>(startup_snapshot->RawData().length());
total_length += static_cast<uint32_t>(builtin_snapshot->RawData().length());
for (const auto context_snapshot : context_snapshots) {
total_length += static_cast<uint32_t>(context_snapshot->RawData().length());
}
ProfileDeserialization(startup_snapshot, context_snapshots);
ProfileDeserialization(startup_snapshot, builtin_snapshot, context_snapshots);
char* data = new char[total_length];
SetHeaderValue(data, kNumberOfContextsOffset, num_contexts);
SetHeaderValue(data, kRehashabilityOffset, can_be_rehashed ? 1 : 0);
uint32_t payload_offset = StartupSnapshotOffset(num_contexts);
// Startup snapshot (isolate-specific data).
uint32_t payload_offset = startup_snapshot_offset;
uint32_t payload_length =
static_cast<uint32_t>(startup_snapshot->RawData().length());
CopyBytes(data + payload_offset,
......@@ -128,6 +137,19 @@ v8::StartupData Snapshot::CreateSnapshotBlob(
payload_length);
}
payload_offset += payload_length;
// Builtins.
SetHeaderValue(data, kBuiltinOffsetOffset, payload_offset);
payload_length = builtin_snapshot->RawData().length();
CopyBytes(data + payload_offset,
reinterpret_cast<const char*>(builtin_snapshot->RawData().start()),
payload_length);
if (FLAG_profile_deserialization) {
PrintF("%10d bytes for builtins\n", payload_length);
}
payload_offset += payload_length;
// Partial snapshots (context-specific data).
for (uint32_t i = 0; i < num_contexts; i++) {
SetHeaderValue(data, ContextSnapshotOffsetOffset(i), payload_offset);
SnapshotData* context_snapshot = context_snapshots[i];
......@@ -143,6 +165,7 @@ v8::StartupData Snapshot::CreateSnapshotBlob(
}
v8::StartupData result = {data, static_cast<int>(total_length)};
DCHECK_EQ(total_length, payload_offset);
return result;
}
......@@ -171,14 +194,31 @@ Vector<const byte> Snapshot::ExtractStartupData(const v8::StartupData* data) {
uint32_t num_contexts = ExtractNumContexts(data);
uint32_t startup_offset = StartupSnapshotOffset(num_contexts);
CHECK_LT(startup_offset, data->raw_size);
uint32_t first_context_offset = ExtractContextOffset(data, 0);
CHECK_LT(first_context_offset, data->raw_size);
uint32_t startup_length = first_context_offset - startup_offset;
uint32_t builtin_offset = GetHeaderValue(data, kBuiltinOffsetOffset);
CHECK_LT(builtin_offset, data->raw_size);
CHECK_GT(builtin_offset, startup_offset);
uint32_t startup_length = builtin_offset - startup_offset;
const byte* startup_data =
reinterpret_cast<const byte*>(data->data + startup_offset);
return Vector<const byte>(startup_data, startup_length);
}
// Returns a view of the builtin snapshot area within the blob. The area
// starts at the offset recorded in the header and ends where the first
// context snapshot begins.
Vector<const byte> Snapshot::ExtractBuiltinData(const v8::StartupData* data) {
  DCHECK(SnapshotIsValid(data));

  uint32_t begin_offset = GetHeaderValue(data, kBuiltinOffsetOffset);
  CHECK_LT(begin_offset, data->raw_size);

  uint32_t end_offset = GetHeaderValue(data, ContextSnapshotOffsetOffset(0));
  CHECK_LT(end_offset, data->raw_size);
  CHECK_GT(end_offset, begin_offset);

  const byte* builtin_data =
      reinterpret_cast<const byte*>(data->data + begin_offset);
  return Vector<const byte>(builtin_data, end_offset - begin_offset);
}
Vector<const byte> Snapshot::ExtractContextData(const v8::StartupData* data,
uint32_t index) {
uint32_t num_contexts = ExtractNumContexts(data);
......
......@@ -75,11 +75,12 @@ class Snapshot : public AllStatic {
static v8::StartupData CreateSnapshotBlob(
const SnapshotData* startup_snapshot,
const SnapshotData* builtin_snapshot,
const std::vector<SnapshotData*>& context_snapshots,
bool can_be_rehashed);
#ifdef DEBUG
static bool SnapshotIsValid(v8::StartupData* snapshot_blob);
static bool SnapshotIsValid(const v8::StartupData* snapshot_blob);
#endif // DEBUG
private:
......@@ -88,6 +89,7 @@ class Snapshot : public AllStatic {
uint32_t index);
static bool ExtractRehashability(const v8::StartupData* data);
static Vector<const byte> ExtractStartupData(const v8::StartupData* data);
static Vector<const byte> ExtractBuiltinData(const v8::StartupData* data);
static Vector<const byte> ExtractContextData(const v8::StartupData* data,
uint32_t index);
......@@ -101,11 +103,13 @@ class Snapshot : public AllStatic {
// Snapshot blob layout:
// [0] number of contexts N
// [1] rehashability
// [2] offset to context 0
// [3] offset to context 1
// [2] offset to builtins
// [3] offset to context 0
// [4] offset to context 1
// ...
// ... offset to context N - 1
// ... startup snapshot data
// ... builtin snapshot data
// ... context 0 snapshot data
// ... context 1 snapshot data
......@@ -113,8 +117,9 @@ class Snapshot : public AllStatic {
// TODO(yangguo): generalize rehashing, and remove this flag.
static const uint32_t kRehashabilityOffset =
kNumberOfContextsOffset + kUInt32Size;
static const int kBuiltinOffsetOffset = kRehashabilityOffset + kUInt32Size;
static const uint32_t kFirstContextOffsetOffset =
kRehashabilityOffset + kUInt32Size;
kBuiltinOffsetOffset + kUInt32Size;
static uint32_t StartupSnapshotOffset(int num_contexts) {
return kFirstContextOffsetOffset + num_contexts * kInt32Size;
......
......@@ -6,6 +6,7 @@
#include "src/assembler-inl.h"
#include "src/heap/heap-inl.h"
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/snapshot.h"
namespace v8 {
......@@ -13,7 +14,13 @@ namespace internal {
void StartupDeserializer::DeserializeInto(Isolate* isolate) {
Initialize(isolate);
if (!ReserveSpace()) V8::FatalProcessOutOfMemory("StartupDeserializer");
BuiltinDeserializer builtin_deserializer(builtin_data_);
builtin_deserializer.Initialize(isolate);
if (!Deserializer::ReserveSpace(this, &builtin_deserializer)) {
V8::FatalProcessOutOfMemory("StartupDeserializer");
}
// No active threads.
DCHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
......@@ -26,6 +33,7 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
{
DisallowHeapAllocation no_gc;
isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
isolate->heap()->IterateSmiRoots(this);
isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
......@@ -34,6 +42,11 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
DeserializeDeferredObjects();
FlushICacheForNewIsolate();
RestoreExternalReferenceRedirectors(accessor_infos());
// Eagerly deserialize all builtins from the builtin snapshot.
// TODO(6624): Deserialize lazily.
builtin_deserializer.DeserializeAllBuiltins();
PostProcessDeferredBuiltinReferences();
}
isolate->heap()->set_native_contexts_list(isolate->heap()->undefined_value());
......
......@@ -14,8 +14,9 @@ namespace internal {
// Initializes an isolate with context-independent data from a given snapshot.
class StartupDeserializer final : public Deserializer {
public:
explicit StartupDeserializer(const SnapshotData* data)
: Deserializer(data, false) {}
StartupDeserializer(const SnapshotData* startup_data,
const SnapshotData* builtin_data)
: Deserializer(startup_data, false), builtin_data_(builtin_data) {}
// Deserialize the snapshot into an empty heap.
void DeserializeInto(Isolate* isolate);
......@@ -26,6 +27,8 @@ class StartupDeserializer final : public Deserializer {
// Rehash after deserializing an isolate.
void Rehash();
const SnapshotData* builtin_data_;
};
} // namespace internal
......
......@@ -30,29 +30,24 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) {
DCHECK(!obj->IsJSFunction());
if (clear_function_code_) {
if (obj->IsCode()) {
Code* code = Code::cast(obj);
// If the function code is compiled (either as native code or bytecode),
// replace it with lazy-compile builtin. Only exception is when we are
// serializing the canonical interpreter-entry-trampoline builtin.
if (code->kind() == Code::FUNCTION ||
(!serializing_builtins_ &&
code->is_interpreter_trampoline_builtin())) {
obj = isolate()->builtins()->builtin(Builtins::kCompileLazy);
}
} else if (obj->IsBytecodeArray()) {
obj = isolate()->heap()->undefined_value();
}
if (clear_function_code() && obj->IsBytecodeArray()) {
obj = isolate()->heap()->undefined_value();
}
BuiltinReferenceSerializationMode mode =
(clear_function_code() && !serializing_builtins_)
? kCanonicalizeCompileLazy
: kDefault;
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip, mode)) {
return;
}
if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;
int root_index = root_index_map_.Lookup(obj);
// We can only encode roots as such if it has already been serialized.
// That applies to root indices below the wave front.
if (root_index != RootIndexMap::kInvalidRootIndex) {
if (root_has_been_serialized_.test(root_index)) {
if (root_has_been_serialized(root_index)) {
PutRoot(root_index, obj, how_to_code, where_to_point, skip);
return;
}
......
......@@ -30,6 +30,10 @@ class StartupSerializer : public Serializer {
int PartialSnapshotCacheIndex(HeapObject* o);
bool can_be_rehashed() const { return can_be_rehashed_; }
bool clear_function_code() const { return clear_function_code_; }
bool root_has_been_serialized(int root_index) const {
return root_has_been_serialized_.test(root_index);
}
private:
class PartialCacheIndexMap {
......@@ -73,7 +77,7 @@ class StartupSerializer : public Serializer {
void CheckRehashability(HeapObject* hashtable);
bool clear_function_code_;
const bool clear_function_code_;
bool serializing_builtins_;
bool serializing_immortal_immovables_roots_;
std::bitset<Heap::kStrongRootListLength> root_has_been_serialized_;
......
......@@ -1334,6 +1334,10 @@
'signature.h',
'simulator.h',
'small-pointer-list.h',
'snapshot/builtin-deserializer.cc',
'snapshot/builtin-deserializer.h',
'snapshot/builtin-serializer.cc',
'snapshot/builtin-serializer.h',
'snapshot/code-serializer.cc',
'snapshot/code-serializer.h',
'snapshot/deserializer.cc',
......
......@@ -41,6 +41,8 @@
#include "src/macro-assembler-inl.h"
#include "src/objects-inl.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/builtin-serializer.h"
#include "src/snapshot/code-serializer.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/partial-deserializer.h"
......@@ -95,7 +97,19 @@ static Vector<const byte> WritePayload(const Vector<const byte>& payload) {
return Vector<const byte>(const_cast<const byte*>(blob), length);
}
static Vector<const byte> Serialize(v8::Isolate* isolate) {
// A convenience struct to simplify management of the two blobs required to
// deserialize an isolate.
struct StartupBlobs {
Vector<const byte> startup;
Vector<const byte> builtin;
void Dispose() {
startup.Dispose();
builtin.Dispose();
}
};
static StartupBlobs Serialize(v8::Isolate* isolate) {
// We have to create one context. One reason for this is so that the builtins
// can be loaded from v8natives.js and their addresses can be processed. This
// will clear the pending fixups array, which would otherwise contain GC roots
......@@ -112,9 +126,15 @@ static Vector<const byte> Serialize(v8::Isolate* isolate) {
StartupSerializer ser(internal_isolate,
v8::SnapshotCreator::FunctionCodeHandling::kClear);
ser.SerializeStrongReferences();
i::BuiltinSerializer builtin_serializer(internal_isolate, &ser);
builtin_serializer.SerializeBuiltins();
ser.SerializeWeakReferencesAndDeferred();
SnapshotData snapshot_data(&ser);
return WritePayload(snapshot_data.RawData());
SnapshotData startup_snapshot(&ser);
SnapshotData builtin_snapshot(&builtin_serializer);
return {WritePayload(startup_snapshot.RawData()),
WritePayload(builtin_snapshot.RawData())};
}
......@@ -134,11 +154,12 @@ Vector<const uint8_t> ConstructSource(Vector<const uint8_t> head,
source_length);
}
v8::Isolate* InitializeFromBlob(Vector<const byte> blob) {
v8::Isolate* InitializeFromBlob(StartupBlobs& blobs) {
v8::Isolate* v8_isolate = NULL;
{
SnapshotData snapshot_data(blob);
StartupDeserializer deserializer(&snapshot_data);
SnapshotData startup_snapshot(blobs.startup);
SnapshotData builtin_snapshot(blobs.builtin);
StartupDeserializer deserializer(&startup_snapshot, &builtin_snapshot);
TestIsolate* isolate = new TestIsolate(false);
v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
v8::Isolate::Scope isolate_scope(v8_isolate);
......@@ -148,8 +169,8 @@ v8::Isolate* InitializeFromBlob(Vector<const byte> blob) {
return v8_isolate;
}
static v8::Isolate* Deserialize(Vector<const byte> blob) {
v8::Isolate* isolate = InitializeFromBlob(blob);
static v8::Isolate* Deserialize(StartupBlobs& blobs) {
v8::Isolate* isolate = InitializeFromBlob(blobs);
CHECK(isolate);
return isolate;
}
......@@ -170,9 +191,9 @@ static void SanityCheck(v8::Isolate* v8_isolate) {
UNINITIALIZED_TEST(StartupSerializerOnce) {
DisableAlwaysOpt();
v8::Isolate* isolate = TestIsolate::NewInitialized(true);
Vector<const byte> blob = Serialize(isolate);
isolate = Deserialize(blob);
blob.Dispose();
StartupBlobs blobs = Serialize(isolate);
isolate = Deserialize(blobs);
blobs.Dispose();
{
v8::HandleScope handle_scope(isolate);
v8::Isolate::Scope isolate_scope(isolate);
......@@ -236,11 +257,11 @@ UNINITIALIZED_TEST(StartupSerializerRootMapDependencies) {
UNINITIALIZED_TEST(StartupSerializerTwice) {
DisableAlwaysOpt();
v8::Isolate* isolate = TestIsolate::NewInitialized(true);
Vector<const byte> blob1 = Serialize(isolate);
Vector<const byte> blob2 = Serialize(isolate);
blob1.Dispose();
isolate = Deserialize(blob2);
blob2.Dispose();
StartupBlobs blobs1 = Serialize(isolate);
StartupBlobs blobs2 = Serialize(isolate);
blobs1.Dispose();
isolate = Deserialize(blobs2);
blobs2.Dispose();
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
......@@ -256,9 +277,9 @@ UNINITIALIZED_TEST(StartupSerializerTwice) {
UNINITIALIZED_TEST(StartupSerializerOnceRunScript) {
DisableAlwaysOpt();
v8::Isolate* isolate = TestIsolate::NewInitialized(true);
Vector<const byte> blob = Serialize(isolate);
isolate = Deserialize(blob);
blob.Dispose();
StartupBlobs blobs = Serialize(isolate);
isolate = Deserialize(blobs);
blobs.Dispose();
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
......@@ -280,11 +301,11 @@ UNINITIALIZED_TEST(StartupSerializerOnceRunScript) {
UNINITIALIZED_TEST(StartupSerializerTwiceRunScript) {
DisableAlwaysOpt();
v8::Isolate* isolate = TestIsolate::NewInitialized(true);
Vector<const byte> blob1 = Serialize(isolate);
Vector<const byte> blob2 = Serialize(isolate);
blob1.Dispose();
isolate = Deserialize(blob2);
blob2.Dispose();
StartupBlobs blobs1 = Serialize(isolate);
StartupBlobs blobs2 = Serialize(isolate);
blobs1.Dispose();
isolate = Deserialize(blobs2);
blobs2.Dispose();
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
......@@ -303,6 +324,7 @@ UNINITIALIZED_TEST(StartupSerializerTwiceRunScript) {
}
static void PartiallySerializeContext(Vector<const byte>* startup_blob_out,
Vector<const byte>* builtin_blob_out,
Vector<const byte>* partial_blob_out) {
v8::Isolate* v8_isolate = TestIsolate::NewInitialized(true);
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
......@@ -343,12 +365,18 @@ static void PartiallySerializeContext(Vector<const byte>* startup_blob_out,
PartialSerializer partial_serializer(isolate, &startup_serializer,
v8::SerializeInternalFieldsCallback());
partial_serializer.Serialize(&raw_context, false);
i::BuiltinSerializer builtin_serializer(isolate, &startup_serializer);
builtin_serializer.SerializeBuiltins();
startup_serializer.SerializeWeakReferencesAndDeferred();
SnapshotData startup_snapshot(&startup_serializer);
SnapshotData builtin_snapshot(&builtin_serializer);
SnapshotData partial_snapshot(&partial_serializer);
*partial_blob_out = WritePayload(partial_snapshot.RawData());
*builtin_blob_out = WritePayload(builtin_snapshot.RawData());
*startup_blob_out = WritePayload(startup_snapshot.RawData());
}
v8_isolate->Dispose();
......@@ -357,12 +385,14 @@ static void PartiallySerializeContext(Vector<const byte>* startup_blob_out,
UNINITIALIZED_TEST(PartialSerializerContext) {
DisableAlwaysOpt();
Vector<const byte> startup_blob;
Vector<const byte> builtin_blob;
Vector<const byte> partial_blob;
PartiallySerializeContext(&startup_blob, &partial_blob);
PartiallySerializeContext(&startup_blob, &builtin_blob, &partial_blob);
v8::Isolate* v8_isolate = InitializeFromBlob(startup_blob);
StartupBlobs blobs = {startup_blob, builtin_blob};
v8::Isolate* v8_isolate = InitializeFromBlob(blobs);
CHECK(v8_isolate);
startup_blob.Dispose();
blobs.Dispose();
{
v8::Isolate::Scope isolate_scope(v8_isolate);
......@@ -398,7 +428,7 @@ UNINITIALIZED_TEST(PartialSerializerContext) {
}
static void PartiallySerializeCustomContext(
Vector<const byte>* startup_blob_out,
Vector<const byte>* startup_blob_out, Vector<const byte>* builtin_blob_out,
Vector<const byte>* partial_blob_out) {
v8::Isolate* v8_isolate = TestIsolate::NewInitialized(true);
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
......@@ -459,12 +489,18 @@ static void PartiallySerializeCustomContext(
PartialSerializer partial_serializer(isolate, &startup_serializer,
v8::SerializeInternalFieldsCallback());
partial_serializer.Serialize(&raw_context, false);
i::BuiltinSerializer builtin_serializer(isolate, &startup_serializer);
builtin_serializer.SerializeBuiltins();
startup_serializer.SerializeWeakReferencesAndDeferred();
SnapshotData startup_snapshot(&startup_serializer);
SnapshotData builtin_snapshot(&builtin_serializer);
SnapshotData partial_snapshot(&partial_serializer);
*partial_blob_out = WritePayload(partial_snapshot.RawData());
*builtin_blob_out = WritePayload(builtin_snapshot.RawData());
*startup_blob_out = WritePayload(startup_snapshot.RawData());
}
v8_isolate->Dispose();
......@@ -473,12 +509,14 @@ static void PartiallySerializeCustomContext(
UNINITIALIZED_TEST(PartialSerializerCustomContext) {
DisableAlwaysOpt();
Vector<const byte> startup_blob;
Vector<const byte> builtin_blob;
Vector<const byte> partial_blob;
PartiallySerializeCustomContext(&startup_blob, &partial_blob);
PartiallySerializeCustomContext(&startup_blob, &builtin_blob, &partial_blob);
v8::Isolate* v8_isolate = InitializeFromBlob(startup_blob);
StartupBlobs blobs = {startup_blob, builtin_blob};
v8::Isolate* v8_isolate = InitializeFromBlob(blobs);
CHECK(v8_isolate);
startup_blob.Dispose();
blobs.Dispose();
{
v8::Isolate::Scope isolate_scope(v8_isolate);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment