Commit 4104fd90 authored by Jakob Gruber, committed by Commit Bot

Reland "Reland "[snapshot] Add BuiltinDeserializerAllocator""

This is a reland of 526c31d0
Original change's description:
> Reland "[snapshot] Add BuiltinDeserializerAllocator"
> 
> This is a reland of 2b9a6d89
> Original change's description:
> > [snapshot] Add BuiltinDeserializerAllocator
> > 
> > Encapsulates special reservation / allocation behavior for builtin
> > deserialization.
> > 
> > Bug: v8:6624
> > Change-Id: Ic784ed43b607c881b356c6e535c9dbe185e1d4cd
> > Reviewed-on: https://chromium-review.googlesource.com/716229
> > Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> > Reviewed-by: Yang Guo <yangguo@chromium.org>
> > Cr-Commit-Position: refs/heads/master@{#48638}
> 
> TBR=yangguo@chromium.org
> 
> Bug: v8:6624
> Change-Id: I07c49263b4ef128dfe9b97d364e9a279b343aa24
> Reviewed-on: https://chromium-review.googlesource.com/723520
> Reviewed-by: Jakob Gruber <jgruber@chromium.org>
> Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#48647}

TBR=yangguo@chromium.org

Bug: v8:6624
Change-Id: I4186fcf89b9fce3433a02fc864346a300b90ffb5
Reviewed-on: https://chromium-review.googlesource.com/725439
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48666}
parent 57c6c979
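For orientation before the diff: the change makes the deserializer a template over its allocator type and moves builtin-specific behavior into BuiltinDeserializerAllocator, whose Allocate returns a pre-created object instead of fresh memory. A minimal toy sketch of that shape, using simplified stand-in names rather than the actual V8 declarations:

#include <cstdint>
#include <iostream>

using Address = std::uintptr_t;

// Stand-in for DefaultDeserializerAllocator: hands out fresh memory.
struct DefaultAllocatorModel {
  Address Allocate(int /*space*/, int size) {
    return reinterpret_cast<Address>(new char[size]);  // leaked; toy only
  }
};

// Stand-in for BuiltinDeserializerAllocator: returns a pre-reserved address
// (in V8, the address of the Code object already parked in the builtins table).
struct BuiltinAllocatorModel {
  Address pre_allocated = 0;  // filled by an InitializeBuiltinsTable analogue
  Address Allocate(int /*space*/, int /*size*/) { return pre_allocated; }
};

// Stand-in for Deserializer<AllocatorT>: allocation is delegated to the
// allocator, mirroring the allocator()->Allocate(...) call in ReadObject below.
template <class AllocatorT>
class DeserializerModel {
 public:
  AllocatorT* allocator() { return &allocator_; }
  Address ReadObject(int space, int size) {
    return allocator()->Allocate(space, size);
  }

 private:
  AllocatorT allocator_;
};

int main() {
  DeserializerModel<DefaultAllocatorModel> default_deserializer;
  DeserializerModel<BuiltinAllocatorModel> builtin_deserializer;
  builtin_deserializer.allocator()->pre_allocated = 0x1000;  // pretend slot
  std::cout << std::hex << default_deserializer.ReadObject(2, 64) << "\n"
            << builtin_deserializer.ReadObject(2, 64) << "\n";
  return 0;
}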
@@ -1962,6 +1962,8 @@ v8_source_set("v8_base") {
"src/setup-isolate.h",
"src/signature.h",
"src/simulator.h",
"src/snapshot/builtin-deserializer-allocator.cc",
"src/snapshot/builtin-deserializer-allocator.h",
"src/snapshot/builtin-deserializer.cc",
"src/snapshot/builtin-deserializer.h",
"src/snapshot/builtin-serializer.cc",
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/snapshot/builtin-deserializer-allocator.h"
#include "src/heap/heap-inl.h"
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/deserializer.h"
namespace v8 {
namespace internal {
BuiltinDeserializerAllocator::BuiltinDeserializerAllocator(
Deserializer<BuiltinDeserializerAllocator>* deserializer)
: deserializer_(deserializer) {}
Address BuiltinDeserializerAllocator::Allocate(AllocationSpace space,
int size) {
const int builtin_id = deserializer()->CurrentBuiltinId();
DCHECK_EQ(CODE_SPACE, space);
DCHECK_EQ(deserializer()->ExtractBuiltinSize(builtin_id), size);
#ifdef DEBUG
RegisterBuiltinAllocation(builtin_id);
#endif
Object* obj = isolate()->builtins()->builtin(builtin_id);
DCHECK(Internals::HasHeapObjectTag(obj));
return HeapObject::cast(obj)->address();
}
Heap::Reservation
BuiltinDeserializerAllocator::CreateReservationsForEagerBuiltins() {
Heap::Reservation result;
// DeserializeLazy is always the first reservation (to simplify logic in
// InitializeBuiltinsTable).
{
DCHECK(!Builtins::IsLazy(Builtins::kDeserializeLazy));
uint32_t builtin_size =
deserializer()->ExtractBuiltinSize(Builtins::kDeserializeLazy);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
result.push_back({builtin_size, nullptr, nullptr});
}
for (int i = 0; i < Builtins::builtin_count; i++) {
if (i == Builtins::kDeserializeLazy) continue;
// Skip lazy builtins. These will be replaced by the DeserializeLazy code
// object in InitializeBuiltinsTable and thus require no reserved space.
if (deserializer()->IsLazyDeserializationEnabled() && Builtins::IsLazy(i)) {
continue;
}
uint32_t builtin_size = deserializer()->ExtractBuiltinSize(i);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
result.push_back({builtin_size, nullptr, nullptr});
}
return result;
}
void BuiltinDeserializerAllocator::InitializeBuiltinFromReservation(
const Heap::Chunk& chunk, int builtin_id) {
DCHECK_EQ(deserializer()->ExtractBuiltinSize(builtin_id), chunk.size);
DCHECK_EQ(chunk.size, chunk.end - chunk.start);
SkipList::Update(chunk.start, chunk.size);
isolate()->builtins()->set_builtin(builtin_id,
HeapObject::FromAddress(chunk.start));
#ifdef DEBUG
RegisterBuiltinReservation(builtin_id);
#endif
}
void BuiltinDeserializerAllocator::InitializeBuiltinsTable(
const Heap::Reservation& reservation) {
DCHECK(!AllowHeapAllocation::IsAllowed());
Builtins* builtins = isolate()->builtins();
int reservation_index = 0;
// DeserializeLazy itself must not be lazy, since other builtins are replaced
// by it. It always occupies the first reservation slot.
{
DCHECK(!Builtins::IsLazy(Builtins::kDeserializeLazy));
InitializeBuiltinFromReservation(reservation[reservation_index],
Builtins::kDeserializeLazy);
reservation_index++;
}
Code* deserialize_lazy = builtins->builtin(Builtins::kDeserializeLazy);
for (int i = 0; i < Builtins::builtin_count; i++) {
if (i == Builtins::kDeserializeLazy) continue;
if (deserializer()->IsLazyDeserializationEnabled() && Builtins::IsLazy(i)) {
builtins->set_builtin(i, deserialize_lazy);
} else {
InitializeBuiltinFromReservation(reservation[reservation_index], i);
reservation_index++;
}
}
DCHECK_EQ(reservation.size(), reservation_index);
}
void BuiltinDeserializerAllocator::ReserveAndInitializeBuiltinsTableForBuiltin(
int builtin_id) {
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(isolate()->builtins()->is_initialized());
DCHECK(Builtins::IsBuiltinId(builtin_id));
DCHECK_NE(Builtins::kDeserializeLazy, builtin_id);
DCHECK_EQ(Builtins::kDeserializeLazy,
isolate()->builtins()->builtin(builtin_id)->builtin_index());
const uint32_t builtin_size = deserializer()->ExtractBuiltinSize(builtin_id);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
Handle<HeapObject> o =
isolate()->factory()->NewCodeForDeserialization(builtin_size);
// Note: After this point and until deserialization finishes, heap allocation
// is disallowed. We currently can't safely assert this since we'd need to
// pass the DisallowHeapAllocation scope out of this function.
// Write the allocated filler object into the builtins table. It will be
// returned by our custom Allocate method below once needed.
isolate()->builtins()->set_builtin(builtin_id, *o);
#ifdef DEBUG
RegisterBuiltinReservation(builtin_id);
#endif
}
#ifdef DEBUG
void BuiltinDeserializerAllocator::RegisterBuiltinReservation(int builtin_id) {
const auto result = unused_reservations_.emplace(builtin_id);
CHECK(result.second);  // False iff builtin_id was already present in the set.
}
void BuiltinDeserializerAllocator::RegisterBuiltinAllocation(int builtin_id) {
const size_t removed_elems = unused_reservations_.erase(builtin_id);
CHECK_EQ(removed_elems, 1);
}
bool BuiltinDeserializerAllocator::ReservationsAreFullyUsed() const {
// Not 100% precise but should be good enough.
return unused_reservations_.empty();
}
#endif // DEBUG
Isolate* BuiltinDeserializerAllocator::isolate() const {
return deserializer()->isolate();
}
BuiltinDeserializer* BuiltinDeserializerAllocator::deserializer() const {
return static_cast<BuiltinDeserializer*>(deserializer_);
}
} // namespace internal
} // namespace v8
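The Allocate / ReserveAndInitializeBuiltinsTableForBuiltin pair above amounts to a two-step lazy flow: first park a correctly sized object in the builtins table, then have Allocate hand that parked address back during deserialization (the DEBUG set merely checks that every reservation is consumed exactly once). A self-contained toy sketch of that flow, with simplified names and signatures rather than the real V8 interfaces:

#include <array>
#include <cassert>
#include <cstdint>

constexpr int kBuiltinCount = 4;
using Address = std::uintptr_t;

// Stand-in for the isolate's builtins table.
struct BuiltinsTableModel {
  std::array<Address, kBuiltinCount> slots{};
};

struct LazyAllocatorModel {
  BuiltinsTableModel* table;

  // Analogue of ReserveAndInitializeBuiltinsTableForBuiltin: allocate an
  // object of the right size up front and park it in the table.
  void ReserveFor(int builtin_id, int size) {
    table->slots[builtin_id] = reinterpret_cast<Address>(new char[size]);  // leaked; toy only
  }

  // Analogue of Allocate: no new memory, just return the parked address.
  // (In V8 the id comes from CurrentBuiltinId() rather than a parameter.)
  Address Allocate(int builtin_id, int /*size*/) {
    Address parked = table->slots[builtin_id];
    assert(parked != 0);  // must have been reserved first
    return parked;
  }
};

int main() {
  BuiltinsTableModel table;
  LazyAllocatorModel allocator{&table};
  allocator.ReserveFor(2, 128);                 // step 1: reserve up front
  Address target = allocator.Allocate(2, 128);  // step 2: deserialize into it
  assert(target == table.slots[2]);
  return 0;
}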
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_SNAPSHOT_BUILTIN_DESERIALIZER_ALLOCATOR_H_
#define V8_SNAPSHOT_BUILTIN_DESERIALIZER_ALLOCATOR_H_
#include <unordered_set>
#include "src/globals.h"
#include "src/heap/heap.h"
#include "src/snapshot/serializer-common.h"
namespace v8 {
namespace internal {
template <class AllocatorT>
class Deserializer;
class BuiltinDeserializer;
class BuiltinDeserializerAllocator final {
public:
BuiltinDeserializerAllocator(
Deserializer<BuiltinDeserializerAllocator>* deserializer);
// ------- Allocation Methods -------
// Methods related to memory allocation during deserialization.
// Allocation works differently here than in other deserializers. Instead of
// a statically-known memory area determined at serialization-time, our
// memory requirements here are determined at runtime. Another major
// difference is that we create builtin Code objects up-front (before
// deserialization) in order to avoid having to patch builtin references
// later on. See also the kBuiltin case in deserializer.cc.
//
// Allocate simply returns the pre-allocated object prepared by
// InitializeBuiltinsTable.
Address Allocate(AllocationSpace space, int size);
void MoveToNextChunk(AllocationSpace space) { UNREACHABLE(); }
void SetAlignment(AllocationAlignment alignment) { UNREACHABLE(); }
HeapObject* GetMap(uint32_t index) { UNREACHABLE(); }
HeapObject* GetLargeObject(uint32_t index) { UNREACHABLE(); }
HeapObject* GetObject(AllocationSpace space, uint32_t chunk_index,
uint32_t chunk_offset) {
UNREACHABLE();
}
// ------- Reservation Methods -------
// Methods related to memory reservations (prior to deserialization).
// Builtin deserialization does not bake reservations into the snapshot, hence
// this is a nop.
void DecodeReservation(Vector<const SerializedData::Reservation> res) {}
// These methods are used to pre-allocate builtin objects prior to
// deserialization.
// TODO(jgruber): Refactor reservation/allocation logic in deserializers to
// make this less messy.
Heap::Reservation CreateReservationsForEagerBuiltins();
void InitializeBuiltinsTable(const Heap::Reservation& reservation);
// Creates reservations and initializes the builtins table in preparation for
// lazily deserializing a single builtin.
void ReserveAndInitializeBuiltinsTableForBuiltin(int builtin_id);
#ifdef DEBUG
bool ReservationsAreFullyUsed() const;
#endif
// For SortMapDescriptors().
const std::vector<Address>& GetAllocatedMaps() const {
static std::vector<Address> empty_vector(0);
return empty_vector;
}
private:
Isolate* isolate() const;
BuiltinDeserializer* deserializer() const;
// Used after memory allocation prior to isolate initialization, to register
// the newly created object in code space and add it to the builtins table.
void InitializeBuiltinFromReservation(const Heap::Chunk& chunk,
int builtin_id);
#ifdef DEBUG
void RegisterBuiltinReservation(int builtin_id);
void RegisterBuiltinAllocation(int builtin_id);
std::unordered_set<int> unused_reservations_;
#endif
private:
// The current deserializer. Note that this always points to a
// BuiltinDeserializer instance, but we can't perform the cast during
// construction since that makes vtable-based checks fail.
Deserializer<BuiltinDeserializerAllocator>* const deserializer_;
DISALLOW_COPY_AND_ASSIGN(BuiltinDeserializerAllocator)
};
} // namespace internal
} // namespace v8
#endif // V8_SNAPSHOT_BUILTIN_DESERIALIZER_ALLOCATOR_H_
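The eager path declared above — CreateReservationsForEagerBuiltins followed by InitializeBuiltinsTable — allocates one reservation slot per eagerly deserialized builtin, with DeserializeLazy always in the first slot and every lazy builtin later pointed at the DeserializeLazy entry. A toy sketch of that pairing logic (hypothetical helpers; plain sizes and indices stand in for real Heap::Reservation chunks and Code objects):

#include <cassert>
#include <cstdint>
#include <vector>

constexpr int kDeserializeLazy = 0;
constexpr int kBuiltinCount = 5;

bool IsLazy(int id) { return id >= 3; }  // pretend builtins 3..4 are lazy

// Analogue of CreateReservationsForEagerBuiltins: DeserializeLazy first,
// lazy builtins skipped entirely.
std::vector<uint32_t> CreateReservations(const std::vector<uint32_t>& sizes) {
  std::vector<uint32_t> result;
  result.push_back(sizes[kDeserializeLazy]);  // always the first slot
  for (int i = 0; i < kBuiltinCount; i++) {
    if (i == kDeserializeLazy || IsLazy(i)) continue;
    result.push_back(sizes[i]);
  }
  return result;
}

// Analogue of InitializeBuiltinsTable: walk the reservations in the same
// order; lazy builtins all share the DeserializeLazy entry.
std::vector<int> InitializeTable(size_t reservation_count) {
  std::vector<int> table(kBuiltinCount);
  size_t reservation_index = 0;
  table[kDeserializeLazy] = static_cast<int>(reservation_index++);
  for (int i = 0; i < kBuiltinCount; i++) {
    if (i == kDeserializeLazy) continue;
    table[i] = IsLazy(i) ? table[kDeserializeLazy]
                         : static_cast<int>(reservation_index++);
  }
  assert(reservation_index == reservation_count);  // mirrors the DCHECK_EQ
  return table;
}

int main() {
  std::vector<uint32_t> sizes = {100, 200, 300, 400, 500};
  std::vector<uint32_t> reservations = CreateReservations(sizes);  // {100, 200, 300}
  std::vector<int> table = InitializeTable(reservations.size());
  assert(table[3] == table[kDeserializeLazy]);  // lazy builtin shares the slot
  return 0;
}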
@@ -36,10 +36,6 @@ class DeserializingBuiltinScope {
BuiltinDeserializer::BuiltinDeserializer(Isolate* isolate,
const BuiltinSnapshotData* data)
: Deserializer(data, false) {
// We may have to relax this at some point to pack reloc infos and handler
// tables into the builtin blob (instead of the partial snapshot cache).
DCHECK(allocator()->ReservesOnlyCodeSpace());
builtin_offsets_ = data->BuiltinOffsets();
DCHECK_EQ(Builtins::builtin_count, builtin_offsets_.length());
DCHECK(std::is_sorted(builtin_offsets_.begin(), builtin_offsets_.end()));
@@ -59,7 +55,7 @@ void BuiltinDeserializer::DeserializeEagerBuiltins() {
DCHECK_EQ(builtins->builtin(Builtins::kDeserializeLazy),
builtins->builtin(i));
} else {
builtins->set_builtin(i, DeserializeBuiltin(i));
builtins->set_builtin(i, DeserializeBuiltinRaw(i));
}
}
@@ -72,6 +68,12 @@ void BuiltinDeserializer::DeserializeEagerBuiltins() {
}
Code* BuiltinDeserializer::DeserializeBuiltin(int builtin_id) {
allocator()->ReserveAndInitializeBuiltinsTableForBuiltin(builtin_id);
DisallowHeapAllocation no_gc;
return DeserializeBuiltinRaw(builtin_id);
}
Code* BuiltinDeserializer::DeserializeBuiltinRaw(int builtin_id) {
DCHECK(!AllowHeapAllocation::IsAllowed());
DCHECK(Builtins::IsBuiltinId(builtin_id));
@@ -135,110 +137,5 @@ uint32_t BuiltinDeserializer::ExtractBuiltinSize(int builtin_id) {
return result;
}
Heap::Reservation BuiltinDeserializer::CreateReservationsForEagerBuiltins() {
DCHECK(allocator()->ReservesOnlyCodeSpace());
Heap::Reservation result;
// DeserializeLazy is always the first reservation (to simplify logic in
// InitializeBuiltinsTable).
{
DCHECK(!Builtins::IsLazy(Builtins::kDeserializeLazy));
uint32_t builtin_size = ExtractBuiltinSize(Builtins::kDeserializeLazy);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
result.push_back({builtin_size, nullptr, nullptr});
}
for (int i = 0; i < Builtins::builtin_count; i++) {
if (i == Builtins::kDeserializeLazy) continue;
// Skip lazy builtins. These will be replaced by the DeserializeLazy code
// object in InitializeBuiltinsTable and thus require no reserved space.
if (IsLazyDeserializationEnabled() && Builtins::IsLazy(i)) continue;
uint32_t builtin_size = ExtractBuiltinSize(i);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
result.push_back({builtin_size, nullptr, nullptr});
}
return result;
}
void BuiltinDeserializer::InitializeBuiltinFromReservation(
const Heap::Chunk& chunk, int builtin_id) {
DCHECK_EQ(ExtractBuiltinSize(builtin_id), chunk.size);
DCHECK_EQ(chunk.size, chunk.end - chunk.start);
SkipList::Update(chunk.start, chunk.size);
isolate()->builtins()->set_builtin(builtin_id,
HeapObject::FromAddress(chunk.start));
}
void BuiltinDeserializer::InitializeBuiltinsTable(
const Heap::Reservation& reservation) {
DCHECK(!AllowHeapAllocation::IsAllowed());
Builtins* builtins = isolate()->builtins();
int reservation_index = 0;
// DeserializeLazy itself must not be lazy, since other builtins are replaced
// by it. It always occupies the first reservation slot.
{
DCHECK(!Builtins::IsLazy(Builtins::kDeserializeLazy));
InitializeBuiltinFromReservation(reservation[reservation_index],
Builtins::kDeserializeLazy);
reservation_index++;
}
Code* deserialize_lazy = builtins->builtin(Builtins::kDeserializeLazy);
for (int i = 0; i < Builtins::builtin_count; i++) {
if (i == Builtins::kDeserializeLazy) continue;
if (IsLazyDeserializationEnabled() && Builtins::IsLazy(i)) {
builtins->set_builtin(i, deserialize_lazy);
} else {
InitializeBuiltinFromReservation(reservation[reservation_index], i);
reservation_index++;
}
}
DCHECK_EQ(reservation.size(), reservation_index);
}
void BuiltinDeserializer::ReserveAndInitializeBuiltinsTableForBuiltin(
int builtin_id) {
DCHECK(AllowHeapAllocation::IsAllowed());
DCHECK(isolate()->builtins()->is_initialized());
DCHECK(Builtins::IsBuiltinId(builtin_id));
DCHECK_NE(Builtins::kDeserializeLazy, builtin_id);
DCHECK_EQ(Builtins::kDeserializeLazy,
isolate()->builtins()->builtin(builtin_id)->builtin_index());
const uint32_t builtin_size = ExtractBuiltinSize(builtin_id);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
Handle<HeapObject> o =
isolate()->factory()->NewCodeForDeserialization(builtin_size);
// Note: After this point and until deserialization finishes, heap allocation
// is disallowed. We currently can't safely assert this since we'd need to
// pass the DisallowHeapAllocation scope out of this function.
// Write the allocated filler object into the builtins table. It will be
// returned by our custom Allocate method below once needed.
isolate()->builtins()->set_builtin(builtin_id, *o);
}
Address BuiltinDeserializer::Allocate(int space_index, int size) {
DCHECK_EQ(CODE_SPACE, space_index);
DCHECK_EQ(ExtractBuiltinSize(current_builtin_id_), size);
Object* obj = isolate()->builtins()->builtin(current_builtin_id_);
DCHECK(Internals::HasHeapObjectTag(obj));
HeapObject* heap_obj = HeapObject::cast(obj);
return heap_obj->address();
}
} // namespace internal
} // namespace v8
@@ -5,7 +5,7 @@
#ifndef V8_SNAPSHOT_BUILTIN_DESERIALIZER_H_
#define V8_SNAPSHOT_BUILTIN_DESERIALIZER_H_
#include "src/heap/heap.h"
#include "src/snapshot/builtin-deserializer-allocator.h"
#include "src/snapshot/deserializer.h"
namespace v8 {
@@ -14,7 +14,8 @@ namespace internal {
class BuiltinSnapshotData;
// Deserializes the builtins blob.
class BuiltinDeserializer final : public Deserializer<> {
class BuiltinDeserializer final
: public Deserializer<BuiltinDeserializerAllocator> {
public:
BuiltinDeserializer(Isolate* isolate, const BuiltinSnapshotData* data);
@@ -27,50 +28,30 @@ class BuiltinDeserializer final : public Deserializer<> {
// do it ourselves since the startup serializer batch-flushes all code pages).
void DeserializeEagerBuiltins();
// Deserializes the single given builtin. Assumes that reservations have
// already been allocated.
// Deserializes the single given builtin. This is used whenever a builtin is
// lazily deserialized at runtime.
Code* DeserializeBuiltin(int builtin_id);
// These methods are used to pre-allocate builtin objects prior to
// deserialization.
// TODO(jgruber): Refactor reservation/allocation logic in deserializers to
// make this less messy.
Heap::Reservation CreateReservationsForEagerBuiltins();
void InitializeBuiltinsTable(const Heap::Reservation& reservation);
// Creates reservations and initializes the builtins table in preparation for
// lazily deserializing a single builtin.
void ReserveAndInitializeBuiltinsTableForBuiltin(int builtin_id);
private:
// Deserializes the single given builtin. Assumes that reservations have
// already been allocated.
Code* DeserializeBuiltinRaw(int builtin_id);
// TODO(jgruber): Remove once allocations have been refactored.
void SetPositionToBuiltin(int builtin_id);
// Extracts the size of the builtin Code object (baked into the snapshot).
uint32_t ExtractBuiltinSize(int builtin_id);
// Used after memory allocation prior to isolate initialization, to register
// the newly created object in code space and add it to the builtins table.
void InitializeBuiltinFromReservation(const Heap::Chunk& chunk,
int builtin_id);
// Allocation works differently here than in other deserializers. Instead of
// a statically-known memory area determined at serialization-time, our
// memory requirements here are determined at runtime. Another major
// difference is that we create builtin Code objects up-front (before
// deserialization) in order to avoid having to patch builtin references
// later on. See also the kBuiltin case in deserializer.cc.
//
// Allocate simply returns the pre-allocated object prepared by
// InitializeBuiltinsTable.
Address Allocate(int space_index, int size) override;
// BuiltinDeserializer implements its own builtin iteration logic. Make sure
// the RootVisitor API is not used accidentally.
void VisitRootPointers(Root root, Object** start, Object** end) override {
UNREACHABLE();
}
int CurrentBuiltinId() const { return current_builtin_id_; }
private:
// Stores the builtin currently being deserialized. We need this to determine
// where to 'allocate' from during deserialization.
static const int kNoBuiltinId = -1;
@@ -80,7 +61,12 @@ class BuiltinDeserializer final : public Deserializer<> {
// BuiltinSerializer::builtin_offsets_ but on the deserialization side.
Vector<const uint32_t> builtin_offsets_;
// For current_builtin_id_.
friend class DeserializingBuiltinScope;
// For isolate(), IsLazyDeserializationEnabled(), CurrentBuiltinId() and
// ExtractBuiltinSize().
friend class BuiltinDeserializerAllocator;
};
} // namespace internal
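The current_builtin_id_ and DeserializingBuiltinScope comments above imply a small RAII pattern: the scope publishes which builtin is currently being deserialized (so the allocator's Allocate can locate its pre-created slot via CurrentBuiltinId()) and clears it again on exit. The scope's body is not part of this hunk, so the following is a hypothetical sketch of that pattern only:

#include <cassert>

class BuiltinDeserializerModel {
 public:
  static constexpr int kNoBuiltinId = -1;
  int CurrentBuiltinId() const { return current_builtin_id_; }

 private:
  friend class DeserializingBuiltinScopeModel;
  int current_builtin_id_ = kNoBuiltinId;
};

// RAII helper: sets the current builtin id on entry, resets it on exit.
class DeserializingBuiltinScopeModel {
 public:
  DeserializingBuiltinScopeModel(BuiltinDeserializerModel* deserializer,
                                 int builtin_id)
      : deserializer_(deserializer) {
    deserializer_->current_builtin_id_ = builtin_id;
  }
  ~DeserializingBuiltinScopeModel() {
    deserializer_->current_builtin_id_ = BuiltinDeserializerModel::kNoBuiltinId;
  }

 private:
  BuiltinDeserializerModel* const deserializer_;
};

int main() {
  BuiltinDeserializerModel deserializer;
  {
    DeserializingBuiltinScopeModel scope(&deserializer, 42);
    assert(deserializer.CurrentBuiltinId() == 42);  // visible to Allocate()
  }
  assert(deserializer.CurrentBuiltinId() ==
         BuiltinDeserializerModel::kNoBuiltinId);
  return 0;
}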
@@ -169,7 +169,7 @@ bool DefaultDeserializerAllocator::ReserveSpace(
}
Heap::Reservation builtin_reservations =
builtin_deserializer->CreateReservationsForEagerBuiltins();
builtin_deserializer->allocator()->CreateReservationsForEagerBuiltins();
DCHECK(!builtin_reservations.empty());
for (const auto& c : builtin_reservations) {
@@ -199,7 +199,8 @@ bool DefaultDeserializerAllocator::ReserveSpace(
merged_reservations[CODE_SPACE].pop_back();
}
builtin_deserializer->InitializeBuiltinsTable(builtin_reservations);
builtin_deserializer->allocator()->InitializeBuiltinsTable(
builtin_reservations);
}
// Write back startup reservations.
@@ -212,22 +213,11 @@ bool DefaultDeserializerAllocator::ReserveSpace(
for (int i = first_space; i < kNumberOfPreallocatedSpaces; i++) {
startup_deserializer->allocator()->high_water_[i] =
startup_deserializer->allocator()->reservations_[i][0].start;
builtin_deserializer->allocator()->high_water_[i] = nullptr;
}
return true;
}
bool DefaultDeserializerAllocator::ReservesOnlyCodeSpace() const {
for (int space = NEW_SPACE; space < kNumberOfSpaces; space++) {
if (space == CODE_SPACE) continue;
const auto& r = reservations_[space];
for (const Heap::Chunk& c : r)
if (c.size != 0) return false;
}
return true;
}
bool DefaultDeserializerAllocator::ReservationsAreFullyUsed() const {
for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
const uint32_t chunk_index = current_chunk_[space];
@@ -52,7 +52,6 @@ class DefaultDeserializerAllocator final {
static bool ReserveSpace(StartupDeserializer* startup_deserializer,
BuiltinDeserializer* builtin_deserializer);
bool ReservesOnlyCodeSpace() const;
bool ReservationsAreFullyUsed() const;
// ------- Misc Utility Methods -------
@@ -7,6 +7,7 @@
#include "src/assembler-inl.h"
#include "src/isolate.h"
#include "src/objects/string.h"
#include "src/snapshot/builtin-deserializer-allocator.h"
#include "src/snapshot/natives.h"
namespace v8 {
@@ -272,7 +273,8 @@ void Deserializer<AllocatorT>::ReadObject(int space_number,
Object** write_back) {
const int size = source_.GetInt() << kObjectAlignmentBits;
Address address = Allocate(space_number, size);
Address address =
allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
HeapObject* obj = HeapObject::FromAddress(address);
isolate_->heap()->OnAllocationEvent(obj, size);
@@ -295,13 +297,6 @@ void Deserializer<AllocatorT>::ReadObject(int space_number,
#endif // DEBUG
}
template <class AllocatorT>
Address Deserializer<AllocatorT>::Allocate(int space_index, int size) {
// TODO(jgruber): Remove this indirection once we have a
// BuiltinDeserializerAllocator.
return allocator()->Allocate(static_cast<AllocationSpace>(space_index), size);
}
template <class AllocatorT>
Object* Deserializer<AllocatorT>::ReadDataSingle() {
Object* o;
@@ -718,6 +713,7 @@ Object** Deserializer<AllocatorT>::ReadDataCase(Isolate* isolate,
}
// Explicit instantiation.
template class Deserializer<BuiltinDeserializerAllocator>;
template class Deserializer<DefaultDeserializerAllocator>;
} // namespace internal
@@ -55,8 +55,6 @@ class Deserializer : public SerializerDeserializer {
void Initialize(Isolate* isolate);
void DeserializeDeferredObjects();
virtual Address Allocate(int space_index, int size);
// Deserializes into a single pointer and returns the resulting object.
Object* ReadDataSingle();
@@ -97,10 +97,6 @@ Code* Snapshot::DeserializeBuiltin(Isolate* isolate, int builtin_id) {
BuiltinSnapshotData builtin_snapshot_data(builtin_data);
BuiltinDeserializer builtin_deserializer(isolate, &builtin_snapshot_data);
builtin_deserializer.ReserveAndInitializeBuiltinsTableForBuiltin(builtin_id);
DisallowHeapAllocation no_gc;
Code* code = builtin_deserializer.DeserializeBuiltin(builtin_id);
DCHECK_EQ(code, isolate->builtins()->builtin(builtin_id));
@@ -1339,6 +1339,8 @@
'setup-isolate.h',
'signature.h',
'simulator.h',
'snapshot/builtin-deserializer-allocator.cc',
'snapshot/builtin-deserializer-allocator.h',
'snapshot/builtin-deserializer.cc',
'snapshot/builtin-deserializer.h',
'snapshot/builtin-serializer.cc',