Commit 2ddc26fc authored by Maciej Goszczycki, committed by Commit Bot

[cleanup] Switch DeserializerAllocator to use Heap more directly.

Bug: v8:8562
Change-Id: I05421c9fa96e6c1d424c6d529922cde02ba2dc37
Reviewed-on: https://chromium-review.googlesource.com/c/1494760
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Dan Elphick <delphick@chromium.org>
Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
Cr-Commit-Position: refs/heads/master@{#59981}
parent e5ec83fb
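In broad strokes, the cleanup drops the allocator's back-pointer to its Deserializer (which it only used to reach the Heap via isolate()->heap()) and instead hands the allocator a Heap* once, when the deserializer is initialized. Below is a minimal, compilable sketch of that wiring using stand-in classes rather than the real V8 types; note the real Deserializer::Initialize receives an Isolate* and forwards isolate->heap().

```cpp
// Stand-in types for illustration only; the real classes are
// v8::internal::Heap, Deserializer and DeserializerAllocator.
class Heap {};

class DeserializerAllocator final {
 public:
  DeserializerAllocator() = default;
  // The owning Deserializer injects the Heap exactly once.
  void Initialize(Heap* heap) { heap_ = heap; }

 private:
  Heap* heap_ = nullptr;  // Replaces the old `Deserializer* const deserializer_`.
};

class Deserializer {
 public:
  // Simplified: the real Initialize takes an Isolate* and passes isolate->heap().
  void Initialize(Heap* heap) { allocator_.Initialize(heap); }

 private:
  DeserializerAllocator allocator_;
};

int main() {
  Heap heap;
  Deserializer deserializer;
  deserializer.Initialize(&heap);  // The allocator can now use the Heap directly.
  return 0;
}
```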
......
@@ -566,11 +566,13 @@ void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
   base::CheckedDecrement(&backing_store_bytes_, amount);
 }
-AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
-    : heap_(isolate->heap()) {
+AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) {
   heap_->always_allocate_scope_count_++;
 }
+AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
+    : AlwaysAllocateScope(isolate->heap()) {}
 AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }
......
......
@@ -2094,6 +2094,7 @@ class HeapStats {
 class AlwaysAllocateScope {
  public:
+  explicit inline AlwaysAllocateScope(Heap* heap);
   explicit inline AlwaysAllocateScope(Isolate* isolate);
   inline ~AlwaysAllocateScope();
......
......
@@ -5,15 +5,10 @@
 #include "src/snapshot/deserializer-allocator.h"
 #include "src/heap/heap-inl.h"  // crbug.com/v8/8499
 #include "src/snapshot/deserializer.h"
-#include "src/snapshot/startup-deserializer.h"
 namespace v8 {
 namespace internal {
-DeserializerAllocator::DeserializerAllocator(Deserializer* deserializer)
-    : deserializer_(deserializer) {}
 // We know the space requirements before deserialization and can
 // pre-allocate that reserved space. During deserialization, all we need
 // to do is to bump up the pointer for each space in the reserved
......
@@ -27,10 +22,10 @@ DeserializerAllocator::DeserializerAllocator(Deserializer* deserializer)
 // reference large objects by index.
 Address DeserializerAllocator::AllocateRaw(AllocationSpace space, int size) {
   if (space == LO_SPACE) {
-    AlwaysAllocateScope scope(isolate());
+    AlwaysAllocateScope scope(heap_);
     // Note that we currently do not support deserialization of large code
     // objects.
-    LargeObjectSpace* lo_space = isolate()->heap()->lo_space();
+    LargeObjectSpace* lo_space = heap_->lo_space();
     AllocationResult result = lo_space->AllocateRaw(size);
     HeapObject obj = result.ToObjectChecked();
     deserialized_large_objects_.push_back(obj);
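As the comment above explains, the space requirements are known before deserialization starts, so ordinary allocations only bump a pointer inside pre-reserved chunks, while large objects are allocated individually and later referenced by their index. The following is a toy sketch of that scheme, with made-up names (Chunk, BumpAllocator) and none of the real reservation or GC machinery.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// One pre-reserved block of memory for a given space.
struct Chunk {
  uintptr_t top;  // Bump pointer: next free address.
  uintptr_t end;  // One past the last usable address.
};

class BumpAllocator {
 public:
  explicit BumpAllocator(std::vector<Chunk> reserved)
      : chunks_(std::move(reserved)) {}

  // Regular allocation: bump the pointer in the current chunk, moving to the
  // next chunk when it is exhausted. Assumes the reservations were sized
  // exactly to the serialized data, as they are during deserialization.
  uintptr_t AllocateRaw(size_t size) {
    while (chunks_[current_].top + size > chunks_[current_].end) {
      ++current_;
      assert(current_ < chunks_.size() && "reservation too small");
    }
    uintptr_t address = chunks_[current_].top;
    chunks_[current_].top += size;
    return address;
  }

  // Large objects bypass the reserved chunks; they are recorded so that later
  // back-references can name them by index.
  size_t RegisterLargeObject(uintptr_t address) {
    large_objects_.push_back(address);
    return large_objects_.size() - 1;
  }

 private:
  std::vector<Chunk> chunks_;
  size_t current_ = 0;
  std::vector<uintptr_t> large_objects_;
};

int main() {
  // Pretend addresses; a real reservation would come from the Heap.
  BumpAllocator allocator({{0x1000, 0x1040}, {0x2000, 0x2040}});
  assert(allocator.AllocateRaw(0x20) == 0x1000);
  assert(allocator.AllocateRaw(0x20) == 0x1020);
  assert(allocator.AllocateRaw(0x20) == 0x2000);  // Spills into the next chunk.
  assert(allocator.RegisterLargeObject(0x9000) == 0);
}
```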
......
@@ -65,11 +60,10 @@ Address DeserializerAllocator::Allocate(AllocationSpace space, int size) {
     // If one of the following assertions fails, then we are deserializing an
     // aligned object when the filler maps have not been deserialized yet.
     // We require filler maps as padding to align the object.
-    Heap* heap = isolate()->heap();
-    DCHECK(ReadOnlyRoots(heap).free_space_map()->IsMap());
-    DCHECK(ReadOnlyRoots(heap).one_pointer_filler_map()->IsMap());
-    DCHECK(ReadOnlyRoots(heap).two_pointer_filler_map()->IsMap());
-    obj = heap->AlignWithFiller(obj, size, reserved, next_alignment_);
+    DCHECK(ReadOnlyRoots(heap_).free_space_map()->IsMap());
+    DCHECK(ReadOnlyRoots(heap_).one_pointer_filler_map()->IsMap());
+    DCHECK(ReadOnlyRoots(heap_).two_pointer_filler_map()->IsMap());
+    obj = heap_->AlignWithFiller(obj, size, reserved, next_alignment_);
     address = obj->address();
     next_alignment_ = kWordAligned;
     return address;
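The DCHECKs above guard a real constraint: padding an object to a stronger alignment means writing a filler object in front of it, and a filler is itself a heap object that needs its map, so the filler maps must already have been deserialized. Below is a rough sketch of the padding arithmetic only, assuming 8-byte words and 16-byte (double) alignment; the real logic is Heap::AlignWithFiller, which writes actual filler objects rather than just adjusting an address.

```cpp
#include <cassert>
#include <cstdint>

constexpr uintptr_t kWordSize = 8;
constexpr uintptr_t kDoubleAlignment = 16;

// Given an allocation that reserved `size + kWordSize` bytes, return the
// aligned start of the object; the skipped word (if any) would be filled
// with a one-word filler object so the heap remains iterable.
uintptr_t AlignWithFiller(uintptr_t reserved_start) {
  if (reserved_start % kDoubleAlignment == 0) return reserved_start;
  return reserved_start + kWordSize;  // Filler goes at reserved_start.
}

int main() {
  assert(AlignWithFiller(0x20) == 0x20);  // Already aligned: no filler.
  assert(AlignWithFiller(0x28) == 0x30);  // Misaligned: one filler word added.
}
```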
......
@@ -135,7 +129,7 @@ bool DeserializerAllocator::ReserveSpace() {
   }
 #endif  // DEBUG
   DCHECK(allocated_maps_.empty());
-  if (!isolate()->heap()->ReserveSpace(reservations_, &allocated_maps_)) {
+  if (!heap_->ReserveSpace(reservations_, &allocated_maps_)) {
     return false;
   }
   for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
......
@@ -158,13 +152,9 @@ bool DeserializerAllocator::ReservationsAreFullyUsed() const {
 }
 void DeserializerAllocator::RegisterDeserializedObjectsForBlackAllocation() {
-  isolate()->heap()->RegisterDeserializedObjectsForBlackAllocation(
+  heap_->RegisterDeserializedObjectsForBlackAllocation(
       reservations_, deserialized_large_objects_, allocated_maps_);
 }
-Isolate* DeserializerAllocator::isolate() const {
-  return deserializer_->isolate();
-}
 }  // namespace internal
 }  // namespace v8
......
@@ -18,7 +18,9 @@ class StartupDeserializer;
 class DeserializerAllocator final {
  public:
-  explicit DeserializerAllocator(Deserializer* deserializer);
+  DeserializerAllocator() = default;
+  void Initialize(Heap* heap) { heap_ = heap; }
   // ------- Allocation Methods -------
   // Methods related to memory allocation during deserialization.
......
@@ -65,8 +67,6 @@ class DeserializerAllocator final {
   void RegisterDeserializedObjectsForBlackAllocation();
  private:
-  Isolate* isolate() const;
   // Raw allocation without considering alignment.
   Address AllocateRaw(AllocationSpace space, int size);
......
@@ -97,8 +97,7 @@ class DeserializerAllocator final {
   // back-references.
   std::vector<HeapObject> deserialized_large_objects_;
-  // The current deserializer.
-  Deserializer* const deserializer_;
+  Heap* heap_;
   DISALLOW_COPY_AND_ASSIGN(DeserializerAllocator);
 };
......
......
@@ -47,6 +47,7 @@ void Deserializer::Initialize(Isolate* isolate) {
   DCHECK_NULL(isolate_);
   DCHECK_NOT_NULL(isolate);
   isolate_ = isolate;
+  allocator()->Initialize(isolate->heap());
   DCHECK_NULL(external_reference_table_);
   external_reference_table_ = isolate->external_reference_table();
 #ifdef DEBUG
......
......
@@ -48,7 +48,6 @@ class Deserializer : public SerializerDeserializer {
         source_(data->Payload()),
         magic_number_(data->GetMagicNumber()),
         external_reference_table_(nullptr),
-        allocator_(this),
         deserializing_user_code_(deserializing_user_code),
         can_rehash_(false) {
     allocator()->DecodeReservation(data->Reservations());
......