Commit 83786cb4 authored by Yang Guo, committed by Commit Bot

Delay setting up deserialized JSArrayBuffer

Setting up a JSArrayBuffer may trigger GC. Delay this until we
are done with deserialization.
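Editor's note: in essence, the deserializer now only records each off-heap JSArrayBuffer while allocation is disallowed, and performs the GC-triggering Setup after deserialization finishes. A minimal sketch of that two-phase pattern, using illustrative stand-in types (PostProcess, pending_, etc. are not the real V8 names; the actual code is in the diff below):

#include <cstddef>
#include <memory>
#include <vector>

// Illustrative stand-ins; these are not the real V8 types or APIs.
struct BackingStore {
  bool is_shared() const { return false; }
};

struct JSArrayBuffer {
  std::size_t store_index;  // Index the serializer left in the pointer field.
  void Setup(bool shared, std::shared_ptr<BackingStore> store) {
    // In real V8 this may allocate, and therefore may trigger GC.
    (void)shared;
    (void)store;
  }
};

class Deserializer {
 public:
  // Phase 1: runs while GC is disallowed, so only record the buffer.
  void PostProcess(JSArrayBuffer* buffer) { pending_.push_back(buffer); }

  // Phase 2: runs after deserialization, when GC is allowed again.
  void CommitPostProcessedObjects() {
    for (JSArrayBuffer* buffer : pending_) {
      auto store = backing_stores_[buffer->store_index];
      buffer->Setup(store && store->is_shared(), store);
    }
    pending_.clear();
  }

 private:
  std::vector<JSArrayBuffer*> pending_;
  std::vector<std::shared_ptr<BackingStore>> backing_stores_;
};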

R=ulan@chromium.org

Bug: chromium:1033395
Change-Id: I6c79bc47421bc2662dc1906534fc8e820c351ced
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1965580
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65441}
parent f47a8a88
@@ -9257,6 +9257,7 @@ class V8_EXPORT Isolate {
   internal::Address* GetDataFromSnapshotOnce(size_t index);
   void ReportExternalAllocationLimitReached();
   void CheckMemoryPressure();
+  void CheckGarbageCollectionIsAllowed();
 };
 
 class V8_EXPORT StartupData {
@@ -11663,6 +11664,9 @@ MaybeLocal<T> Isolate::GetDataFromSnapshotOnce(size_t index) {
 
 int64_t Isolate::AdjustAmountOfExternalAllocatedMemory(
     int64_t change_in_bytes) {
+#ifdef V8_ENABLE_CHECKS
+  CheckGarbageCollectionIsAllowed();
+#endif
   typedef internal::Internals I;
   constexpr int64_t kMemoryReducerActivationLimit = 32 * 1024 * 1024;
   int64_t* external_memory = reinterpret_cast<int64_t*>(
...
@@ -8067,6 +8067,10 @@ void Isolate::CheckMemoryPressure() {
   heap->CheckMemoryPressure();
 }
 
+void Isolate::CheckGarbageCollectionIsAllowed() {
+  DCHECK(i::AllowHeapAllocation::IsAllowed());
+}
+
 HeapProfiler* Isolate::GetHeapProfiler() {
   i::HeapProfiler* heap_profiler =
       reinterpret_cast<i::Isolate*>(this)->heap_profiler();
...
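Editor's note: the practical effect of the two hunks above is that, in V8_ENABLE_CHECKS builds, AdjustAmountOfExternalAllocatedMemory (which can schedule GC work via CheckMemoryPressure) now asserts that heap allocation is currently allowed. That catches exactly the pattern this commit removes: JSArrayBuffer::Setup reporting external memory while deserialization forbids GC. A hedged sketch of the embedder-visible call being guarded (the helper name is hypothetical; the V8 signature is real and shown in the diff):

#include <cstdint>
#include <v8.h>

// Hypothetical embedder helper: reports an externally allocated buffer so
// V8 can account for it. In a V8_ENABLE_CHECKS build this call now DCHECKs
// (via CheckGarbageCollectionIsAllowed) if made while GC is disallowed.
void ReportExternalBuffer(v8::Isolate* isolate, int64_t size_in_bytes) {
  isolate->AdjustAmountOfExternalAllocatedMemory(size_in_bytes);
}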
@@ -274,9 +274,16 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
   } else if (obj.IsJSDataView()) {
     JSDataView data_view = JSDataView::cast(obj);
     JSArrayBuffer buffer = JSArrayBuffer::cast(data_view.buffer());
-    data_view.set_data_pointer(
-        reinterpret_cast<uint8_t*>(buffer.backing_store()) +
-        data_view.byte_offset());
+    void* backing_store = nullptr;
+    if (buffer.backing_store() != nullptr) {
+      // The backing store of the JSArrayBuffer has not been correctly restored
+      // yet, as that may trigger GC. The backing_store field currently contains
+      // a numbered reference to an already deserialized backing store.
+      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
+      backing_store = backing_stores_[store_index]->buffer_start();
+    }
+    data_view.set_data_pointer(reinterpret_cast<uint8_t*>(backing_store) +
+                               data_view.byte_offset());
   } else if (obj.IsJSTypedArray()) {
     JSTypedArray typed_array = JSTypedArray::cast(obj);
     // Fixup typed array pointers.
@@ -294,15 +301,9 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
     }
   } else if (obj.IsJSArrayBuffer()) {
     JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
-    // Only fixup for the off-heap case.
+    // Only fixup for the off-heap case. This may trigger GC.
     if (buffer.backing_store() != nullptr) {
-      // Serializer writes backing store ref in |backing_store| field.
-      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
-      auto backing_store = backing_stores_[store_index];
-      SharedFlag shared = backing_store && backing_store->is_shared()
-                              ? SharedFlag::kShared
-                              : SharedFlag::kNotShared;
-      buffer.Setup(shared, backing_store);
+      new_off_heap_array_buffers_.push_back(handle(buffer, isolate_));
     }
   } else if (obj.IsBytecodeArray()) {
     // TODO(mythria): Remove these once we store the default values for these
...
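Editor's note: the hunk above relies on the serializer having written a small integer (a backing-store index), rather than a real pointer, into the pointer-sized backing_store field; until Setup runs, that field may only be decoded, never dereferenced. A self-contained illustration of the encode/decode round-trip (plain C++, not V8 code):

#include <cassert>
#include <cstddef>

int main() {
  // Encode: store index 3 where a pointer normally lives (serializer side).
  std::size_t index = 3;
  void* encoded = reinterpret_cast<void*>(index);

  // Decode: recover the index from the pointer field (deserializer side).
  // Dereferencing |encoded| would be undefined behavior; only the cast back
  // to an integer is valid.
  std::size_t decoded = reinterpret_cast<std::size_t>(encoded);
  assert(decoded == 3);
  return 0;
}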
@@ -76,6 +76,10 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     attached_objects_.push_back(attached_object);
   }
 
+  void CheckNoArrayBufferBackingStores() {
+    CHECK_EQ(new_off_heap_array_buffers().size(), 0);
+  }
+
   Isolate* isolate() const { return isolate_; }
   SnapshotByteSource* source() { return &source_; }
   const std::vector<AllocationSite>& new_allocation_sites() const {
@@ -98,6 +102,14 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     return new_scripts_;
   }
 
+  const std::vector<Handle<JSArrayBuffer>>& new_off_heap_array_buffers() const {
+    return new_off_heap_array_buffers_;
+  }
+
+  std::shared_ptr<BackingStore> backing_store(size_t i) {
+    return backing_stores_[i];
+  }
+
   DeserializerAllocator* allocator() { return &allocator_; }
   bool deserializing_user_code() const { return deserializing_user_code_; }
   bool can_rehash() const { return can_rehash_; }
@@ -172,6 +184,7 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
   std::vector<CallHandlerInfo> call_handler_infos_;
   std::vector<Handle<String>> new_internalized_strings_;
   std::vector<Handle<Script>> new_scripts_;
+  std::vector<Handle<JSArrayBuffer>> new_off_heap_array_buffers_;
   std::vector<std::shared_ptr<BackingStore>> backing_stores_;
   DeserializerAllocator allocator_;
...
@@ -90,6 +90,15 @@ void ObjectDeserializer::CommitPostProcessedObjects() {
         MaybeObjectHandle::Weak(script));
     heap->SetRootScriptList(*list);
   }
+
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
 }
 
 void ObjectDeserializer::LinkAllocationSites() {
...
@@ -37,27 +37,46 @@ MaybeHandle<Object> PartialDeserializer::Deserialize(
   AddAttachedObject(global_proxy);
 
-  DisallowHeapAllocation no_gc;
-  // Keep track of the code space start and end pointers in case new
-  // code objects were unserialized
-  CodeSpace* code_space = isolate->heap()->code_space();
-  Address start_address = code_space->top();
-  Object root;
-  VisitRootPointer(Root::kPartialSnapshotCache, nullptr, FullObjectSlot(&root));
-  DeserializeDeferredObjects();
-  DeserializeEmbedderFields(embedder_fields_deserializer);
-
-  allocator()->RegisterDeserializedObjectsForBlackAllocation();
-
-  // There's no code deserialized here. If this assert fires then that's
-  // changed and logging should be added to notify the profiler et al of the
-  // new code, which also has to be flushed from instruction cache.
-  CHECK_EQ(start_address, code_space->top());
-
-  if (FLAG_rehash_snapshot && can_rehash()) Rehash();
-  LogNewMapEvents();
-
-  return Handle<Object>(root, isolate);
+  Handle<Object> result;
+  {
+    DisallowHeapAllocation no_gc;
+    // Keep track of the code space start and end pointers in case new
+    // code objects were unserialized
+    CodeSpace* code_space = isolate->heap()->code_space();
+    Address start_address = code_space->top();
+    Object root;
+    VisitRootPointer(Root::kPartialSnapshotCache, nullptr,
+                     FullObjectSlot(&root));
+    DeserializeDeferredObjects();
+    DeserializeEmbedderFields(embedder_fields_deserializer);
+
+    allocator()->RegisterDeserializedObjectsForBlackAllocation();
+
+    // There's no code deserialized here. If this assert fires then that's
+    // changed and logging should be added to notify the profiler et al of the
+    // new code, which also has to be flushed from instruction cache.
+    CHECK_EQ(start_address, code_space->top());
+
+    if (FLAG_rehash_snapshot && can_rehash()) Rehash();
+    LogNewMapEvents();
+
+    result = handle(root, isolate);
+  }
+
+  SetupOffHeapArrayBufferBackingStores();
+
+  return result;
+}
+
+void PartialDeserializer::SetupOffHeapArrayBufferBackingStores() {
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
 }
 
 void PartialDeserializer::DeserializeEmbedderFields(
...
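Editor's note: the structural change in the hunk above matters because DisallowHeapAllocation is a scope-based (RAII) guard. Wrapping the original body in a block ends the no-GC scope before SetupOffHeapArrayBufferBackingStores() runs, so the GC-triggering Setup calls happen outside it. A minimal sketch of that idiom, using a stand-in guard type rather than the real V8 class:

// Stand-in for v8::internal::DisallowHeapAllocation; the real guard flips a
// per-thread flag that allocation paths DCHECK against.
struct NoGCScope {
  NoGCScope() { /* mark GC as forbidden */ }
  ~NoGCScope() { /* mark GC as allowed again */ }
};

void Deserialize() {
  {
    NoGCScope no_gc;  // GC forbidden only within this block.
    // ... deserialize objects; record array buffers needing setup ...
  }  // Guard destroyed here; GC is allowed again.
  // Now it is safe to run GC-triggering work such as JSArrayBuffer::Setup().
}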
@@ -33,6 +33,8 @@ class V8_EXPORT_PRIVATE PartialDeserializer final : public Deserializer {
   void DeserializeEmbedderFields(
       v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer);
+
+  void SetupOffHeapArrayBufferBackingStores();
 };
 
 }  // namespace internal
...
@@ -51,6 +51,7 @@ void ReadOnlyDeserializer::DeserializeInto(Isolate* isolate) {
       if (object->IsUndefined(roots)) break;
     }
     DeserializeDeferredObjects();
+    CheckNoArrayBufferBackingStores();
   }
 
   if (FLAG_rehash_snapshot && can_rehash()) {
...
@@ -44,6 +44,8 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
     FlushICache();
   }
 
+  CheckNoArrayBufferBackingStores();
+
   isolate->heap()->set_native_contexts_list(
       ReadOnlyRoots(isolate).undefined_value());
   // The allocation site list is build during root iteration, but if no sites
...