Commit 83786cb4 authored by Yang Guo, committed by Commit Bot

Delay setting up deserialized JSArrayBuffer

Setting up JSArrayBuffer may trigger GC. Delay this until we
are done with deserialization.

R=ulan@chromium.org

Bug: chromium:1033395
Change-Id: I6c79bc47421bc2662dc1906534fc8e820c351ced
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1965580
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65441}
parent f47a8a88
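The change follows a record-then-fixup pattern: while deserialization is in progress, JSArrayBuffers that need their backing store attached are only recorded, and the GC-unsafe setup runs once deserialization has finished. A minimal standalone sketch of that pattern (illustrative names, not V8 code):

#include <vector>

// Sketch: during deserialization no GC may run, so allocation-heavy setup is
// deferred. PostProcess() only records; FinishDeserialization() completes it.
struct BufferSketch { /* stand-in for JSArrayBuffer */ };

class DeserializerSketch {
 public:
  void PostProcess(BufferSketch* buffer) {
    pending_.push_back(buffer);  // pure bookkeeping, no heap allocation
  }
  void FinishDeserialization() {
    // Deserialization is complete; setup may now allocate and trigger GC.
    for (BufferSketch* buffer : pending_) SetUp(buffer);
    pending_.clear();
  }

 private:
  void SetUp(BufferSketch*) { /* may allocate, may trigger GC */ }
  std::vector<BufferSketch*> pending_;
};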
......
@@ -9257,6 +9257,7 @@ class V8_EXPORT Isolate {
   internal::Address* GetDataFromSnapshotOnce(size_t index);
   void ReportExternalAllocationLimitReached();
   void CheckMemoryPressure();
+  void CheckGarbageCollectionIsAllowed();
 };
 
 class V8_EXPORT StartupData {
......
@@ -11663,6 +11664,9 @@ MaybeLocal<T> Isolate::GetDataFromSnapshotOnce(size_t index) {
 
 int64_t Isolate::AdjustAmountOfExternalAllocatedMemory(
     int64_t change_in_bytes) {
+#ifdef V8_ENABLE_CHECKS
+  CheckGarbageCollectionIsAllowed();
+#endif
   typedef internal::Internals I;
   constexpr int64_t kMemoryReducerActivationLimit = 32 * 1024 * 1024;
   int64_t* external_memory = reinterpret_cast<int64_t*>(
......
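AdjustAmountOfExternalAllocatedMemory is an inline public API that can schedule a GC, which is why it gains the debug-only check above. A hypothetical embedder call (not from the commit) that the check would now guard when V8_ENABLE_CHECKS is defined:

#include <cstdint>
#include <v8.h>

// Hypothetical embedder helper: reporting externally held memory may schedule
// a GC, so calling it inside a no-GC phase such as deserialization would now
// fail the CheckGarbageCollectionIsAllowed() assertion instead of silently
// risking a GC at an unsafe point.
void ReportExternalBuffer(v8::Isolate* isolate, int64_t bytes) {
  isolate->AdjustAmountOfExternalAllocatedMemory(bytes);
}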
......
@@ -8067,6 +8067,10 @@ void Isolate::CheckMemoryPressure() {
   heap->CheckMemoryPressure();
 }
 
+void Isolate::CheckGarbageCollectionIsAllowed() {
+  DCHECK(i::AllowHeapAllocation::IsAllowed());
+}
+
 HeapProfiler* Isolate::GetHeapProfiler() {
   i::HeapProfiler* heap_profiler =
       reinterpret_cast<i::Isolate*>(this)->heap_profiler();
......
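The new Isolate::CheckGarbageCollectionIsAllowed() simply asserts i::AllowHeapAllocation::IsAllowed(). A standalone sketch of the scoped-flag idiom behind such a check (illustrative only, not V8's actual implementation):

#include <cassert>

// Scoped-permission idiom: while a scope object is alive, allocation (and
// hence GC) is forbidden; assertions consult a thread-local counter.
class DisallowHeapAllocationSketch {
 public:
  DisallowHeapAllocationSketch() { ++disallow_depth_; }
  ~DisallowHeapAllocationSketch() { --disallow_depth_; }
  static bool IsAllowed() { return disallow_depth_ == 0; }

 private:
  static thread_local int disallow_depth_;
};

thread_local int DisallowHeapAllocationSketch::disallow_depth_ = 0;

void CheckGarbageCollectionIsAllowedSketch() {
  // Mirrors the DCHECK in the diff: fail loudly if GC could run here.
  assert(DisallowHeapAllocationSketch::IsAllowed());
}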
......
@@ -274,8 +274,15 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
   } else if (obj.IsJSDataView()) {
     JSDataView data_view = JSDataView::cast(obj);
     JSArrayBuffer buffer = JSArrayBuffer::cast(data_view.buffer());
-    data_view.set_data_pointer(
-        reinterpret_cast<uint8_t*>(buffer.backing_store()) +
+    void* backing_store = nullptr;
+    if (buffer.backing_store() != nullptr) {
+      // The backing store of the JSArrayBuffer has not been correctly restored
+      // yet, as that may trigger GC. The backing_store field currently contains
+      // a numbered reference to an already deserialized backing store.
+      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
+      backing_store = backing_stores_[store_index]->buffer_start();
+    }
+    data_view.set_data_pointer(reinterpret_cast<uint8_t*>(backing_store) +
                                data_view.byte_offset());
   } else if (obj.IsJSTypedArray()) {
     JSTypedArray typed_array = JSTypedArray::cast(obj);
......
@@ -294,15 +301,9 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
     }
   } else if (obj.IsJSArrayBuffer()) {
     JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
-    // Only fixup for the off-heap case.
+    // Only fixup for the off-heap case. This may trigger GC.
     if (buffer.backing_store() != nullptr) {
-      // Serializer writes backing store ref in |backing_store| field.
-      size_t store_index = reinterpret_cast<size_t>(buffer.backing_store());
-      auto backing_store = backing_stores_[store_index];
-      SharedFlag shared = backing_store && backing_store->is_shared()
-                              ? SharedFlag::kShared
-                              : SharedFlag::kNotShared;
-      buffer.Setup(shared, backing_store);
+      new_off_heap_array_buffers_.push_back(handle(buffer, isolate_));
     }
   } else if (obj.IsBytecodeArray()) {
     // TODO(mythria): Remove these once we store the default values for these
......
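Until the deferred setup runs, the backing_store field of a deserialized JSArrayBuffer holds a backing-store index written by the serializer, not a real pointer; the JSDataView branch above decodes it on the spot. A standalone sketch of that decoding step (simplified types, names are illustrative):

#include <cstdint>
#include <vector>

// Sketch: a null field means there is no off-heap store to decode; any other
// value is an index into the deserializer's table of backing stores.
void* DecodeBackingStore(void* field,
                         const std::vector<void*>& backing_store_table) {
  if (field == nullptr) return nullptr;
  auto index = reinterpret_cast<std::uintptr_t>(field);
  return backing_store_table[index];  // start of the real buffer memory
}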
......
@@ -76,6 +76,10 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     attached_objects_.push_back(attached_object);
   }
 
+  void CheckNoArrayBufferBackingStores() {
+    CHECK_EQ(new_off_heap_array_buffers().size(), 0);
+  }
+
   Isolate* isolate() const { return isolate_; }
   SnapshotByteSource* source() { return &source_; }
   const std::vector<AllocationSite>& new_allocation_sites() const {
......
@@ -98,6 +102,14 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
     return new_scripts_;
   }
 
+  const std::vector<Handle<JSArrayBuffer>>& new_off_heap_array_buffers() const {
+    return new_off_heap_array_buffers_;
+  }
+
+  std::shared_ptr<BackingStore> backing_store(size_t i) {
+    return backing_stores_[i];
+  }
+
   DeserializerAllocator* allocator() { return &allocator_; }
   bool deserializing_user_code() const { return deserializing_user_code_; }
   bool can_rehash() const { return can_rehash_; }
......
@@ -172,6 +184,7 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
   std::vector<CallHandlerInfo> call_handler_infos_;
   std::vector<Handle<String>> new_internalized_strings_;
   std::vector<Handle<Script>> new_scripts_;
+  std::vector<Handle<JSArrayBuffer>> new_off_heap_array_buffers_;
   std::vector<std::shared_ptr<BackingStore>> backing_stores_;
 
   DeserializerAllocator allocator_;
......
......
@@ -90,6 +90,15 @@ void ObjectDeserializer::CommitPostProcessedObjects() {
                              MaybeObjectHandle::Weak(script));
     heap->SetRootScriptList(*list);
   }
+
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
 }
 
 void ObjectDeserializer::LinkAllocationSites() {
......
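ObjectDeserializer::CommitPostProcessedObjects() above and PartialDeserializer::SetupOffHeapArrayBufferBackingStores() below run the same deferred-setup loop. A standalone sketch of that loop with stand-in types (the real code uses V8-internal Handle, BackingStore, and SharedFlag):

#include <cstdint>
#include <memory>
#include <vector>

enum class SharedFlag { kNotShared, kShared };

struct BackingStoreSketch {
  bool is_shared = false;
};

struct BufferSketch {
  void* backing_store_field = nullptr;  // still holds the serializer's index
  void Setup(SharedFlag, std::shared_ptr<BackingStoreSketch>) {
    // The real code attaches the backing store to the buffer here.
  }
};

// Deferred fixup: decode the index, fetch the store, pick the shared flag,
// and complete setup now that GC is allowed again.
void SetupDeferredBuffers(
    const std::vector<BufferSketch*>& pending,
    const std::vector<std::shared_ptr<BackingStoreSketch>>& stores) {
  for (BufferSketch* buffer : pending) {
    auto index = reinterpret_cast<std::uintptr_t>(buffer->backing_store_field);
    std::shared_ptr<BackingStoreSketch> bs = stores[index];
    SharedFlag shared = bs && bs->is_shared ? SharedFlag::kShared
                                            : SharedFlag::kNotShared;
    buffer->Setup(shared, bs);
  }
}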
......
@@ -37,13 +37,16 @@ MaybeHandle<Object> PartialDeserializer::Deserialize(
   AddAttachedObject(global_proxy);
 
+  Handle<Object> result;
+  {
     DisallowHeapAllocation no_gc;
     // Keep track of the code space start and end pointers in case new
     // code objects were unserialized
     CodeSpace* code_space = isolate->heap()->code_space();
     Address start_address = code_space->top();
     Object root;
-    VisitRootPointer(Root::kPartialSnapshotCache, nullptr, FullObjectSlot(&root));
+    VisitRootPointer(Root::kPartialSnapshotCache, nullptr,
+                     FullObjectSlot(&root));
     DeserializeDeferredObjects();
     DeserializeEmbedderFields(embedder_fields_deserializer);
......
@@ -57,7 +60,23 @@ MaybeHandle<Object> PartialDeserializer::Deserialize(
     if (FLAG_rehash_snapshot && can_rehash()) Rehash();
     LogNewMapEvents();
-    return Handle<Object>(root, isolate);
+    result = handle(root, isolate);
+  }
+
+  SetupOffHeapArrayBufferBackingStores();
+
+  return result;
+}
+
+void PartialDeserializer::SetupOffHeapArrayBufferBackingStores() {
+  for (Handle<JSArrayBuffer> buffer : new_off_heap_array_buffers()) {
+    // Serializer writes backing store ref in |backing_store| field.
+    size_t store_index = reinterpret_cast<size_t>(buffer->backing_store());
+    auto bs = backing_store(store_index);
+    SharedFlag shared =
+        bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
+    buffer->Setup(shared, bs);
+  }
 }
 
 void PartialDeserializer::DeserializeEmbedderFields(
......
......
@@ -33,6 +33,8 @@ class V8_EXPORT_PRIVATE PartialDeserializer final : public Deserializer {
   void DeserializeEmbedderFields(
       v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer);
+
+  void SetupOffHeapArrayBufferBackingStores();
 };
 
 }  // namespace internal
......
......
@@ -51,6 +51,7 @@ void ReadOnlyDeserializer::DeserializeInto(Isolate* isolate) {
       if (object->IsUndefined(roots)) break;
     }
     DeserializeDeferredObjects();
+    CheckNoArrayBufferBackingStores();
   }
 
   if (FLAG_rehash_snapshot && can_rehash()) {
......
......
@@ -44,6 +44,8 @@ void StartupDeserializer::DeserializeInto(Isolate* isolate) {
     FlushICache();
   }
 
+  CheckNoArrayBufferBackingStores();
+
   isolate->heap()->set_native_contexts_list(
       ReadOnlyRoots(isolate).undefined_value());
 
   // The allocation site list is build during root iteration, but if no sites
......