Commit e1b5db6c authored by machenbach, committed by Commit bot

Revert of Prevent stack overflow in the serializer/deserializer. (patchset #6...

Revert of Prevent stack overflow in the serializer/deserializer. (patchset #6 id:100001 of https://codereview.chromium.org/1125073004/)

Reason for revert:
[Sheriff] Breaks msan:
http://build.chromium.org/p/client.v8/builders/V8%20Linux%20-%20arm64%20-%20sim%20-%20MSAN/builds/2266

Original issue's description:
> Prevent stack overflow in the serializer/deserializer.
>
> We keep an eye on the recursion depth. Once it exceeds a limit, we serialize
> only the object header and size, but defer serializing the object body for
> after we have unwound the stack.
>
> R=mvstanton@chromium.org
>
> Committed: https://crrev.com/36b4a498d6614243454d5a182e4946b0dad24f0a
> Cr-Commit-Position: refs/heads/master@{#28385}

TBR=mvstanton@chromium.org,yangguo@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.chromium.org/1139113002

Cr-Commit-Position: refs/heads/master@{#28403}
parent 2a6a87d7
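For context, the description above refers to a recursion-depth guard plus a deferral queue: once the serializer's recursion gets too deep, only an object's header and size are emitted and the body is queued for later. Below is a minimal standalone sketch of that scheme; DeferredSerializer, EmitHeaderAndSize and EmitBody are hypothetical stand-ins rather than the real V8 API, while the depth limit of 32 mirrors the RecursionScope removed in the diff below.

// Minimal sketch of the reverted scheme (hypothetical names, not the V8 API).
#include <vector>

struct HeapObject;  // opaque stand-in for a V8 heap object

class DeferredSerializer {
 public:
  void SerializeObject(HeapObject* obj) {
    RecursionScope scope(this);
    EmitHeaderAndSize(obj);        // header and size are always written
    if (scope.ExceedsMaximum()) {
      deferred_.push_back(obj);    // body is serialized later, off the deep stack
      return;
    }
    EmitBody(obj);                 // may recurse into referenced objects
  }

  // Called once the outermost SerializeObject has returned, i.e. after the
  // stack has unwound.
  void SerializeDeferredObjects() {
    while (!deferred_.empty()) {
      HeapObject* obj = deferred_.back();
      deferred_.pop_back();
      EmitBody(obj);
    }
  }

 private:
  // RAII counter for the serializer's recursion depth.
  class RecursionScope {
   public:
    explicit RecursionScope(DeferredSerializer* s) : s_(s) { ++s_->depth_; }
    ~RecursionScope() { --s_->depth_; }
    bool ExceedsMaximum() const { return s_->depth_ >= kMaxRecursionDepth; }

   private:
    static const int kMaxRecursionDepth = 32;
    DeferredSerializer* s_;
  };

  void EmitHeaderAndSize(HeapObject*) { /* write map pointer and object size */ }
  void EmitBody(HeapObject*) { /* write the object's fields */ }

  std::vector<HeapObject*> deferred_;
  int depth_ = 0;
};

In the actual CL, ObjectSerializer::SerializeDeferred re-emits each queued body against a back reference, and the deserializer replays those bodies in DeserializeDeferredObjects(); both appear in the removed hunks below (changed lines are marked with - for removed and + for restored code).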
......@@ -379,7 +379,7 @@ StartupData V8::CreateSnapshotDataBlob(const char* custom_source) {
i::SnapshotByteSink context_sink;
i::PartialSerializer context_ser(internal_isolate, &ser, &context_sink);
context_ser.Serialize(&raw_context);
- ser.SerializeWeakReferencesAndDeferred();
+ ser.SerializeWeakReferences();
result = i::Snapshot::CreateSnapshotBlob(ser, context_ser, metadata);
}
......
......@@ -2831,7 +2831,6 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
if (value->IsPropertyCell()) {
value = handle(PropertyCell::cast(*value)->value(), isolate());
}
- if (value->IsTheHole()) continue;
PropertyDetails details = properties->DetailsAt(i);
DCHECK_EQ(kData, details.kind());
JSObject::AddProperty(to, key, value, details.attributes());
......
......@@ -14689,10 +14689,9 @@ Handle<Derived> HashTable<Derived, Shape, Key>::New(
PretenureFlag pretenure) {
DCHECK(0 <= at_least_space_for);
DCHECK(!capacity_option || base::bits::IsPowerOfTwo32(at_least_space_for));
- int capacity =
- (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
+ int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
? at_least_space_for
- : isolate->serializer_enabled() && isolate->bootstrapper()->IsActive()
+ : isolate->serializer_enabled()
? ComputeCapacityForSerialization(at_least_space_for)
: ComputeCapacity(at_least_space_for);
if (capacity > HashTable::kMaxCapacity) {
......
......@@ -560,7 +560,6 @@ void Deserializer::Deserialize(Isolate* isolate) {
isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
isolate_->heap()->RepairFreeListsAfterDeserialization();
isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
- DeserializeDeferredObjects();
isolate_->heap()->set_native_contexts_list(
isolate_->heap()->undefined_value());
......@@ -610,7 +609,6 @@ MaybeHandle<Object> Deserializer::DeserializePartial(
Object* outdated_contexts;
VisitPointer(&root);
VisitPointer(&outdated_contexts);
- DeserializeDeferredObjects();
// There's no code deserialized here. If this assert fires
// then that's changed and logging should be added to notify
......@@ -633,7 +631,6 @@ MaybeHandle<SharedFunctionInfo> Deserializer::DeserializeCode(
DisallowHeapAllocation no_gc;
Object* root;
VisitPointer(&root);
- DeserializeDeferredObjects();
return Handle<SharedFunctionInfo>(SharedFunctionInfo::cast(root));
}
}
......@@ -655,22 +652,13 @@ void Deserializer::VisitPointers(Object** start, Object** end) {
}
- void Deserializer::DeserializeDeferredObjects() {
- for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
- int space = code & kSpaceMask;
- DCHECK(space <= kNumberOfSpaces);
- DCHECK(code - space == kNewObject);
- HeapObject* object = GetBackReferencedObject(space);
- int size = source_.GetInt() << kPointerSizeLog2;
- Address obj_address = object->address();
- Object** start = reinterpret_cast<Object**>(obj_address + kPointerSize);
- Object** end = reinterpret_cast<Object**>(obj_address + size);
- bool filled = ReadData(start, end, space, obj_address);
- CHECK(filled);
- if (object->IsAllocationSite()) {
- RelinkAllocationSite(AllocationSite::cast(object));
- }
- }
- }
+ void Deserializer::RelinkAllocationSite(AllocationSite* site) {
+ if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
+ site->set_weak_next(isolate_->heap()->undefined_value());
+ } else {
+ site->set_weak_next(isolate_->heap()->allocation_sites_list());
+ }
+ isolate_->heap()->set_allocation_sites_list(site);
+ }
......@@ -705,8 +693,7 @@ class StringTableInsertionKey : public HashTableKey {
};
- HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj) {
- DCHECK(deserializing_user_code());
+ HeapObject* Deserializer::ProcessNewObjectFromSerializedCode(HeapObject* obj) {
if (obj->IsString()) {
String* string = String::cast(obj);
// Uninitialize hash field as the hash seed may have changed.
......@@ -721,30 +708,11 @@ HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj) {
}
} else if (obj->IsScript()) {
Script::cast(obj)->set_id(isolate_->heap()->NextScriptId());
- } else {
- DCHECK(CanBeDeferred(obj));
}
return obj;
}
- void Deserializer::RelinkAllocationSite(AllocationSite* obj) {
- DCHECK(obj->IsAllocationSite());
- // Allocation sites are present in the snapshot, and must be linked into
- // a list at deserialization time.
- AllocationSite* site = AllocationSite::cast(obj);
- // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
- // as a (weak) root. If this root is relocated correctly,
- // RelinkAllocationSite() isn't necessary.
- if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
- site->set_weak_next(isolate_->heap()->undefined_value());
- } else {
- site->set_weak_next(isolate_->heap()->allocation_sites_list());
- }
- isolate_->heap()->set_allocation_sites_list(site);
- }
HeapObject* Deserializer::GetBackReferencedObject(int space) {
HeapObject* obj;
BackReference back_reference(source_.GetInt());
......@@ -800,21 +768,24 @@ void Deserializer::ReadObject(int space_number, Object** write_back) {
if (FLAG_log_snapshot_positions) {
LOG(isolate_, SnapshotPositionEvent(address, source_.position()));
}
+ ReadData(current, limit, space_number, address);
- if (ReadData(current, limit, space_number, address)) {
- // Only post process if object content has not been deferred.
- if (obj->IsAllocationSite()) {
- RelinkAllocationSite(AllocationSite::cast(obj));
- }
- }
+ // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
+ // as a (weak) root. If this root is relocated correctly,
+ // RelinkAllocationSite() isn't necessary.
+ if (obj->IsAllocationSite()) RelinkAllocationSite(AllocationSite::cast(obj));
- if (deserializing_user_code()) obj = PostProcessNewObject(obj);
+ // Fix up strings from serialized user code.
+ if (deserializing_user_code()) obj = ProcessNewObjectFromSerializedCode(obj);
Object* write_back_obj = obj;
UnalignedCopy(write_back, &write_back_obj);
#ifdef DEBUG
if (obj->IsCode()) {
DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
+ #ifdef VERIFY_HEAP
+ obj->ObjectVerify();
+ #endif  // VERIFY_HEAP
} else {
DCHECK(space_number != CODE_SPACE);
}
......@@ -858,7 +829,7 @@ Address Deserializer::Allocate(int space_index, int size) {
}
- bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
+ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
Address current_object_address) {
Isolate* const isolate = isolate_;
// Write barrier support costs around 1% in startup time. In fact there
......@@ -1115,14 +1086,6 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
break;
}
- case kDeferred: {
- // Deferred can only occur right after the heap object header.
- DCHECK(current == reinterpret_cast<Object**>(current_object_address +
- kPointerSize));
- current = limit;
- return false;
- }
case kSynchronize:
// If we get here then that indicates that you have a mismatch between
// the number of GC roots when serializing and deserializing.
......@@ -1229,7 +1192,6 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
}
}
CHECK_EQ(limit, current);
- return true;
}
......@@ -1238,7 +1200,6 @@ Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
sink_(sink),
external_reference_encoder_(isolate),
root_index_map_(isolate),
- recursion_depth_(0),
code_address_map_(NULL),
large_objects_total_size_(0),
seen_large_objects_index_(0) {
......@@ -1314,16 +1275,6 @@ void Serializer::OutputStatistics(const char* name) {
}
- void Serializer::SerializeDeferredObjects() {
- while (deferred_objects_.length() > 0) {
- HeapObject* obj = deferred_objects_.RemoveLast();
- ObjectSerializer obj_serializer(this, obj, sink_, kPlain, kStartOfObject);
- obj_serializer.SerializeDeferred();
- }
- sink_->Put(kSynchronize, "Finished with deferred objects");
- }
void StartupSerializer::SerializeStrongReferences() {
Isolate* isolate = this->isolate();
// No active threads.
......@@ -1368,7 +1319,6 @@ void PartialSerializer::Serialize(Object** o) {
}
VisitPointer(o);
SerializeOutdatedContextsAsFixedArray();
- SerializeDeferredObjects();
Pad();
}
......@@ -1392,10 +1342,10 @@ void PartialSerializer::SerializeOutdatedContextsAsFixedArray() {
sink_->Put(reinterpret_cast<byte*>(&length_smi)[i], "Byte");
}
for (int i = 0; i < length; i++) {
- Context* context = outdated_contexts_[i];
- BackReference back_reference = back_reference_map_.Lookup(context);
- sink_->Put(kBackref + back_reference.space(), "BackRef");
- PutBackReference(context, back_reference);
+ BackReference back_ref = outdated_contexts_[i];
+ DCHECK(BackReferenceIsAlreadyAllocated(back_ref));
+ sink_->Put(kBackref + back_ref.space(), "BackRef");
+ sink_->PutInt(back_ref.reference(), "BackRefValue");
}
}
}
......@@ -1558,7 +1508,10 @@ bool Serializer::SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
"BackRefWithSkip");
sink_->PutInt(skip, "BackRefSkipDistance");
}
- PutBackReference(obj, back_reference);
+ DCHECK(BackReferenceIsAlreadyAllocated(back_reference));
+ sink_->PutInt(back_reference.reference(), "BackRefValue");
+ hot_objects_.Add(obj);
}
return true;
}
......@@ -1594,7 +1547,7 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
}
- void StartupSerializer::SerializeWeakReferencesAndDeferred() {
+ void StartupSerializer::SerializeWeakReferences() {
// This phase comes right after the serialization (of the snapshot).
// After we have done the partial serialization the partial snapshot cache
// will contain some references needed to decode the partial snapshot. We
......@@ -1603,7 +1556,6 @@ void StartupSerializer::SerializeWeakReferencesAndDeferred() {
Object* undefined = isolate()->heap()->undefined_value();
VisitPointer(&undefined);
isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
- SerializeDeferredObjects();
Pad();
}
......@@ -1636,13 +1588,6 @@ void Serializer::PutRoot(int root_index,
}
- void Serializer::PutBackReference(HeapObject* object, BackReference reference) {
- DCHECK(BackReferenceIsAlreadyAllocated(reference));
- sink_->PutInt(reference.reference(), "BackRefValue");
- hot_objects_.Add(object);
- }
void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) {
if (obj->IsMap()) {
......@@ -1696,7 +1641,9 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
Context::cast(obj)->global_object() == global_object_) {
// Context refers to the current global object. This reference will
// become outdated after deserialization.
- outdated_contexts_.Add(Context::cast(obj));
+ BackReference back_reference = back_reference_map_.Lookup(obj);
+ DCHECK(back_reference.is_valid());
+ outdated_contexts_.Add(back_reference);
}
}
......@@ -1863,39 +1810,6 @@ void Serializer::ObjectSerializer::Serialize() {
CHECK_EQ(0, bytes_processed_so_far_);
bytes_processed_so_far_ = kPointerSize;
- RecursionScope recursion(serializer_);
- // Objects that are immediately post processed during deserialization
- // cannot be deferred, since post processing requires the object content.
- if (recursion.ExceedsMaximum() && CanBeDeferred(object_)) {
- serializer_->QueueDeferredObject(object_);
- sink_->Put(kDeferred, "Deferring object content");
- return;
- }
object_->IterateBody(map->instance_type(), size, this);
OutputRawData(object_->address() + size);
}
- void Serializer::ObjectSerializer::SerializeDeferred() {
- if (FLAG_trace_serializer) {
- PrintF(" Encoding deferred heap object: ");
- object_->ShortPrint();
- PrintF("\n");
- }
- int size = object_->Size();
- Map* map = object_->map();
- BackReference reference = serializer_->back_reference_map()->Lookup(object_);
- // Serialize the rest of the object.
- CHECK_EQ(0, bytes_processed_so_far_);
- bytes_processed_so_far_ = kPointerSize;
- sink_->Put(kNewObject + reference.space(), "deferred object");
- serializer_->PutBackReference(object_, reference);
- sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");
- object_->IterateBody(map->instance_type(), size, this);
- OutputRawData(object_->address() + size);
- }
......@@ -2220,7 +2134,6 @@ ScriptData* CodeSerializer::Serialize(Isolate* isolate,
DisallowHeapAllocation no_gc;
Object** location = Handle<Object>::cast(info).location();
cs.VisitPointer(location);
- cs.SerializeDeferredObjects();
cs.Pad();
SerializedCodeData data(sink.data(), cs);
......
......@@ -306,10 +306,6 @@ class SerializerDeserializer: public ObjectVisitor {
static const int kNumberOfSpaces = LAST_SPACE + 1;
protected:
- static bool CanBeDeferred(HeapObject* o) {
- return !o->IsString() && !o->IsScript();
- }
// ---------- byte code range 0x00..0x7f ----------
// Byte codes in this range represent Where, HowToCode and WhereToPoint.
// Where the pointed-to object can be found:
......@@ -377,8 +373,6 @@ class SerializerDeserializer: public ObjectVisitor {
static const int kNop = 0x3d;
// Move to next reserved chunk.
static const int kNextChunk = 0x3e;
- // Deferring object content.
- static const int kDeferred = 0x3f;
// A tag emitted at strategic points in the snapshot to delineate sections.
// If the deserializer does not find these at the expected moments then it
// is an indication that the snapshot and the VM do not fit together.
......@@ -559,22 +553,22 @@ class Deserializer: public SerializerDeserializer {
memcpy(dest, src, sizeof(*src));
}
- void DeserializeDeferredObjects();
+ // Allocation sites are present in the snapshot, and must be linked into
+ // a list at deserialization time.
+ void RelinkAllocationSite(AllocationSite* site);
// Fills in some heap data in an area from start to end (non-inclusive). The
// space id is used for the write barrier. The object_address is the address
// of the object we are writing into, or NULL if we are not writing into an
// object, i.e. if we are writing a series of tagged values that are not on
- // the heap. Return false if the object content has been deferred.
- bool ReadData(Object** start, Object** end, int space,
+ // the heap.
+ void ReadData(Object** start, Object** end, int space,
Address object_address);
void ReadObject(int space_number, Object** write_back);
Address Allocate(int space_index, int size);
// Special handling for serialized code like hooking up internalized strings.
- HeapObject* PostProcessNewObject(HeapObject* obj);
- void RelinkAllocationSite(AllocationSite* obj);
+ HeapObject* ProcessNewObjectFromSerializedCode(HeapObject* obj);
// This returns the address of an object that has been described in the
// snapshot by chunk index and offset.
......@@ -618,8 +612,6 @@ class Serializer : public SerializerDeserializer {
void EncodeReservations(List<SerializedData::Reservation>* out) const;
- void SerializeDeferredObjects();
Isolate* isolate() const { return isolate_; }
BackReferenceMap* back_reference_map() { return &back_reference_map_; }
......@@ -642,7 +634,6 @@ class Serializer : public SerializerDeserializer {
is_code_object_(o->IsCode()),
code_has_been_output_(false) {}
void Serialize();
- void SerializeDeferred();
void VisitPointers(Object** start, Object** end);
void VisitEmbeddedPointer(RelocInfo* target);
void VisitExternalReference(Address* p);
......@@ -684,29 +675,12 @@ class Serializer : public SerializerDeserializer {
bool code_has_been_output_;
};
- class RecursionScope {
- public:
- explicit RecursionScope(Serializer* serializer) : serializer_(serializer) {
- serializer_->recursion_depth_++;
- }
- ~RecursionScope() { serializer_->recursion_depth_--; }
- bool ExceedsMaximum() {
- return serializer_->recursion_depth_ >= kMaxRecursionDepth;
- }
- private:
- static const int kMaxRecursionDepth = 32;
- Serializer* serializer_;
- };
virtual void SerializeObject(HeapObject* o, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) = 0;
void PutRoot(int index, HeapObject* object, HowToCode how, WhereToPoint where,
int skip);
- void PutBackReference(HeapObject* object, BackReference reference);
// Returns true if the object was successfully serialized.
bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
......@@ -748,11 +722,6 @@ class Serializer : public SerializerDeserializer {
SnapshotByteSink* sink() const { return sink_; }
- void QueueDeferredObject(HeapObject* obj) {
- DCHECK(back_reference_map_.Lookup(obj).is_valid());
- deferred_objects_.Add(obj);
- }
void OutputStatistics(const char* name);
Isolate* isolate_;
......@@ -763,11 +732,8 @@ class Serializer : public SerializerDeserializer {
BackReferenceMap back_reference_map_;
RootIndexMap root_index_map_;
- int recursion_depth_;
friend class Deserializer;
friend class ObjectSerializer;
- friend class RecursionScope;
friend class SnapshotData;
private:
......@@ -786,9 +752,6 @@ class Serializer : public SerializerDeserializer {
List<byte> code_buffer_;
- // To handle stack overflow.
- List<HeapObject*> deferred_objects_;
#ifdef OBJECT_PRINT
static const int kInstanceTypes = 256;
int* instance_type_count_;
......@@ -834,7 +797,7 @@ class PartialSerializer : public Serializer {
void SerializeOutdatedContextsAsFixedArray();
Serializer* startup_serializer_;
- List<Context*> outdated_contexts_;
+ List<BackReference> outdated_contexts_;
Object* global_object_;
PartialCacheIndexMap partial_cache_index_map_;
DISALLOW_COPY_AND_ASSIGN(PartialSerializer);
......@@ -866,10 +829,11 @@ class StartupSerializer : public Serializer {
virtual void SerializeStrongReferences();
virtual void SerializeObject(HeapObject* o, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) override;
- void SerializeWeakReferencesAndDeferred();
+ void SerializeWeakReferences();
void Serialize() {
SerializeStrongReferences();
- SerializeWeakReferencesAndDeferred();
+ SerializeWeakReferences();
Pad();
}
private:
......
......@@ -329,7 +329,7 @@ UNINITIALIZED_TEST(PartialSerialization) {
&partial_sink);
partial_serializer.Serialize(&raw_foo);
- startup_serializer.SerializeWeakReferencesAndDeferred();
+ startup_serializer.SerializeWeakReferences();
SnapshotData startup_snapshot(startup_serializer);
SnapshotData partial_snapshot(partial_serializer);
......@@ -447,7 +447,7 @@ UNINITIALIZED_TEST(ContextSerialization) {
PartialSerializer partial_serializer(isolate, &startup_serializer,
&partial_sink);
partial_serializer.Serialize(&raw_context);
- startup_serializer.SerializeWeakReferencesAndDeferred();
+ startup_serializer.SerializeWeakReferences();
SnapshotData startup_snapshot(startup_serializer);
SnapshotData partial_snapshot(partial_serializer);
......@@ -582,7 +582,7 @@ UNINITIALIZED_TEST(CustomContextSerialization) {
PartialSerializer partial_serializer(isolate, &startup_serializer,
&partial_sink);
partial_serializer.Serialize(&raw_context);
- startup_serializer.SerializeWeakReferencesAndDeferred();
+ startup_serializer.SerializeWeakReferences();
SnapshotData startup_snapshot(startup_serializer);
SnapshotData partial_snapshot(partial_serializer);
......@@ -738,44 +738,6 @@ TEST(PerIsolateSnapshotBlobsWithLocker) {
}
- TEST(SnapshotBlobsStackOverflow) {
- DisableTurbofan();
- const char* source =
- "var a = [0];"
- "var b = a;"
- "for (var i = 0; i < 10000; i++) {"
- " var c = [i];"
- " b.push(c);"
- " b.push(c);"
- " b = c;"
- "}";
- v8::StartupData data = v8::V8::CreateSnapshotDataBlob(source);
- v8::Isolate::CreateParams params;
- params.snapshot_blob = &data;
- params.array_buffer_allocator = CcTest::array_buffer_allocator();
- v8::Isolate* isolate = v8::Isolate::New(params);
- {
- v8::Isolate::Scope i_scope(isolate);
- v8::HandleScope h_scope(isolate);
- v8::Local<v8::Context> context = v8::Context::New(isolate);
- delete[] data.data; // We can dispose of the snapshot blob now.
- v8::Context::Scope c_scope(context);
- const char* test =
- "var sum = 0;"
- "while (a) {"
- " sum += a[0];"
- " a = a[1];"
- "}"
- "sum";
- CHECK_EQ(9999 * 5000, CompileRun(test)->ToInt32(isolate)->Int32Value());
- }
- isolate->Dispose();
- }
TEST(TestThatAlwaysSucceeds) {
}
......