Commit 6bfee50e authored by jkummerow, committed by Commit bot

[deserializer] Make large object deserialization GC safe

When black allocation is turned on at deserialization time, slots in
deserialized objects have to be visited by the incremental marker. For
spaces with reservations, this has always been done; for large object
space, with its special handling, this patch adds it.
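
For illustration only, here is a minimal self-contained sketch of the idea
(simplified, hypothetical types, not V8's actual classes): large objects
deserialized into large object space are collected in a separate list, because
that space has no reservations, and each of them is re-visited for marking side
effects when black allocation is on. IncrementalMarkerSketch and
RegisterDeserializedLargeObjects below are stand-ins for
IncrementalMarking::IterateBlackObject and the new Heap hook.

#include <cstdio>
#include <vector>

// Hypothetical stand-in for a heap object with outgoing pointer slots.
struct HeapObject {
  std::vector<HeapObject*> slots;
  bool visited_by_marker = false;
};

// Hypothetical stand-in for the incremental marker.
struct IncrementalMarkerSketch {
  bool black_allocation = false;
  // Rough analogue of IncrementalMarking::IterateBlackObject: visit the
  // object's slots so that referenced objects are not lost by the marker.
  void IterateBlackObject(HeapObject* obj) {
    obj->visited_by_marker = true;
    for (HeapObject* target : obj->slots) {
      (void)target;  // the real marker would grey/mark `target` here
    }
  }
};

// Mirrors the shape of the new hook: spaces with reservations are walked
// elsewhere; large objects arrive as an explicit list because large object
// space has no reservations.
void RegisterDeserializedLargeObjects(
    IncrementalMarkerSketch* marker,
    const std::vector<HeapObject*>& large_objects) {
  if (!marker->black_allocation) return;
  for (HeapObject* obj : large_objects) marker->IterateBlackObject(obj);
}

int main() {
  IncrementalMarkerSketch marker;
  marker.black_allocation = true;
  HeapObject referenced;
  HeapObject large;
  large.slots.push_back(&referenced);
  std::vector<HeapObject*> deserialized_large_objects = {&large};
  RegisterDeserializedLargeObjects(&marker, deserialized_large_objects);
  std::printf("large object visited: %d\n", large.visited_by_marker);
  return 0;
}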

Additionally, we must ensure that no incremental marking steps that
might cause marking to finish are performed while an
AlwaysAllocateScope is active.
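
As a rough sketch of that invariant (simplified, hypothetical classes, not
V8's implementation): an RAII always-allocate scope bumps a counter on the
heap, and the allocation-driven marking step bails out while the counter is
non-zero, mirroring the heap_->always_allocate() checks added below.

#include <cassert>

// Hypothetical, simplified heap; not V8's Heap class.
struct HeapSketch {
  int always_allocate_scope_depth = 0;
  bool marking_active = true;
  int marking_steps_performed = 0;

  bool always_allocate() const { return always_allocate_scope_depth > 0; }

  // Rough analogue of AdvanceIncrementalMarkingOnAllocation.
  void AdvanceMarkingOnAllocation() {
    // The early-out this patch adds: no marking steps while an
    // always-allocate scope is active, so the GC state cannot change.
    if (!marking_active || always_allocate()) return;
    ++marking_steps_performed;
  }
};

// Rough analogue of AlwaysAllocateScope: an RAII guard around the counter.
class AlwaysAllocateScopeSketch {
 public:
  explicit AlwaysAllocateScopeSketch(HeapSketch* heap) : heap_(heap) {
    ++heap_->always_allocate_scope_depth;
  }
  ~AlwaysAllocateScopeSketch() { --heap_->always_allocate_scope_depth; }

 private:
  HeapSketch* heap_;
};

int main() {
  HeapSketch heap;
  {
    AlwaysAllocateScopeSketch scope(&heap);
    heap.AdvanceMarkingOnAllocation();  // skipped: scope is active
  }
  assert(heap.marking_steps_performed == 0);
  heap.AdvanceMarkingOnAllocation();  // outside the scope, marking may advance
  assert(heap.marking_steps_performed == 1);
  return 0;
}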

BUG=chromium:718859

Review-Url: https://codereview.chromium.org/2868103002
Cr-Commit-Position: refs/heads/master@{#45231}
parent 61101232
@@ -4238,34 +4238,39 @@ void Heap::FinalizeIncrementalMarkingIfComplete(
   }
 }
 
-void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
+void Heap::RegisterDeserializedObjectsForBlackAllocation(
+    Reservation* reservations, List<HeapObject*>* large_objects) {
   // TODO(hpayer): We do not have to iterate reservations on black objects
   // for marking. We just have to execute the special visiting side effect
   // code that adds objects to global data structures, e.g. for array buffers.
-  if (incremental_marking()->black_allocation()) {
-    // Iterate black objects in old space, code space, map space, and large
-    // object space for side effects.
-    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
-      const Heap::Reservation& res = reservations[i];
-      for (auto& chunk : res) {
-        Address addr = chunk.start;
-        while (addr < chunk.end) {
-          HeapObject* obj = HeapObject::FromAddress(addr);
-          // There might be grey objects due to black to grey transitions in
-          // incremental marking. E.g. see VisitNativeContextIncremental.
-          DCHECK(
-              ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
-          if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
-            incremental_marking()->IterateBlackObject(obj);
-          }
-          addr += obj->Size();
-        }
-      }
-    }
-    // We potentially deserialized wrappers which require registering with the
-    // embedder as the marker will not find them.
-    local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
-  }
+  if (!incremental_marking()->black_allocation()) return;
+
+  // Iterate black objects in old space, code space, map space, and large
+  // object space for side effects.
+  for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+    const Heap::Reservation& res = reservations[i];
+    for (auto& chunk : res) {
+      Address addr = chunk.start;
+      while (addr < chunk.end) {
+        HeapObject* obj = HeapObject::FromAddress(addr);
+        // There might be grey objects due to black to grey transitions in
+        // incremental marking. E.g. see VisitNativeContextIncremental.
+        DCHECK(ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
+        if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
+          incremental_marking()->IterateBlackObject(obj);
+        }
+        addr += obj->Size();
+      }
+    }
+  }
+  // We potentially deserialized wrappers which require registering with the
+  // embedder as the marker will not find them.
+  local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
+
+  // Large object space doesn't use reservations, so it needs custom handling.
+  for (HeapObject* object : *large_objects) {
+    incremental_marking()->IterateBlackObject(object);
+  }
 }
@@ -5508,7 +5513,9 @@ bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
 // The kSoftLimit means that incremental marking should be started soon.
 // The kHardLimit means that incremental marking should be started immediately.
 Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
-  if (!incremental_marking()->CanBeActivated() ||
+  // Code using an AlwaysAllocateScope assumes that the GC state does not
+  // change; that implies that no marking steps must be performed.
+  if (!incremental_marking()->CanBeActivated() || always_allocate() ||
       PromotedSpaceSizeOfObjects() <=
           IncrementalMarking::kActivationThreshold) {
     // Incremental marking is disabled or it is too early to start.
@@ -1245,7 +1245,8 @@ class Heap {
   void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
 
-  void RegisterReservationsForBlackAllocation(Reservation* reservations);
+  void RegisterDeserializedObjectsForBlackAllocation(
+      Reservation* reservations, List<HeapObject*>* large_objects);
 
   IncrementalMarking* incremental_marking() { return incremental_marking_; }
@@ -1118,8 +1118,10 @@ size_t IncrementalMarking::StepSizeToMakeProgress() {
 }
 
 void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
+  // Code using an AlwaysAllocateScope assumes that the GC state does not
+  // change; that implies that no marking steps must be performed.
   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
-      (state_ != SWEEPING && state_ != MARKING)) {
+      (state_ != SWEEPING && state_ != MARKING) || heap_->always_allocate()) {
     return;
   }
@@ -148,7 +148,8 @@ MaybeHandle<Object> Deserializer::DeserializePartial(
   DeserializeDeferredObjects();
   DeserializeEmbedderFields(embedder_fields_deserializer);
 
-  isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);
+  isolate->heap()->RegisterDeserializedObjectsForBlackAllocation(
+      reservations_, &deserialized_large_objects_);
 
   // There's no code deserialized here. If this assert fires then that's
   // changed and logging should be added to notify the profiler et al of the
@@ -172,7 +173,8 @@ MaybeHandle<HeapObject> Deserializer::DeserializeObject(Isolate* isolate) {
     DeserializeDeferredObjects();
     FlushICacheForNewCodeObjectsAndRecordEmbeddedObjects();
     result = Handle<HeapObject>(HeapObject::cast(root));
-    isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);
+    isolate->heap()->RegisterDeserializedObjectsForBlackAllocation(
+        reservations_, &deserialized_large_objects_);
   }
   CommitPostProcessedObjects(isolate);
   return scope.CloseAndEscape(result);
@@ -1126,6 +1126,87 @@ TEST(CodeSerializerLargeCodeObject) {
   source.Dispose();
 }
 
+TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
+  FLAG_serialize_toplevel = true;
+  FLAG_always_opt = false;
+  // This test relies on (full-codegen) code objects going to large object
+  // space. Once FCG goes away, it must either be redesigned (to put some
+  // other large deserialized object into LO space), or it can be deleted.
+  FLAG_ignition = false;
+  const char* filter_flag = "--turbo-filter=NOTHING";
+  FlagList::SetFlagsFromString(filter_flag, StrLength(filter_flag));
+  FLAG_black_allocation = true;
+  FLAG_manual_evacuation_candidates_selection = true;
+
+  LocalContext context;
+  Isolate* isolate = CcTest::i_isolate();
+  Heap* heap = isolate->heap();
+  isolate->compilation_cache()->Disable();  // Disable same-isolate code cache.
+
+  v8::HandleScope scope(CcTest::isolate());
+
+  Vector<const uint8_t> source = ConstructSource(
+      STATIC_CHAR_VECTOR("var j=1; if (j == 0) {"),
+      STATIC_CHAR_VECTOR("for (var i = 0; i < Object.prototype; i++);"),
+      STATIC_CHAR_VECTOR("} j=7; var s = 'happy_hippo'; j"), 2100);
+  Handle<String> source_str =
+      isolate->factory()->NewStringFromOneByte(source).ToHandleChecked();
+
+  // Create a string on an evacuation candidate in old space.
+  Handle<String> moving_object;
+  Page* ec_page;
+  {
+    AlwaysAllocateScope always_allocate(isolate);
+    heap::SimulateFullSpace(heap->old_space());
+    moving_object = isolate->factory()->InternalizeString(
+        isolate->factory()->NewStringFromAsciiChecked("happy_hippo"));
+    ec_page = Page::FromAddress(moving_object->address());
+  }
+
+  Handle<JSObject> global(isolate->context()->global_object());
+  ScriptData* cache = NULL;
+  Handle<SharedFunctionInfo> orig =
+      CompileScript(isolate, source_str, Handle<String>(), &cache,
+                    v8::ScriptCompiler::kProduceCodeCache);
+  CHECK(heap->InSpace(orig->abstract_code(), LO_SPACE));
+
+  // Pretend that incremental marking is on when deserialization begins.
+  heap::ForceEvacuationCandidate(ec_page);
+  heap::SimulateIncrementalMarking(heap, false);
+  IncrementalMarking* marking = heap->incremental_marking();
+  marking->StartBlackAllocationForTesting();
+  CHECK(marking->IsCompacting());
+  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*moving_object));
+
+  Handle<SharedFunctionInfo> copy;
+  {
+    DisallowCompilation no_compile_expected(isolate);
+    copy = CompileScript(isolate, source_str, Handle<String>(), &cache,
+                         v8::ScriptCompiler::kConsumeCodeCache);
+  }
+  CHECK_NE(*orig, *copy);
+
+  // We should have missed a write barrier. Complete incremental marking
+  // to flush out the bug.
+  heap::SimulateIncrementalMarking(heap, true);
+  CcTest::CollectAllGarbage();
+
+  Handle<JSFunction> copy_fun =
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(
+          copy, isolate->native_context());
+
+  Handle<Object> copy_result =
+      Execution::Call(isolate, copy_fun, global, 0, NULL).ToHandleChecked();
+
+  int result_int;
+  CHECK(copy_result->ToInt32(&result_int));
+  CHECK_EQ(7, result_int);
+
+  delete cache;
+  source.Dispose();
+}
+
 TEST(CodeSerializerLargeStrings) {
   FLAG_serialize_toplevel = true;
   LocalContext context;