Commit 2083600a authored by hpayer@chromium.org

Remove all encountered weak maps from the list of weak collections when...

Remove all encountered weak maps from the list of weak collections when incremental marking is aborted.

BUG=399527
LOG=n
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/439233002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22844 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 1ed9516f
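To make the intent of the change easier to follow, here is a minimal, self-contained sketch (toy types only, not the actual V8 classes or APIs) of the list handling that the new AbortWeakCollections() in the diff below performs: incremental marking threads each encountered weak map onto a singly linked list through its next field, and an aborted cycle has to detach every node and reset the list head so that no stale entries survive into the following full mark-compact.

// Toy model (not V8 code): sketch of the bookkeeping that
// MarkCompactCollector::AbortWeakCollections() restores when an
// incremental marking cycle is thrown away.
#include <cassert>

struct WeakCollection {
  WeakCollection* next = nullptr;  // stand-in for JSWeakCollection::next()
};

struct Heap {
  WeakCollection* encountered_weak_collections = nullptr;  // list head

  // Marking side: prepend a newly encountered weak map to the list.
  void Encounter(WeakCollection* wc) {
    wc->next = encountered_weak_collections;
    encountered_weak_collections = wc;
  }

  // Abort side: detach every node and clear the head, mirroring the loop
  // in the diff (set_next(undefined_value()) / Smi::FromInt(0) sentinel).
  void AbortWeakCollections() {
    WeakCollection* current = encountered_weak_collections;
    while (current != nullptr) {
      WeakCollection* next = current->next;
      current->next = nullptr;
      current = next;
    }
    encountered_weak_collections = nullptr;
  }
};

int main() {
  Heap heap;
  WeakCollection a, b;
  heap.Encounter(&a);
  heap.Encounter(&b);
  heap.AbortWeakCollections();
  // After an abort, neither the heap nor the weak maps reference the list.
  assert(heap.encountered_weak_collections == nullptr);
  assert(a.next == nullptr && b.next == nullptr);
  return 0;
}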
......@@ -258,6 +258,8 @@ void GCTracer::PrintNVP() const {
current_.scopes[Scope::MC_WEAKCOLLECTION_PROCESS]);
PrintF("weakcollection_clear=%.1f ",
current_.scopes[Scope::MC_WEAKCOLLECTION_CLEAR]);
PrintF("weakcollection_abort=%.1f ",
current_.scopes[Scope::MC_WEAKCOLLECTION_ABORT]);
PrintF("total_size_before=%" V8_PTR_PREFIX "d ", current_.start_object_size);
PrintF("total_size_after=%" V8_PTR_PREFIX "d ", current_.end_object_size);
......
......@@ -103,6 +103,7 @@ class GCTracer BASE_EMBEDDED {
MC_UPDATE_MISC_POINTERS,
MC_WEAKCOLLECTION_PROCESS,
MC_WEAKCOLLECTION_CLEAR,
MC_WEAKCOLLECTION_ABORT,
MC_FLUSH_CODE,
NUMBER_OF_SCOPES
};
......
......@@ -968,6 +968,7 @@ void MarkCompactCollector::Prepare() {
if (was_marked_incrementally_ && abort_incremental_marking_) {
heap()->incremental_marking()->Abort();
ClearMarkbits();
AbortWeakCollections();
AbortCompaction();
was_marked_incrementally_ = false;
}
......@@ -2800,6 +2801,20 @@ void MarkCompactCollector::ClearWeakCollections() {
}
void MarkCompactCollector::AbortWeakCollections() {
GCTracer::Scope gc_scope(heap()->tracer(),
GCTracer::Scope::MC_WEAKCOLLECTION_ABORT);
Object* weak_collection_obj = heap()->encountered_weak_collections();
while (weak_collection_obj != Smi::FromInt(0)) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
weak_collection_obj = weak_collection->next();
weak_collection->set_next(heap()->undefined_value());
}
heap()->set_encountered_weak_collections(Smi::FromInt(0));
}
void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) {
if (heap_->InNewSpace(value)) {
heap_->store_buffer()->Mark(slot);
......
......@@ -858,6 +858,10 @@ class MarkCompactCollector {
// The linked list of all encountered weak maps is destroyed.
void ClearWeakCollections();
// We have to remove all encountered weak maps from the list of weak
// collections when incremental marking is aborted.
void AbortWeakCollections();
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection.
......
......@@ -495,6 +495,26 @@ static inline void SimulateFullSpace(v8::internal::PagedSpace* space) {
}
// Helper function that simulates many incremental marking steps until
// marking is completed.
static inline void SimulateIncrementalMarking(i::Heap* heap) {
i::MarkCompactCollector* collector = heap->mark_compact_collector();
i::IncrementalMarking* marking = heap->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
marking->Start();
}
CHECK(marking->IsMarking());
while (!marking->IsComplete()) {
marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD);
}
CHECK(marking->IsComplete());
}
// Helper class for new allocations tracking and checking.
// To use checking of JS allocations tracking in a test,
// just create an instance of this class.
......
......@@ -40,25 +40,6 @@
using namespace v8::internal;
// Go through all incremental marking steps in one swoop.
static void SimulateIncrementalMarking() {
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
}
CHECK(marking->IsMarking() || marking->IsStopped());
if (marking->IsStopped()) {
marking->Start();
}
CHECK(marking->IsMarking());
while (!marking->IsComplete()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
}
CHECK(marking->IsComplete());
}
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
......@@ -1223,7 +1204,7 @@ TEST(TestCodeFlushingIncremental) {
// Simulate several GCs that use incremental marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
}
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
......@@ -1237,7 +1218,7 @@ TEST(TestCodeFlushingIncremental) {
// Simulate several GCs that use incremental marking but make sure
// the loop breaks once the function is enqueued as a candidate.
for (int i = 0; i < kAgingThreshold; i++) {
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
if (!function->next_function_link()->IsUndefined()) break;
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
}
......@@ -1313,7 +1294,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
// Simulate incremental marking so that the functions are enqueued as
// code flushing candidates. Then kill one of the functions. Finally
// perform a scavenge while incremental marking is still running.
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
*function2.location() = NULL;
CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
......@@ -1367,7 +1348,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
// Simulate incremental marking so that the function is enqueued as
// code flushing candidate.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
// Enable the debugger and add a breakpoint while incremental marking
// is running so that incremental marking aborts and code flushing is
......@@ -2758,7 +2739,7 @@ TEST(Regress1465) {
CompileRun("%DebugPrint(root);");
CHECK_EQ(transitions_count, transitions_before);
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
// Count number of live transitions after marking. Note that one transition
......@@ -2923,7 +2904,7 @@ TEST(Regress2143a) {
"root.foo = 0;"
"root = new Object;");
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
// Compile a StoreIC that performs the prepared map transition. This
// will restart incremental marking and should make sure the root is
......@@ -2964,7 +2945,7 @@ TEST(Regress2143b) {
"root.foo = 0;"
"root = new Object;");
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
// Compile an optimized LStoreNamedField that performs the prepared
// map transition. This will restart incremental marking and should
......@@ -3167,7 +3148,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
}
}
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(expected_length, feedback_vector->length());
......@@ -3210,7 +3191,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
CHECK(ic_before->ic_state() == MONOMORPHIC);
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
......@@ -3244,7 +3225,7 @@ TEST(IncrementalMarkingClearsMonomorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
......@@ -3285,7 +3266,7 @@ TEST(IncrementalMarkingClearsPolymorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
SimulateIncrementalMarking();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
......@@ -3446,7 +3427,7 @@ TEST(Regress159140) {
// Simulate incremental marking so that the functions are enqueued as
// code flushing candidates. Then optimize one function. Finally
// finish the GC to complete code flushing.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
heap->CollectAllGarbage(Heap::kNoGCFlags);
......@@ -3493,7 +3474,7 @@ TEST(Regress165495) {
// Simulate incremental marking so that unoptimized code is flushed
// even though it still is cached in the optimized code map.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
heap->CollectAllGarbage(Heap::kNoGCFlags);
// Make a new closure that will get code installed from the code map.
......@@ -3561,7 +3542,7 @@ TEST(Regress169209) {
}
// Simulate incremental marking and collect code flushing candidates.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
CHECK(shared1->code()->gc_metadata() != NULL);
// Optimize function and make sure the unoptimized code is replaced.
......@@ -3707,7 +3688,7 @@ TEST(Regress168801) {
// Simulate incremental marking so that unoptimized function is enqueued as a
// candidate for code flushing. The shared function info however will not be
// explicitly enqueued.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
// Now optimize the function so that it is taken off the candidate list.
{
......@@ -3764,7 +3745,7 @@ TEST(Regress173458) {
// Simulate incremental marking so that unoptimized function is enqueued as a
// candidate for code flushing. The shared function info however will not be
// explicitly enqueued.
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
// Now enable the debugger which in turn will disable code flushing.
CHECK(isolate->debug()->Load());
......@@ -4012,7 +3993,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
for (int i = 0; i < 3; i++) {
SimulateIncrementalMarking();
SimulateIncrementalMarking(heap);
{
LocalContext context;
HandleScope scope(heap->isolate());
......
......@@ -255,3 +255,20 @@ TEST(Regress2060b) {
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
}
TEST(Regress399527) {
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
{
HandleScope scope(isolate);
AllocateJSWeakMap(isolate);
SimulateIncrementalMarking(heap);
}
// The weak map is marked black here but leaving the handle scope will make
// the object unreachable. Aborting incremental marking will clear all the
// marking bits which makes the weak map garbage.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}