Commit 58d0682f authored by jarin@chromium.org

Add filler at the new space top when forcing scavenge.

We only seem to force scavenges from our cctest test suite, so this is
expected to fix some flakiness in our tests, but it will not improve
the stability of V8 itself.

R=hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/167423004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19460 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent ff0dbe37
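The idea behind the patch, in brief: the pretenuring machinery may read the
word directly behind a new-space object to see whether an AllocationMemento
follows it, so the unused tail of the new-space page between top() and
limit() must always look like a well-formed object rather than stale memory.
The standalone model below illustrates that invariant; it is a simplified
sketch, not V8 code, and every name in it (Page, kFillerTag,
EnsureFillerAtTop) is invented for illustration.

#include <cstddef>
#include <cstdint>
#include <vector>

// Made-up tag standing in for V8's filler map; anything recognizable works.
constexpr std::uint64_t kFillerTag = 0xF111F111F111F111ull;

// Toy bump-pointer page: [0, top) is allocated, [top, limit) is untouched.
struct Page {
  std::vector<std::uint64_t> words = std::vector<std::uint64_t>(64);
  std::size_t top = 0;  // bump-allocation pointer, in words

  std::size_t limit() const { return words.size(); }
};

// Analogue of Heap::EnsureFillerObjectAtTop(): stamp the unused tail so a
// scanner peeking behind the last object never sees uninitialized memory.
void EnsureFillerAtTop(Page& page) {
  for (std::size_t i = page.top; i < page.limit(); ++i) {
    page.words[i] = kFillerTag;
  }
}

int main() {
  Page page;
  page.words[page.top++] = 42;  // "allocate" one object
  EnsureFillerAtTop(page);      // [top, limit) is now identifiable as unused
  return 0;
}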
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -773,6 +773,21 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
 }
 
 
+void Heap::EnsureFillerObjectAtTop() {
+  // There may be an allocation memento behind every object in new space.
+  // If we evacuate a new space that is not full, or if we are on the last
+  // page of the new space, then there may be uninitialized memory behind
+  // the top pointer of the new space page. We store a filler object there
+  // to identify the unused space.
+  Address from_top = new_space_.top();
+  Address from_limit = new_space_.limit();
+  if (from_top < from_limit) {
+    int remaining_in_page = static_cast<int>(from_limit - from_top);
+    CreateFillerObjectAt(from_top, remaining_in_page);
+  }
+}
+
+
 bool Heap::CollectGarbage(GarbageCollector collector,
                           const char* gc_reason,
                           const char* collector_reason,
@@ -789,17 +804,7 @@ bool Heap::CollectGarbage(GarbageCollector collector,
   allocation_timeout_ = Max(6, FLAG_gc_interval);
 #endif
 
-  // There may be an allocation memento behind every object in new space.
-  // If we evacuate a new space that is not full, or if we are on the last
-  // page of the new space, then there may be uninitialized memory behind
-  // the top pointer of the new space page. We store a filler object there
-  // to identify the unused space.
-  Address from_top = new_space_.top();
-  Address from_limit = new_space_.limit();
-  if (from_top < from_limit) {
-    int remaining_in_page = static_cast<int>(from_limit - from_top);
-    CreateFillerObjectAt(from_top, remaining_in_page);
-  }
+  EnsureFillerObjectAtTop();
 
   if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
     if (FLAG_trace_incremental_marking) {
@@ -873,16 +878,6 @@ int Heap::NotifyContextDisposed() {
 }
 
 
-void Heap::PerformScavenge() {
-  GCTracer tracer(this, NULL, NULL);
-  if (incremental_marking()->IsStopped()) {
-    PerformGarbageCollection(SCAVENGER, &tracer);
-  } else {
-    PerformGarbageCollection(MARK_COMPACTOR, &tracer);
-  }
-}
-
-
 void Heap::MoveElements(FixedArray* array,
                         int dst_index,
                         int src_index,
--- a/src/heap.h
+++ b/src/heap.h
@@ -1259,10 +1259,6 @@ class Heap {
   // Notify the heap that a context has been disposed.
   int NotifyContextDisposed();
 
-  // Utility to invoke the scavenger. This is needed in test code to
-  // ensure correct callback for weak global handles.
-  void PerformScavenge();
-
   inline void increment_scan_on_scavenge_pages() {
     scan_on_scavenge_pages_++;
     if (FLAG_gc_verbose) {
@@ -2135,6 +2131,11 @@ class Heap {
   GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                           const char** reason);
 
+  // Make sure there is a filler value behind the top of the new space
+  // so that the GC does not confuse some uninitialized/stale memory
+  // with the allocation memento of the object at the top.
+  void EnsureFillerObjectAtTop();
+
   // Performs garbage collection operation.
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -7099,14 +7099,14 @@ THREADED_TEST(IndependentWeakHandle) {
   object_a.handle.MarkIndependent();
   object_b.handle.MarkIndependent();
   CHECK(object_b.handle.IsIndependent());
-  CcTest::heap()->PerformScavenge();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   CHECK(object_a.flag);
   CHECK(object_b.flag);
 }
 
 
 static void InvokeScavenge() {
-  CcTest::heap()->PerformScavenge();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
 }
@@ -7188,7 +7188,7 @@ THREADED_TEST(IndependentHandleRevival) {
   object.flag = false;
   object.handle.SetWeak(&object, &RevivingCallback);
   object.handle.MarkIndependent();
-  CcTest::heap()->PerformScavenge();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   CHECK(object.flag);
   CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
   {
@@ -17650,7 +17650,7 @@ TEST(Regress2107) {
 
 TEST(Regress2333) {
   LocalContext env;
   for (int i = 0; i < 3; i++) {
-    CcTest::heap()->PerformScavenge();
+    CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   }
 }
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -433,7 +433,7 @@ TEST(WeakGlobalHandlesScavenge) {
                           &TestWeakGlobalHandleCallback);
 
   // Scavenge treats weak pointers as normal roots.
-  heap->PerformScavenge();
+  heap->CollectGarbage(NEW_SPACE);
 
   CHECK((*h1)->IsString());
   CHECK((*h2)->IsHeapNumber());
@@ -518,7 +518,7 @@ TEST(DeleteWeakGlobalHandle) {
                           &TestWeakGlobalHandleCallback);
 
   // Scavenge does not recognize weak references.
-  heap->PerformScavenge();
+  heap->CollectGarbage(NEW_SPACE);
 
   CHECK(!WeakPointerCleared);
@@ -1436,7 +1436,7 @@ TEST(TestInternalWeakLists) {
 
     // Scavenge treats these references as strong.
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->PerformScavenge();
+      CcTest::heap()->CollectGarbage(NEW_SPACE);
       CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
@@ -1448,14 +1448,14 @@ TEST(TestInternalWeakLists) {
     // Get rid of f3 and f5 in the same way.
     CompileRun("f3=null");
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->PerformScavenge();
+      CcTest::heap()->CollectGarbage(NEW_SPACE);
       CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     CompileRun("f5=null");
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->PerformScavenge();
+      CcTest::heap()->CollectGarbage(NEW_SPACE);
       CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
@@ -1477,7 +1477,7 @@ TEST(TestInternalWeakLists) {
 
     // Scavenge treats these references as strong.
     for (int j = 0; j < 10; j++) {
-      CcTest::heap()->PerformScavenge();
+      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
       CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
     }
--- a/test/cctest/test-mementos.cc
+++ b/test/cctest/test-mementos.cc
@@ -29,11 +29,8 @@
 
 using namespace v8::internal;
 
 
-TEST(Regress340063) {
-  CcTest::InitializeVM();
-  if (!i::FLAG_allocation_site_pretenuring) return;
-  v8::HandleScope scope(CcTest::isolate());
+static void SetUpNewSpaceWithPoisonedMementoAtTop() {
   Isolate* isolate = CcTest::i_isolate();
   Heap* heap = isolate->heap();
   NewSpace* new_space = heap->new_space();
@@ -52,8 +49,31 @@ TEST(Regress340063) {
   memento->set_map_no_write_barrier(heap->allocation_memento_map());
   memento->set_allocation_site(
       reinterpret_cast<AllocationSite*>(kHeapObjectTag), SKIP_WRITE_BARRIER);
+}
+
+
+TEST(Regress340063) {
+  CcTest::InitializeVM();
+  if (!i::FLAG_allocation_site_pretenuring) return;
+  v8::HandleScope scope(CcTest::isolate());
+
+  SetUpNewSpaceWithPoisonedMementoAtTop();
 
   // Call GC to see if we can handle a poisonous memento right after the
   // current new space top pointer.
-  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  CcTest::i_isolate()->heap()->CollectAllGarbage(
+      Heap::kAbortIncrementalMarkingMask);
+}
+
+
+TEST(BadMementoAfterTopForceScavenge) {
+  CcTest::InitializeVM();
+  if (!i::FLAG_allocation_site_pretenuring) return;
+  v8::HandleScope scope(CcTest::isolate());
+
+  SetUpNewSpaceWithPoisonedMementoAtTop();
+
+  // Force a scavenge to test the poisoned memento handling.
+  CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
 }
--- a/test/cctest/test-symbols.cc
+++ b/test/cctest/test-symbols.cc
@@ -37,7 +37,7 @@ TEST(Create) {
 #endif
   }
 
-  CcTest::heap()->PerformScavenge();
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
 
   // All symbols should be distinct.
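For callers, the practical upshot of removing PerformScavenge() is visible
throughout the test diff above: tests now request a new-space collection
through the public CollectGarbage entry point, which runs
EnsureFillerObjectAtTop() first (and, like the old helper, falls back to a
mark-compact collection when incremental marking is in progress). A minimal
usage sketch, assuming the cctest harness used by the files above; the test
name here is hypothetical:

#include "cctest.h"  // cctest harness, as in the test files above

// Hypothetical test showing how a scavenge is forced after this patch.
TEST(ForceScavengeSketch) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // Replaces the removed CcTest::heap()->PerformScavenge(); the filler at
  // the new-space top is now guaranteed before the collection starts.
  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}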