Commit 9d2d8a9c authored by ishell, committed by Commit bot

This fixes a missing incremental write barrier issue when double field unboxing is enabled.

This CL also adds useful machinery that helps trigger incremental write barriers.

BUG=chromium:469146
LOG=Y

Review URL: https://codereview.chromium.org/1039733003

Cr-Commit-Position: refs/heads/master@{#27503}
parent 15ef61d4
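
For orientation, here is a minimal standalone sketch (not V8 code; the names MarkColor and ShouldRecordSlots are hypothetical) of the per-object decision this CL moves into Heap::DoScavenge: slots of a promoted object are recorded only while compacting incremental marking is active and the object is already black, since grey objects will be rescanned anyway and white objects may not survive the collection.

    #include <cassert>

    enum class MarkColor { kWhite, kGrey, kBlack };

    // Hypothetical helper mirroring the record_slots computation in DoScavenge.
    bool ShouldRecordSlots(bool compacting_incremental_marking,
                           MarkColor target_color) {
      return compacting_incremental_marking && target_color == MarkColor::kBlack;
    }

    int main() {
      assert(ShouldRecordSlots(true, MarkColor::kBlack));
      assert(!ShouldRecordSlots(true, MarkColor::kGrey));
      assert(!ShouldRecordSlots(true, MarkColor::kWhite));
      assert(!ShouldRecordSlots(false, MarkColor::kBlack));
      return 0;
    }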
@@ -767,6 +767,11 @@ DEFINE_BOOL(stress_compaction, false,
            "stress the GC compactor to flush out bugs (implies "
            "--force_marking_deque_overflows)")
+DEFINE_BOOL(manual_evacuation_candidates_selection, false,
+            "Test mode only flag. It allows an unit test to select evacuation "
+            "candidates pages (requires --stress_compaction).")
+
//
// Dev shell flags
//
...
@@ -1903,6 +1903,18 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
        // to new space.
        DCHECK(!target->IsMap());
        Address obj_address = target->address();
+
+        // We are not collecting slots on new space objects during mutation
+        // thus we have to scan for pointers to evacuation candidates when we
+        // promote objects. But we should not record any slots in non-black
+        // objects. Grey object's slots would be rescanned.
+        // White object might not survive until the end of collection
+        // it would be a violation of the invariant to record it's slots.
+        bool record_slots = false;
+        if (incremental_marking()->IsCompacting()) {
+          MarkBit mark_bit = Marking::MarkBitFrom(target);
+          record_slots = Marking::IsBlack(mark_bit);
+        }
#if V8_DOUBLE_FIELDS_UNBOXING
        LayoutDescriptorHelper helper(target->map());
        bool has_only_tagged_fields = helper.all_fields_tagged();
@@ -1912,15 +1924,15 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
            int end_of_region_offset;
            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
              IterateAndMarkPointersToFromSpace(
-                  obj_address + offset, obj_address + end_of_region_offset,
-                  &ScavengeObject);
+                  record_slots, obj_address + offset,
+                  obj_address + end_of_region_offset, &ScavengeObject);
            }
            offset = end_of_region_offset;
          }
        } else {
#endif
-          IterateAndMarkPointersToFromSpace(obj_address, obj_address + size,
-                                            &ScavengeObject);
+          IterateAndMarkPointersToFromSpace(
+              record_slots, obj_address, obj_address + size, &ScavengeObject);
#if V8_DOUBLE_FIELDS_UNBOXING
        }
#endif
@@ -4892,22 +4904,11 @@ void Heap::ZapFromSpace() {
}

-void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end,
+void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
+                                             Address end,
                                             ObjectSlotCallback callback) {
  Address slot_address = start;

-  // We are not collecting slots on new space objects during mutation
-  // thus we have to scan for pointers to evacuation candidates when we
-  // promote objects. But we should not record any slots in non-black
-  // objects. Grey object's slots would be rescanned.
-  // White object might not survive until the end of collection
-  // it would be a violation of the invariant to record it's slots.
-  bool record_slots = false;
-  if (incremental_marking()->IsCompacting()) {
-    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
-    record_slots = Marking::IsBlack(mark_bit);
-  }
-
  while (slot_address < end) {
    Object** slot = reinterpret_cast<Object**>(slot_address);
    Object* object = *slot;
...
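
The hunk above changes the contract of IterateAndMarkPointersToFromSpace: the record_slots decision is now made once per promoted object by the caller instead of being re-derived from the region's start address, which appears to be wrong for interior tagged regions of objects with unboxed double fields (the region start is not the object's start, so the mark-bit lookup can miss). A minimal standalone model of the intended gating follows; Page, HeapObj, SlotsBuffer and IterateAndMaybeRecord are illustrative names, not V8 types.

    #include <vector>

    struct Page { bool is_evacuation_candidate = false; };
    struct HeapObj { Page* page = nullptr; };

    struct SlotsBuffer {
      std::vector<HeapObj**> recorded;
      void Record(HeapObj** slot) { recorded.push_back(slot); }
    };

    // The caller decides record_slots once for the whole object; the walk then
    // records only pointers into pages selected as evacuation candidates.
    void IterateAndMaybeRecord(bool record_slots, HeapObj** start, HeapObj** end,
                               SlotsBuffer* buffer) {
      for (HeapObj** slot = start; slot < end; ++slot) {
        HeapObj* value = *slot;
        if (value == nullptr) continue;
        // ... scavenge/update the pointer here ...
        if (record_slots && value->page->is_evacuation_candidate) {
          buffer->Record(slot);  // remembered for the compaction pass
        }
      }
    }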
@@ -909,7 +909,8 @@ class Heap {
  // Iterate pointers to from semispace of new space found in memory interval
  // from start to end.
-  void IterateAndMarkPointersToFromSpace(Address start, Address end,
+  void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
+                                         Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
...
@@ -723,6 +723,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
  int count = 0;
  int fragmentation = 0;
+  int page_number = 0;
  Candidate* least = NULL;

  PageIterator it(space);
@@ -737,9 +738,16 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
      CHECK(p->slots_buffer() == NULL);

      if (FLAG_stress_compaction) {
-        unsigned int counter = space->heap()->ms_count();
-        uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
-        if ((counter & 1) == (page_number & 1)) fragmentation = 1;
+        if (FLAG_manual_evacuation_candidates_selection) {
+          if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
+            p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+            fragmentation = 1;
+          }
+        } else {
+          unsigned int counter = space->heap()->ms_count();
+          if ((counter & 1) == (page_number & 1)) fragmentation = 1;
+          page_number++;
+        }
      } else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) {
        // Don't try to release too many pages.
        if (estimated_release >= over_reserved) {
...
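
A standalone model (illustrative names only, not V8 code) of the updated stress-compaction selection policy shown above: under --manual_evacuation_candidates_selection a page is picked only if a test marked it via the one-shot page flag added in the MemoryChunk hunk below; otherwise the old parity heuristic is kept, now driven by a running page counter rather than the page address.

    struct PageState {
      bool force_evacuation_candidate_for_testing = false;
    };

    // Returns true if the page should be treated as fragmented (fragmentation = 1).
    bool PickAsCandidate(bool manual_selection, PageState* page,
                         unsigned ms_count, int* page_number) {
      if (manual_selection) {
        if (page->force_evacuation_candidate_for_testing) {
          page->force_evacuation_candidate_for_testing = false;  // one-shot flag
          return true;
        }
        return false;
      }
      bool pick = (ms_count & 1) == (*page_number & 1);
      ++(*page_number);
      return pick;
    }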
@@ -385,6 +385,12 @@ class MemoryChunk {
    // to grey transition is performed in the value.
    HAS_PROGRESS_BAR,

+    // This flag is intended to be used for testing. Works only when both
+    // FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
+    // are set. It forces the page to become an evacuation candidate at next
+    // candidates selection cycle.
+    FORCE_EVACUATION_CANDIDATE_FOR_TESTING,
+
    // Last flag, keep at bottom.
    NUM_MEMORY_CHUNK_FLAGS
  };
...
@@ -18,7 +18,7 @@
using namespace v8::base;
using namespace v8::internal;

-#if (V8_DOUBLE_FIELDS_UNBOXING)
+#if V8_DOUBLE_FIELDS_UNBOXING

//
@@ -909,40 +909,38 @@ TEST(Regress436816) {
TEST(DoScavenge) {
  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
-  v8::HandleScope scope(CcTest::isolate());

-  CompileRun(
-      "function A() {"
-      "  this.x = 42.5;"
-      "  this.o = {};"
-      "};"
-      "var o = new A();");
-
-  Handle<String> obj_name = factory->InternalizeUtf8String("o");
-
-  Handle<Object> obj_value =
-      Object::GetProperty(isolate->global_object(), obj_name).ToHandleChecked();
-  CHECK(obj_value->IsJSObject());
-  Handle<JSObject> obj = Handle<JSObject>::cast(obj_value);
+  // The plan: create |obj| with double field in new space, do scanvenge so
+  // that |obj| is moved to old space, construct a double value that looks like
+  // a pointer to "from space" pointer. Do scavenge one more time and ensure
+  // that it didn't crash or corrupt the double value stored in the object.
+
+  Handle<HeapType> any_type = HeapType::Any(isolate);
+  Handle<Map> map = Map::Create(isolate, 10);
+  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
+                           Representation::Double(),
+                           INSERT_TRANSITION).ToHandleChecked();
+
+  // Create object in new space.
+  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED, false);
+
+  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
+  obj->WriteToField(0, *heap_number);

  {
    // Ensure the object is properly set up.
-    Map* map = obj->map();
-    DescriptorArray* descriptors = map->instance_descriptors();
-    CHECK(map->NumberOfOwnDescriptors() == 2);
-    CHECK(descriptors->GetDetails(0).representation().IsDouble());
-    CHECK(descriptors->GetDetails(1).representation().IsHeapObject());
-    FieldIndex field_index = FieldIndex::ForDescriptor(map, 0);
+    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
  }
  CHECK(isolate->heap()->new_space()->Contains(*obj));

-  // Trigger GCs so that the newly allocated object moves to old gen.
-  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  // Do scavenge so that |obj| is moved to survivor space.
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);

  // Create temp object in the new space.
  Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS, NOT_TENURED);
@@ -957,9 +955,9 @@ TEST(DoScavenge) {
  Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
  obj->FastPropertyAtPut(field_index, *boom_number);

-  // Now the object moves to old gen and it has a double field that looks like
+  // Now |obj| moves to old gen and it has a double field that looks like
  // a pointer to a from semi-space.
-  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE, "boom");

  CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
@@ -967,6 +965,96 @@ TEST(DoScavenge) {
}

TEST(DoScavengeWithIncrementalWriteBarrier) {
if (FLAG_never_compact || !FLAG_incremental_marking) return;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
PagedSpace* old_pointer_space = heap->old_pointer_space();
// The plan: create |obj_value| in old space and ensure that it is allocated
// on evacuation candidate page, create |obj| with double and tagged fields
// in new space and write |obj_value| to tagged field of |obj|, do two
// scavenges to promote |obj| to old space, a GC in old space and ensure that
// the tagged value was properly updated after candidates evacuation.
Handle<HeapType> any_type = HeapType::Any(isolate);
Handle<Map> map = Map::Create(isolate, 10);
map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
Representation::Double(),
INSERT_TRANSITION).ToHandleChecked();
map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
Representation::Tagged(),
INSERT_TRANSITION).ToHandleChecked();
// Create |obj_value| in old space.
Handle<HeapObject> obj_value;
Page* ec_page;
{
AlwaysAllocateScope always_allocate(isolate);
// Make sure |obj_value| is placed on an old-space evacuation candidate.
SimulateFullSpace(old_pointer_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
}
// Create object in new space.
Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED, false);
Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
obj->WriteToField(0, *heap_number);
obj->WriteToField(1, *obj_value);
{
// Ensure the object is properly set up.
FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
CHECK(field_index.is_inobject() && field_index.is_double());
CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
field_index = FieldIndex::ForDescriptor(*map, 1);
CHECK(field_index.is_inobject() && !field_index.is_double());
CHECK(!map->IsUnboxedDoubleField(field_index));
}
CHECK(isolate->heap()->new_space()->Contains(*obj));
// Heap is ready, force |ec_page| to become an evacuation candidate and
// simulate incremental marking.
FLAG_stress_compaction = true;
FLAG_manual_evacuation_candidates_selection = true;
ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
SimulateIncrementalMarking(heap);
// Disable stress compaction mode in order to let GC do scavenge.
FLAG_stress_compaction = false;
// Check that everything is ready for triggering incremental write barrier
// during scavenge (i.e. that |obj| is black and incremental marking is
// in compacting mode and |obj_value|'s page is an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsCompacting());
CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger GCs so that |obj| moves to old gen.
heap->CollectGarbage(i::NEW_SPACE); // in survivor space now
heap->CollectGarbage(i::NEW_SPACE); // in old gen now
CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
CHECK(isolate->heap()->old_pointer_space()->Contains(*obj_value));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
heap->CollectGarbage(i::OLD_POINTER_SPACE, "boom");
// |obj_value| must be evacuated.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
FieldIndex field_index = FieldIndex::ForDescriptor(*map, 1);
CHECK_EQ(*obj_value, obj->RawFastPropertyAt(field_index));
}
static void TestLayoutDescriptorHelper(Isolate* isolate,
                                       int inobject_properties,
                                       Handle<DescriptorArray> descriptors,
@@ -1163,28 +1251,23 @@ TEST(StoreBufferScanOnScavenge) {
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

-  CompileRun(
-      "function A() {"
-      "  this.x = 42.5;"
-      "  this.o = {};"
-      "};"
-      "var o = new A();");
-
-  Handle<String> obj_name = factory->InternalizeUtf8String("o");
-
-  Handle<Object> obj_value =
-      Object::GetProperty(isolate->global_object(), obj_name).ToHandleChecked();
-  CHECK(obj_value->IsJSObject());
-  Handle<JSObject> obj = Handle<JSObject>::cast(obj_value);
+  Handle<HeapType> any_type = HeapType::Any(isolate);
+  Handle<Map> map = Map::Create(isolate, 10);
+  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
+                           Representation::Double(),
+                           INSERT_TRANSITION).ToHandleChecked();
+
+  // Create object in new space.
+  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED, false);
+
+  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
+  obj->WriteToField(0, *heap_number);

  {
    // Ensure the object is properly set up.
-    Map* map = obj->map();
    DescriptorArray* descriptors = map->instance_descriptors();
-    CHECK(map->NumberOfOwnDescriptors() == 2);
    CHECK(descriptors->GetDetails(0).representation().IsDouble());
-    CHECK(descriptors->GetDetails(1).representation().IsHeapObject());
-    FieldIndex field_index = FieldIndex::ForDescriptor(map, 0);
+    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
...