Commit 33ae0e67 authored by machenbach, committed by Commit bot

Revert of Serializer: support all alignment kinds. (patchset #3 id:40001 of https://codereview.chromium.org/1179873003/)

Reason for revert:
[Sheriff] Breaks arm debug compile:
http://build.chromium.org/p/client.v8/builders/V8%20Arm%20-%20debug%20builder/builds/4308

Original issue's description:
> Serializer: support all alignment kinds.
>
> We use an alignment prefix for unusual alignment kinds (not kWordAligned).
> This will cause new objects to be aligned in ReadObject, and back references
> to be aligned to skip padding.
>
> The motivation to change the undefined next sentinel in WeakCell is this:
> When the deserializer aligns an object, it requires filler maps to already
> exist to create filler objects as padding. However, deserializing the
> filler map leads to deserializing NaN, which as heap number is aligned:
> filler map > meta map > weak cell cache > undefined > NaN
> If we use the-hole instead of undefined as sentinel, it works.
>
> R=jochen@chromium.org,bbudge@chromium.org
> BUG=v8:4178
> LOG=N
>
> Committed: https://crrev.com/2146ab75387d47eef6582bd8c2d0cfc6c42b68b6
> Cr-Commit-Position: refs/heads/master@{#29044}

TBR=jochen@chromium.org,bbudge@chromium.org,yangguo@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:4178

Review URL: https://codereview.chromium.org/1173253006

Cr-Commit-Position: refs/heads/master@{#29045}
parent 2146ab75
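
To make the quoted description concrete: below is a minimal, self-contained sketch of the alignment-prefix idea on the deserializer side. Everything in it (opcode names and values, the toy bump allocator) is invented for illustration; it is not V8's snapshot format and not the code touched by this CL.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy deserializer loop: an alignment-prefix opcode changes the alignment of
// the *next* object only, after which the default (word alignment) is
// restored; that is the role the CL's next_alignment_ field plays in the
// hunks further down.
enum ToyOpcode : uint8_t { kToyNewObject = 0x01, kToyAlignDouble = 0x02 };

struct ToyHeap {
  uintptr_t top = 0x1004;  // deliberately not 8-byte aligned
  uintptr_t Allocate(size_t size, size_t alignment) {
    uintptr_t padding = (alignment - (top % alignment)) % alignment;
    // In V8, this padding is covered by a filler object, which is why the
    // filler map must already be deserialized before any aligned object
    // appears; that is the dependency chain the quoted description breaks
    // by switching the WeakCell sentinel from undefined to the hole.
    top += padding;
    uintptr_t result = top;
    top += size;
    return result;
  }
};

int main() {
  std::vector<uint8_t> stream = {kToyAlignDouble, kToyNewObject, kToyNewObject};
  ToyHeap heap;
  size_t next_alignment = sizeof(void*);  // default alignment
  for (uint8_t op : stream) {
    if (op == kToyAlignDouble) {
      next_alignment = 8;  // prefix applies only to the object that follows
    } else if (op == kToyNewObject) {
      uintptr_t addr = heap.Allocate(16, next_alignment);
      assert(next_alignment != 8 || addr % 8 == 0);
      next_alignment = sizeof(void*);  // reset after one use
    }
  }
  return 0;
}

The second kToyNewObject in the stream is allocated with the default alignment again; that one-shot behaviour is exactly what the revert removes from the real deserializer (next_alignment_ and kAlignmentPrefix below).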
@@ -641,7 +641,7 @@ HValue* CodeStubGraphBuilder<CreateWeakCellStub>::BuildCodeStub() {
   HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
   Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value);
   Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(),
-                        graph()->GetConstantHole());
+                        graph()->GetConstantUndefined());
   HInstruction* feedback_vector =
       GetParameter(CreateWeakCellDescriptor::kVectorIndex);
@@ -3102,7 +3102,7 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   }
   result->set_map_no_write_barrier(weak_cell_map());
   WeakCell::cast(result)->initialize(value);
-  WeakCell::cast(result)->set_next(the_hole_value(), SKIP_WRITE_BARRIER);
+  WeakCell::cast(result)->set_next(undefined_value(), SKIP_WRITE_BARRIER);
   return result;
 }
@@ -3706,18 +3706,19 @@ AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
 void Heap::CreateFillerObjectAt(Address addr, int size) {
   if (size == 0) return;
   HeapObject* filler = HeapObject::FromAddress(addr);
+  // At this point, we may be deserializing the heap from a snapshot, and
+  // none of the maps have been created yet and are NULL.
   if (size == kPointerSize) {
     filler->set_map_no_write_barrier(raw_unchecked_one_pointer_filler_map());
+    DCHECK(filler->map() == NULL || filler->map() == one_pointer_filler_map());
   } else if (size == 2 * kPointerSize) {
     filler->set_map_no_write_barrier(raw_unchecked_two_pointer_filler_map());
+    DCHECK(filler->map() == NULL || filler->map() == two_pointer_filler_map());
   } else {
     filler->set_map_no_write_barrier(raw_unchecked_free_space_map());
+    DCHECK(filler->map() == NULL || filler->map() == free_space_map());
     FreeSpace::cast(filler)->nobarrier_set_size(size);
   }
-  // At this point, we may be deserializing the heap from a snapshot, and
-  // none of the maps have been created yet and are NULL.
-  DCHECK(filler->map() == NULL && !deserialization_complete_ ||
-         filler->map()->IsMap());
 }
@@ -2637,7 +2637,7 @@ void MarkCompactCollector::AbortWeakCollections() {
 void MarkCompactCollector::ProcessAndClearWeakCells() {
-  HeapObject* the_hole = heap()->the_hole_value();
+  HeapObject* undefined = heap()->undefined_value();
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
@@ -2672,19 +2672,19 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
       RecordSlot(slot, slot, *slot);
     }
     weak_cell_obj = weak_cell->next();
-    weak_cell->set_next(the_hole, SKIP_WRITE_BARRIER);
+    weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_weak_cells(Smi::FromInt(0));
 }
 
 void MarkCompactCollector::AbortWeakCells() {
-  Object* the_hole = heap()->the_hole_value();
+  Object* undefined = heap()->undefined_value();
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     weak_cell_obj = weak_cell->next();
-    weak_cell->set_next(the_hole, SKIP_WRITE_BARRIER);
+    weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_weak_cells(Smi::FromInt(0));
 }
@@ -329,11 +329,11 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                         HeapObject* object) {
   Heap* heap = map->GetHeap();
   WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
-  Object* the_hole = heap->the_hole_value();
+  Object* undefined = heap->undefined_value();
   // Enqueue weak cell in linked list of encountered weak collections.
   // We can ignore weak cells with cleared values because they will always
   // contain smi zero.
-  if (weak_cell->next() == the_hole && !weak_cell->cleared()) {
+  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
     weak_cell->set_next(heap->encountered_weak_cells(),
                         UPDATE_WEAK_WRITE_BARRIER);
     heap->set_encountered_weak_cells(weak_cell);
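
The three WeakCell hunks above all revolve around one sentinel convention. The following self-contained sketch (stand-in types, not V8 code) shows that scheme; the reverted CL only changed which heap value plays the sentinel role (the hole instead of undefined), and the revert switches it back.

#include <cassert>

// Stand-in types, not V8 code: Cell plays the role of WeakCell, SENTINEL the
// role of undefined_value()/the_hole_value(), and `encountered` the role of
// heap->encountered_weak_cells() (which V8 terminates with Smi zero).
struct Cell {
  Cell* next;
};

static Cell sentinel_storage;
static Cell* const SENTINEL = &sentinel_storage;
static Cell* encountered = nullptr;  // head of the list built during marking

// Mirrors the enqueue check in VisitWeakCell (minus the cleared() test):
// a cell is enqueued at most once, because enqueueing overwrites the
// sentinel stored in its next field.
void VisitCell(Cell* cell) {
  if (cell->next == SENTINEL) {
    cell->next = encountered;
    encountered = cell;
  }
}

// Mirrors ProcessAndClearWeakCells/AbortWeakCells: walk the list and put the
// sentinel back so every cell can be enqueued again in the next GC cycle.
void ProcessAndClear() {
  while (encountered != nullptr) {
    Cell* cell = encountered;
    encountered = cell->next;
    cell->next = SENTINEL;
  }
}

int main() {
  Cell a{SENTINEL};
  Cell b{SENTINEL};
  VisitCell(&a);
  VisitCell(&a);  // no-op: a.next no longer holds the sentinel
  VisitCell(&b);
  ProcessAndClear();
  assert(a.next == SENTINEL && b.next == SENTINEL && encountered == nullptr);
  return 0;
}

The design point is that "not enqueued" is encoded in the cell itself, so marking needs no separate visited set; the price is that whichever value serves as the sentinel must be deserializable before the filler maps, per the quoted description.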
@@ -324,14 +324,12 @@ class SerializerDeserializer: public ObjectVisitor {
     // 0x07 Unused (including 0x27, 0x47, 0x67).
     // 0x08..0x0c Reference to previous object from space.
     kBackref = 0x08,
     // 0x0d Unused (including 0x2d, 0x4d, 0x6d).
     // 0x0e Unused (including 0x2e, 0x4e, 0x6e).
     // 0x0f Unused (including 0x2f, 0x4f, 0x6f).
     // 0x10..0x14 Reference to previous object from space after skip.
     kBackrefWithSkip = 0x10,
     // 0x15 Unused (including 0x35, 0x55, 0x75).
     // 0x16 Unused (including 0x36, 0x56, 0x76).
-    // 0x17 Misc (including 0x37, 0x57, 0x77).
+    // 0x17 Unused (including 0x37, 0x57, 0x77).
     // 0x18 Root array item.
     kRootArray = 0x18,
     // 0x19 Object in the partial snapshot cache.
@@ -386,18 +384,14 @@ class SerializerDeserializer: public ObjectVisitor {
   // is an indication that the snapshot and the VM do not fit together.
   // Examine the build process for architecture, version or configuration
   // mismatches.
-  static const int kSynchronize = 0x17;
+  static const int kSynchronize = 0x5d;
   // Used for the source code of the natives, which is in the executable, but
   // is referred to from external strings in the snapshot.
-  static const int kNativesStringResource = 0x37;
+  static const int kNativesStringResource = 0x5e;
   // Raw data of variable length.
-  static const int kVariableRawData = 0x57;
+  static const int kVariableRawData = 0x7d;
   // Repeats of variable length.
-  static const int kVariableRepeat = 0x77;
-  // Alignment prefixes 0x7d..0x7f
-  static const int kAlignmentPrefix = 0x7d;
-  // 0x5d..0x5f unused
+  static const int kVariableRepeat = 0x7e;
 
   // ---------- byte code range 0x80..0xff ----------
   // First 32 root array items.
@@ -521,8 +515,7 @@ class Deserializer: public SerializerDeserializer {
         magic_number_(data->GetMagicNumber()),
         external_reference_table_(NULL),
         deserialized_large_objects_(0),
-        deserializing_user_code_(false),
-        next_alignment_(kWordAligned) {
+        deserializing_user_code_(false) {
     DecodeReservation(data->Reservations());
   }
@@ -609,8 +602,6 @@ class Deserializer: public SerializerDeserializer {
   bool deserializing_user_code_;
 
-  AllocationAlignment next_alignment_;
-
   DISALLOW_COPY_AND_ASSIGN(Deserializer);
 };
@@ -715,9 +706,6 @@ class Serializer : public SerializerDeserializer {
   void PutBackReference(HeapObject* object, BackReference reference);
 
-  // Emit alignment prefix if necessary, return required padding space in bytes.
-  int PutAlignmentPrefix(HeapObject* object);
-
   // Returns true if the object was successfully serialized.
   bool SerializeKnownObject(HeapObject* obj, HowToCode how_to_code,
                             WhereToPoint where_to_point, int skip);
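
For completeness, the serializer-side counterpart removed in the last hunk, PutAlignmentPrefix, is only described here by its comment ("emit alignment prefix if necessary, return required padding space in bytes"). A rough sketch of what such a helper can look like follows; the names, the opcode value, and the reservation bookkeeping are assumptions for illustration, not the CL's implementation.

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative only. A real serializer would key this off the object's
// required AllocationAlignment; here kToyDoubleAligned stands in for any
// non-default alignment kind.
enum ToyAlignment { kToyWordAligned, kToyDoubleAligned };

constexpr uint8_t kToyAlignmentPrefixOpcode = 0x7d;  // assumed toy value
constexpr size_t kToyWordSize = 4;                   // e.g. 32-bit arm
constexpr size_t kToyDoubleSize = 8;

// Emit a prefix byte only for a non-default alignment, and report the
// maximum padding the deserializer may have to insert before the object
// (assumed here to be what the returned byte count is used for).
size_t PutToyAlignmentPrefix(ToyAlignment alignment,
                             std::vector<uint8_t>* sink) {
  if (alignment == kToyWordAligned) return 0;  // common case: no prefix byte
  sink->push_back(kToyAlignmentPrefixOpcode);
  return kToyDoubleSize - kToyWordSize;  // worst-case filler before the object
}

Read together with the deserializer sketch near the top of this page, the two halves show why the feature touches both Serializer and Deserializer: the prefix is emitted once per unusually aligned object and consumed exactly once on the other side.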