Commit a7159577 authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Iterate handles with special left-trim visitor

BUG=chromium:620553
LOG=N
R=hpayer@chromium.org

Review-Url: https://codereview.chromium.org/2102243002
Cr-Commit-Position: refs/heads/master@{#37366}
parent 356a85be
...@@ -482,31 +482,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) { ...@@ -482,31 +482,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
static_cast<size_t>(byte_size / kPointerSize)); static_cast<size_t>(byte_size / kPointerSize));
} }
// Detects whether |*object| is a stale pointer into the filler left behind
// by left-trimming a FixedArrayBase. If so, clears the slot (so later handle
// post-processing does not touch a page the sweeper may have freed) and
// returns true; otherwise returns false and leaves the slot untouched.
bool Heap::PurgeLeftTrimmedObject(Object** object) {
  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
  // Smis can never be left-trimmed fillers; bail out before reading the map
  // word of something that is not actually a heap object.
  if (!current->IsHeapObject()) return false;
  const MapWord map_word = current->map_word();
  // A forwarding address means the object was evacuated, not left-trimmed.
  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
#ifdef DEBUG
    // We need to find a FixedArrayBase map after walking the fillers.
    while (current->IsFiller()) {
      Address next = reinterpret_cast<Address>(current);
      if (current->map() == one_pointer_filler_map()) {
        next += kPointerSize;
      } else if (current->map() == two_pointer_filler_map()) {
        next += 2 * kPointerSize;
      } else {
        next += current->Size();
      }
      current = reinterpret_cast<HeapObject*>(next);
    }
    DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
    *object = nullptr;
    return true;
  }
  return false;
}
template <Heap::FindMementoMode mode> template <Heap::FindMementoMode mode>
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) { AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
// Check if there is potentially a memento behind the object. If // Check if there is potentially a memento behind the object. If
......
...@@ -4755,6 +4755,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) { ...@@ -4755,6 +4755,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
v->Synchronize(VisitorSynchronization::kSmiRootList); v->Synchronize(VisitorSynchronization::kSmiRootList);
} }
// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
public:
explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
USE(heap_);
}
void VisitPointer(Object** p) override { FixHandle(p); }
void VisitPointers(Object** start, Object** end) override {
for (Object** p = start; p < end; p++) FixHandle(p);
}
private:
inline void FixHandle(Object** p) {
HeapObject* current = reinterpret_cast<HeapObject*>(*p);
if (!current->IsHeapObject()) return;
const MapWord map_word = current->map_word();
if (!map_word.IsForwardingAddress() && current->IsFiller()) {
#ifdef DEBUG
// We need to find a FixedArrayBase map after walking the fillers.
while (current->IsFiller()) {
Address next = reinterpret_cast<Address>(current);
if (current->map() == heap_->one_pointer_filler_map()) {
next += kPointerSize;
} else if (current->map() == heap_->two_pointer_filler_map()) {
next += 2 * kPointerSize;
} else {
next += current->Size();
}
current = reinterpret_cast<HeapObject*>(next);
}
DCHECK(current->IsFixedArrayBase());
#endif // DEBUG
*p = nullptr;
}
}
Heap* heap_;
};
void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
...@@ -4777,6 +4820,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) { ...@@ -4777,6 +4820,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->Synchronize(VisitorSynchronization::kCompilationCache); v->Synchronize(VisitorSynchronization::kCompilationCache);
// Iterate over local handles in handle scopes. // Iterate over local handles in handle scopes.
FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
isolate_->handle_scope_implementer()->Iterate(v); isolate_->handle_scope_implementer()->Iterate(v);
isolate_->IterateDeferredHandles(v); isolate_->IterateDeferredHandles(v);
v->Synchronize(VisitorSynchronization::kHandleScope); v->Synchronize(VisitorSynchronization::kHandleScope);
......
...@@ -632,12 +632,6 @@ class Heap { ...@@ -632,12 +632,6 @@ class Heap {
// stored on the map to facilitate fast dispatch for {StaticVisitorBase}. // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
static int GetStaticVisitorIdForMap(Map* map); static int GetStaticVisitorIdForMap(Map* map);
// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
inline bool PurgeLeftTrimmedObject(Object** object);
// Notifies the heap that is ok to start marking or other activities that // Notifies the heap that is ok to start marking or other activities that
// should not happen during deserialization. // should not happen during deserialization.
void NotifyDeserializationComplete(); void NotifyDeserializationComplete();
......
...@@ -1413,8 +1413,6 @@ class RootMarkingVisitor : public ObjectVisitor { ...@@ -1413,8 +1413,6 @@ class RootMarkingVisitor : public ObjectVisitor {
HeapObject* object = HeapObject::cast(*p); HeapObject* object = HeapObject::cast(*p);
if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;
MarkBit mark_bit = Marking::MarkBitFrom(object); MarkBit mark_bit = Marking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark_bit)) return; if (Marking::IsBlackOrGrey(mark_bit)) return;
......
...@@ -445,8 +445,6 @@ void ScavengeVisitor::ScavengePointer(Object** p) { ...@@ -445,8 +445,6 @@ void ScavengeVisitor::ScavengePointer(Object** p) {
Object* object = *p; Object* object = *p;
if (!heap_->InNewSpace(object)) return; if (!heap_->InNewSpace(object)) return;
if (heap_->PurgeLeftTrimmedObject(p)) return;
Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p), Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object)); reinterpret_cast<HeapObject*>(object));
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment