Commit 8ff8d674 authored by ulan's avatar ulan Committed by Commit bot

[heap] Introduce HeapVisitor interface.

HeapVisitor is similar to StaticVisitor but uses virtual dispatch
instead of a static function table. It is intended as a replacement
for StaticVisitor, implemented using the CRTP.

This CL also changes the concurrent marker to use the HeapVisitor.

BUG=chromium:709075

Review-Url: https://codereview.chromium.org/2808093003
Cr-Commit-Position: refs/heads/master@{#44948}
parent 0f82f0d3
......@@ -10,6 +10,8 @@
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/marking.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/isolate.h"
#include "src/locked-queue-inl.h"
#include "src/utils-inl.h"
......@@ -50,8 +52,11 @@ class ConcurrentMarkingMarkbits {
std::unordered_map<MemoryChunk*, Bitmap*> bitmap_;
};
class ConcurrentMarkingVisitor : public ObjectVisitor {
class ConcurrentMarkingVisitor final
: public HeapVisitor<int, ConcurrentMarkingVisitor> {
public:
using BaseClass = HeapVisitor<int, ConcurrentMarkingVisitor>;
ConcurrentMarkingVisitor() : bytes_marked_(0) {}
void VisitPointers(HeapObject* host, Object** start, Object** end) override {
......@@ -61,9 +66,95 @@ class ConcurrentMarkingVisitor : public ObjectVisitor {
}
}
// ===========================================================================
// JS object =================================================================
// ===========================================================================
// Visits a JS object via the default HeapVisitor body iteration.
int VisitJSObject(Map* map, JSObject* object) override {
// TODO(ulan): implement snapshot iteration.
return BaseClass::VisitJSObject(map, object);
}
// Fast-path JS objects are treated the same as regular JS objects here.
int VisitJSObjectFast(Map* map, JSObject* object) override {
return VisitJSObject(map, object);
}
// API objects are treated the same as regular JS objects here.
int VisitJSApiObject(Map* map, JSObject* object) override {
return VisitJSObject(map, object);
}
// ===========================================================================
// Fixed array object ========================================================
// ===========================================================================
// Visits a fixed array via the default HeapVisitor body iteration.
int VisitFixedArray(Map* map, FixedArray* object) override {
// TODO(ulan): implement iteration with prefetched length.
return BaseClass::VisitFixedArray(map, object);
}
// ===========================================================================
// Code object ===============================================================
// ===========================================================================
int VisitCode(Map* map, Code* object) override {
// TODO(ulan): push the object to the bail-out deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
// ===========================================================================
// Objects with weak fields and/or side-effectiful visitation.
// ===========================================================================
int VisitBytecodeArray(Map* map, BytecodeArray* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitJSFunction(Map* map, JSFunction* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitMap(Map* map, Map* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitNativeContext(Map* map, Context* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitTransitionArray(Map* map, TransitionArray* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitWeakCell(Map* map, WeakCell* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
int VisitJSWeakCollection(Map* map, JSWeakCollection* object) override {
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
// Not visited concurrently for now; returns 0 marked bytes.
return 0;
}
// Marks obj; on the first (successful) marking, accounts its size in
// bytes_marked_ and pushes it onto the local marking stack.
void MarkObject(HeapObject* obj) {
if (markbits_.Mark(obj)) {
bytes_marked_ += obj->Size();
marking_stack_.push(obj);
}
}
......@@ -72,7 +163,7 @@ class ConcurrentMarkingVisitor : public ObjectVisitor {
while (!marking_stack_.empty()) {
HeapObject* obj = marking_stack_.top();
marking_stack_.pop();
obj->Iterate(this);
bytes_marked_ += IterateBody(obj);
}
}
......
......@@ -2441,8 +2441,7 @@ bool Heap::CreateInitialMaps() {
ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context)
native_context_map()->set_dictionary_map(true);
native_context_map()->set_visitor_id(
StaticVisitorBase::kVisitNativeContext);
native_context_map()->set_visitor_id(kVisitNativeContext);
ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize,
shared_function_info)
......
......@@ -50,7 +50,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
table_.Register(
kVisitFixedTypedArray,
kVisitFixedTypedArrayBase,
&FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
int>::Visit);
......@@ -136,7 +136,7 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
table_.Register(
kVisitFixedTypedArray,
kVisitFixedTypedArrayBase,
&FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
void>::Visit);
......@@ -628,7 +628,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
Map* map, HeapObject* object) {
......@@ -637,6 +636,105 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}
// Dispatches on the visitor id stored in the object's map and forwards the
// (map, object) pair to the matching Visit* method of the concrete visitor
// (CRTP downcast). Returns whatever that visit method returns.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::IterateBody(
HeapObject* object) {
Map* map = object->map();
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
switch (static_cast<VisitorId>(map->visitor_id())) {
#define CASE(type) \
case kVisit##type: \
return visitor->Visit##type(map, type::cast(object));
TYPED_VISITOR_ID_LIST(CASE)
#undef CASE
case kVisitShortcutCandidate:
return visitor->VisitShortcutCandidate(map, ConsString::cast(object));
case kVisitNativeContext:
return visitor->VisitNativeContext(map, Context::cast(object));
case kVisitDataObject:
return visitor->VisitDataObject(map, HeapObject::cast(object));
case kVisitJSObjectFast:
return visitor->VisitJSObjectFast(map, JSObject::cast(object));
case kVisitJSApiObject:
return visitor->VisitJSApiObject(map, JSObject::cast(object));
case kVisitStruct:
return visitor->VisitStruct(map, HeapObject::cast(object));
case kVisitFreeSpace:
return visitor->VisitFreeSpace(map, FreeSpace::cast(object));
case kVisitorIdCount:
UNREACHABLE();
}
UNREACHABLE();
// Make the compiler happy.
return ResultType();
}
// Generates the default Visit##type implementation for every typed visitor
// id: iterate the object's body via its BodyDescriptor and return the
// object size cast to ResultType.
#define VISIT(type) \
template <typename ResultType, typename ConcreteVisitor> \
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
Map* map, type* object) { \
int size = type::BodyDescriptor::SizeOf(map, object); \
type::BodyDescriptor::IterateBody(object, size, \
static_cast<ConcreteVisitor*>(this)); \
return static_cast<ResultType>(size); \
}
TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT
// A shortcut candidate is visited exactly like a regular ConsString:
// iterate the body described by ConsString::BodyDescriptor, return size.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
    Map* map, ConsString* object) {
  ConcreteVisitor* concrete = static_cast<ConcreteVisitor*>(this);
  const int object_size = ConsString::BodyDescriptor::SizeOf(map, object);
  ConsString::BodyDescriptor::IterateBody(object, object_size, concrete);
  return static_cast<ResultType>(object_size);
}
// Native contexts use the Context body descriptor for iteration.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
    Map* map, Context* object) {
  ConcreteVisitor* concrete = static_cast<ConcreteVisitor*>(this);
  const int object_size = Context::BodyDescriptor::SizeOf(map, object);
  Context::BodyDescriptor::IterateBody(object, object_size, concrete);
  return static_cast<ResultType>(object_size);
}
// Data objects contain no pointer fields, so there is nothing to iterate;
// only the instance size is reported.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject(
    Map* map, HeapObject* object) {
  return static_cast<ResultType>(map->instance_size());
}
// Fast-path JS objects are iterated with the FastBodyDescriptor.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
    Map* map, JSObject* object) {
  ConcreteVisitor* concrete = static_cast<ConcreteVisitor*>(this);
  const int object_size = JSObject::FastBodyDescriptor::SizeOf(map, object);
  JSObject::FastBodyDescriptor::IterateBody(object, object_size, concrete);
  return static_cast<ResultType>(object_size);
}
// API objects are iterated with the general JSObject body descriptor.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
    Map* map, JSObject* object) {
  ConcreteVisitor* concrete = static_cast<ConcreteVisitor*>(this);
  const int object_size = JSObject::BodyDescriptor::SizeOf(map, object);
  JSObject::BodyDescriptor::IterateBody(object, object_size, concrete);
  return static_cast<ResultType>(object_size);
}
// Structs are iterated with the generic struct body descriptor; their size
// comes from the map's instance size.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
    Map* map, HeapObject* object) {
  ConcreteVisitor* concrete = static_cast<ConcreteVisitor*>(this);
  const int object_size = map->instance_size();
  StructBodyDescriptor::IterateBody(object, object_size, concrete);
  return static_cast<ResultType>(object_size);
}
// Free space blocks contain nothing to visit; only their size is reported.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace(
    Map* map, FreeSpace* object) {
  // `object` is already a FreeSpace*, so the FreeSpace::cast(object) in the
  // original was a redundant no-op cast.
  return static_cast<ResultType>(object->size());
}
} // namespace internal
} // namespace v8
......
......@@ -11,15 +11,13 @@
namespace v8 {
namespace internal {
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
return GetVisitorId(map->instance_type(), map->instance_size(),
FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
int instance_type, int instance_size, bool has_unboxed_fields) {
VisitorId StaticVisitorBase::GetVisitorId(int instance_type, int instance_size,
bool has_unboxed_fields) {
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
......@@ -187,7 +185,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case FIXED_INT32_ARRAY_TYPE:
case FIXED_FLOAT32_ARRAY_TYPE:
case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
return kVisitFixedTypedArray;
return kVisitFixedTypedArrayBase;
case FIXED_FLOAT64_ARRAY_TYPE:
return kVisitFixedFloat64Array;
......
......@@ -24,10 +24,6 @@
namespace v8 {
namespace internal {
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
public:
#define VISITOR_ID_LIST(V) \
V(SeqOneByteString) \
V(SeqTwoByteString) \
......@@ -37,7 +33,7 @@ class StaticVisitorBase : public AllStatic {
V(FreeSpace) \
V(FixedArray) \
V(FixedDoubleArray) \
V(FixedTypedArray) \
V(FixedTypedArrayBase) \
V(FixedFloat64Array) \
V(NativeContext) \
V(AllocationSite) \
......@@ -63,22 +59,25 @@ class StaticVisitorBase : public AllStatic {
V(JSArrayBuffer) \
V(JSRegExp)
// For data objects, JS objects and structs along with generic visitor which
// can visit object of any size we provide visitors specialized by
// object size in words.
// Ids of specialized visitors are declared in a linear order (without
// holes) starting from the id of visitor specialized for 2 words objects
// (base visitor id) and ending with the id of generic visitor.
// Method GetVisitorIdForSize depends on this ordering to calculate visitor
// id of specialized visitor from given instance size, base visitor id and
// generic visitor's id.
enum VisitorId {
// For data objects, JS objects and structs along with generic visitor which
// can visit object of any size we provide visitors specialized by
// object size in words.
// Ids of specialized visitors are declared in a linear order (without
// holes) starting from the id of visitor specialized for 2 words objects
// (base visitor id) and ending with the id of generic visitor.
// Method GetVisitorIdForSize depends on this ordering to calculate visitor
// id of specialized visitor from given instance size, base visitor id and
// generic visitor's id.
enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
kVisitorIdCount
};
kVisitorIdCount
};
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
public:
// Visitor ID should fit in one byte.
STATIC_ASSERT(kVisitorIdCount <= 256);
......@@ -99,24 +98,24 @@ class VisitorDispatchTable {
// We are not using memcpy to guarantee that during update
// every element of callbacks_ array will remain correct
// pointer (memcpy might be implemented as a byte copying loop).
for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
for (int i = 0; i < kVisitorIdCount; i++) {
base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
}
}
inline Callback GetVisitor(Map* map);
inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
inline Callback GetVisitorById(VisitorId id) {
return reinterpret_cast<Callback>(callbacks_[id]);
}
void Register(StaticVisitorBase::VisitorId id, Callback callback) {
DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
void Register(VisitorId id, Callback callback) {
DCHECK(id < kVisitorIdCount); // id is unsigned.
callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
}
private:
base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
base::AtomicWord callbacks_[kVisitorIdCount];
};
......@@ -349,10 +348,70 @@ template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
StaticMarkingVisitor<StaticVisitor>::table_;
// The list of object types for which HeapVisitor declares a dedicated,
// strongly-typed Visit##type method (expanded by the VISIT/CASE macros).
#define TYPED_VISITOR_ID_LIST(V) \
V(AllocationSite) \
V(ByteArray) \
V(BytecodeArray) \
V(Cell) \
V(Code) \
V(ConsString) \
V(FixedArray) \
V(FixedDoubleArray) \
V(FixedFloat64Array) \
V(FixedTypedArrayBase) \
V(JSArrayBuffer) \
V(JSFunction) \
V(JSObject) \
V(JSRegExp) \
V(JSWeakCollection) \
V(Map) \
V(Oddball) \
V(PropertyCell) \
V(SeqOneByteString) \
V(SeqTwoByteString) \
V(SharedFunctionInfo) \
V(SlicedString) \
V(Symbol) \
V(TransitionArray) \
V(ThinString) \
V(WeakCell)
// The base class for visitors that need to dispatch on object type.
// It is similar to StaticVisitor except it uses virtual dispatch
// instead of a static dispatch table. The default behaviour of all
// visit functions is to iterate the body of the given object using
// the BodyDescriptor of the object.
//
// The visit functions return the size of the object cast to ResultType.
//
// This class is intended to be used in the following way:
//
// class SomeVisitor : public HeapVisitor<ResultType, SomeVisitor> {
// ...
// }
//
// This is an example of the Curiously Recurring Template Pattern (CRTP).
// TODO(ulan): replace static visitors with the HeapVisitor.
template <typename ResultType, typename ConcreteVisitor>
class HeapVisitor : public ObjectVisitor {
public:
// Dispatches on the object's visitor id and visits its body.
ResultType IterateBody(HeapObject* object);
protected:
// One default visit method per typed visitor id; subclasses override
// only the ones they need to customize.
#define VISIT(type) virtual ResultType Visit##type(Map* map, type* object);
TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT
virtual ResultType VisitShortcutCandidate(Map* map, ConsString* object);
virtual ResultType VisitNativeContext(Map* map, Context* object);
virtual ResultType VisitDataObject(Map* map, HeapObject* object);
virtual ResultType VisitJSObjectFast(Map* map, JSObject* object);
virtual ResultType VisitJSApiObject(Map* map, JSObject* object);
virtual ResultType VisitStruct(Map* map, HeapObject* object);
virtual ResultType VisitFreeSpace(Map* map, FreeSpace* object);
};
class WeakObjectRetainer;
// A weak list is single linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and if requested records slots for next-element
......
......@@ -36,7 +36,7 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitByteArray, &EvacuateByteArray);
table_.Register(kVisitFixedArray, &EvacuateFixedArray);
table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
table_.Register(kVisitFixedTypedArrayBase, &EvacuateFixedTypedArray);
table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
table_.Register(kVisitJSArrayBuffer,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
......@@ -446,11 +446,10 @@ void Scavenger::SelectScavengingVisitorsTable() {
// can't be evacuated into evacuation candidate but
// short-circuiting violates this assumption.
scavenging_visitors_table_.Register(
StaticVisitorBase::kVisitShortcutCandidate,
scavenging_visitors_table_.GetVisitorById(
StaticVisitorBase::kVisitConsString));
kVisitShortcutCandidate,
scavenging_visitors_table_.GetVisitorById(kVisitConsString));
scavenging_visitors_table_.Register(
StaticVisitorBase::kVisitThinString,
kVisitThinString,
&ScavengingVisitor<TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::
EvacuateThinStringNoShortcut);
}
......
......@@ -239,6 +239,22 @@ class JSArrayBuffer::BodyDescriptor final : public BodyDescriptorBase {
}
};
// ByteArray stores raw bytes only, so it has no pointer slots: both
// IterateBody overloads are intentionally empty and no slot is valid.
class ByteArray::BodyDescriptor final : public BodyDescriptorBase {
 public:
  static bool IsValidSlot(HeapObject* obj, int offset) { return false; }

  template <typename ObjectVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size,
                                 ObjectVisitor* v) {}

  template <typename StaticVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size) {}

  // Size is derived from the array's byte length.
  static inline int SizeOf(Map* map, HeapObject* obj) {
    ByteArray* array = reinterpret_cast<ByteArray*>(obj);
    return array->ByteArraySize();
  }
};
class BytecodeArray::BodyDescriptor final : public BodyDescriptorBase {
public:
static bool IsValidSlot(HeapObject* obj, int offset) {
......@@ -267,6 +283,23 @@ class BytecodeArray::BodyDescriptor final : public BodyDescriptorBase {
}
};
// FixedDoubleArray stores raw doubles only, so it has no pointer slots:
// both IterateBody overloads are intentionally empty and no slot is valid.
class FixedDoubleArray::BodyDescriptor final : public BodyDescriptorBase {
 public:
  static bool IsValidSlot(HeapObject* obj, int offset) { return false; }

  template <typename ObjectVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size,
                                 ObjectVisitor* v) {}

  template <typename StaticVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size) {}

  // Size is derived from the element count.
  static inline int SizeOf(Map* map, HeapObject* obj) {
    FixedDoubleArray* array = reinterpret_cast<FixedDoubleArray*>(obj);
    return FixedDoubleArray::SizeFor(array->length());
  }
};
class FixedTypedArrayBase::BodyDescriptor final : public BodyDescriptorBase {
public:
static bool IsValidSlot(HeapObject* obj, int offset) {
......@@ -459,6 +492,39 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
}
};
// SeqOneByteString stores raw characters only, so it has no pointer slots:
// both IterateBody overloads are intentionally empty and no slot is valid.
class SeqOneByteString::BodyDescriptor final : public BodyDescriptorBase {
 public:
  static bool IsValidSlot(HeapObject* obj, int offset) { return false; }

  template <typename ObjectVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size,
                                 ObjectVisitor* v) {}

  template <typename StaticVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size) {}

  // Size is derived from the string length.
  static inline int SizeOf(Map* map, HeapObject* obj) {
    SeqOneByteString* str = SeqOneByteString::cast(obj);
    return str->SizeFor(str->length());
  }
};
// SeqTwoByteString stores raw characters only, so it has no pointer slots:
// both IterateBody overloads are intentionally empty and no slot is valid.
class SeqTwoByteString::BodyDescriptor final : public BodyDescriptorBase {
 public:
  static bool IsValidSlot(HeapObject* obj, int offset) { return false; }

  template <typename ObjectVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size,
                                 ObjectVisitor* v) {}

  template <typename StaticVisitor>
  static inline void IterateBody(HeapObject* obj, int object_size) {}

  // Size is derived from the string length.
  static inline int SizeOf(Map* map, HeapObject* obj) {
    SeqTwoByteString* str = SeqTwoByteString::cast(obj);
    return str->SizeFor(str->length());
  }
};
template <typename Op, typename ReturnType, typename T1, typename T2,
typename T3>
......
......@@ -101,6 +101,8 @@ class FixedBodyDescriptor final : public BodyDescriptorBase {
static inline void IterateBody(HeapObject* obj, int object_size) {
IterateBody(obj);
}
static inline int SizeOf(Map* map, HeapObject* object) { return kSize; }
};
......
......@@ -2904,6 +2904,8 @@ class FixedDoubleArray: public FixedArrayBase {
DECLARE_PRINTER(FixedDoubleArray)
DECLARE_VERIFIER(FixedDoubleArray)
class BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FixedDoubleArray);
};
......@@ -3242,6 +3244,8 @@ class ByteArray: public FixedArrayBase {
// Maximal length of a single ByteArray.
static const int kMaxLength = kMaxSize - kHeaderSize;
class BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ByteArray);
};
......@@ -8501,6 +8505,8 @@ class SeqOneByteString: public SeqString {
static const int kMaxSize = 512 * MB - 1;
STATIC_ASSERT((kMaxSize - kHeaderSize) >= String::kMaxLength);
class BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqOneByteString);
};
......@@ -8541,6 +8547,8 @@ class SeqTwoByteString: public SeqString {
STATIC_ASSERT(static_cast<int>((kMaxSize - kHeaderSize)/sizeof(uint16_t)) >=
String::kMaxLength);
class BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqTwoByteString);
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment