Commit 71267cf2 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Refactor BodyDescriptor to avoid redundant map loads.

Almost all callers of BodyDescriptor already have the map of the object
and should pass it to IterateBody and IsValidSlot functions.

This removes redundant load and makes the function consistent with the
SizeOf function.

Change-Id: Ie47a9bb05af23fbf0576dff99f2ec69625e057fc
Reviewed-on: https://chromium-review.googlesource.com/979436
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52218}
parent c4766f68
......@@ -245,7 +245,7 @@ class ConcurrentMarkingVisitor final
DCHECK(length->IsSmi());
int size = FixedArray::SizeFor(Smi::ToInt(length));
VisitMapPointer(object, object->map_slot());
FixedArray::BodyDescriptor::IterateBody(object, size, this);
FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
......@@ -266,7 +266,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(object)) return 0;
int size = BytecodeArray::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
BytecodeArray::BodyDescriptorWeak::IterateBody(object, size, this);
BytecodeArray::BodyDescriptorWeak::IterateBody(map, object, size, this);
object->MakeOlder();
return size;
}
......@@ -275,7 +275,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(object)) return 0;
int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
AllocationSite::BodyDescriptorWeak::IterateBody(object, size, this);
AllocationSite::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -283,7 +283,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(object)) return 0;
int size = CodeDataContainer::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
CodeDataContainer::BodyDescriptorWeak::IterateBody(object, size, this);
CodeDataContainer::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -291,7 +291,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(object)) return 0;
int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
JSFunction::BodyDescriptorWeak::IterateBody(object, size, this);
JSFunction::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -317,7 +317,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(object)) return 0;
int size = Context::BodyDescriptorWeak::SizeOf(map, object);
VisitMapPointer(object, object->map_slot());
Context::BodyDescriptorWeak::IterateBody(object, size, this);
Context::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -325,7 +325,7 @@ class ConcurrentMarkingVisitor final
if (!ShouldVisit(array)) return 0;
VisitMapPointer(array, array->map_slot());
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(array, size, this);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
weak_objects_->transition_arrays.Push(task_id_, array);
return size;
}
......@@ -405,7 +405,7 @@ class ConcurrentMarkingVisitor final
SlotSnapshottingVisitor visitor(&slot_snapshot_);
visitor.VisitPointer(object,
reinterpret_cast<Object**>(object->map_slot()));
T::BodyDescriptor::IterateBody(object, size, &visitor);
T::BodyDescriptor::IterateBody(map, object, size, &visitor);
return slot_snapshot_;
}
ConcurrentMarking::MarkingWorklist::View shared_;
......
......@@ -61,7 +61,7 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
// we can return true here.
return true;
}
return invalidated_object_->IsValidSlot(offset);
return invalidated_object_->IsValidSlot(invalidated_object_->map(), offset);
}
} // namespace internal
......
......@@ -28,7 +28,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitAllocationSite(Map* map,
AllocationSite* object) {
int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
AllocationSite::BodyDescriptorWeak::IterateBody(object, size, this);
AllocationSite::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -38,7 +38,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitBytecodeArray(Map* map,
BytecodeArray* array) {
int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
BytecodeArray::BodyDescriptor::IterateBody(array, size, this);
BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
array->MakeOlder();
return size;
}
......@@ -48,7 +48,7 @@ template <FixedArrayVisitationMode fixed_array_mode,
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitCodeDataContainer(Map* map, CodeDataContainer* object) {
int size = CodeDataContainer::BodyDescriptorWeak::SizeOf(map, object);
CodeDataContainer::BodyDescriptorWeak::IterateBody(object, size, this);
CodeDataContainer::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -71,7 +71,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
heap_->TracePossibleWrapper(object);
}
int size = JSObject::BodyDescriptor::SizeOf(map, object);
JSObject::BodyDescriptor::IterateBody(object, size, this);
JSObject::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}
......@@ -81,7 +81,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSFunction(Map* map,
JSFunction* object) {
int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
JSFunction::BodyDescriptorWeak::IterateBody(object, size, this);
JSFunction::BodyDescriptorWeak::IterateBody(map, object, size, this);
return size;
}
......@@ -98,7 +98,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
// Skip visiting the backing hash table containing the mappings and the
// pointer to the other enqueued weak collections, both are post-processed.
int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
JSWeakCollection::BodyDescriptorWeak::IterateBody(weak_collection, size,
JSWeakCollection::BodyDescriptorWeak::IterateBody(map, weak_collection, size,
this);
// Partially initialized weak collection is enqueued, but table is ignored.
......@@ -135,7 +135,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitNativeContext(Map* map,
Context* context) {
int size = Context::BodyDescriptorWeak::SizeOf(map, context);
Context::BodyDescriptorWeak::IterateBody(context, size, this);
Context::BodyDescriptorWeak::IterateBody(map, context, size, this);
return size;
}
......@@ -145,7 +145,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitTransitionArray(Map* map,
TransitionArray* array) {
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(array, size, this);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
collector_->AddTransitionArray(array);
return size;
}
......@@ -327,7 +327,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
}
}
} else {
FixedArray::BodyDescriptor::IterateBody(object, object_size, this);
FixedArray::BodyDescriptor::IterateBody(map, object, object_size, this);
}
return object_size;
}
......
......@@ -1443,16 +1443,14 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
base->heap_->CopyBlock(dst_addr, src_addr, size);
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
dst->IterateBodyFast(dst->map()->instance_type(), size,
base->record_visitor_);
dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
} else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
base->heap_->CopyBlock(dst_addr, src_addr, size);
Code::cast(dst)->Relocate(dst_addr - src_addr);
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
dst->IterateBodyFast(dst->map()->instance_type(), size,
base->record_visitor_);
dst->IterateBodyFast(dst->map(), size, base->record_visitor_);
} else {
DCHECK_OBJECT_SIZE(size);
DCHECK(dest == NEW_SPACE);
......@@ -1771,7 +1769,7 @@ void MarkCompactCollector::ProcessTopOptimizedFrame(ObjectVisitor* visitor) {
if (it.frame()->type() == StackFrame::OPTIMIZED) {
Code* code = it.frame()->LookupCode();
if (!code->CanDeoptAt(it.frame()->pc())) {
Code::BodyDescriptor::IterateBody(code, visitor);
Code::BodyDescriptor::IterateBody(code->map(), code, visitor);
}
return;
}
......@@ -3717,7 +3715,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
HeapObject* object = HeapObject::FromAddress(cur);
Map* map = object->map();
int size = object->SizeFromMap(map);
object->IterateBody(map->instance_type(), size, &visitor);
object->IterateBody(map, size, &visitor);
cur += size;
}
}
......
......@@ -80,7 +80,7 @@ void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(
int size = type::BodyDescriptor::SizeOf(map, object); \
if (visitor->ShouldVisitMapPointer()) \
visitor->VisitMapPointer(object, object->map_slot()); \
type::BodyDescriptor::IterateBody(object, size, visitor); \
type::BodyDescriptor::IterateBody(map, object, size, visitor); \
return static_cast<ResultType>(size); \
}
TYPED_VISITOR_ID_LIST(VISIT)
......@@ -100,7 +100,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
int size = Context::BodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer())
visitor->VisitMapPointer(object, object->map_slot());
Context::BodyDescriptor::IterateBody(object, size, visitor);
Context::BodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
......@@ -123,7 +123,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
int size = JSObject::FastBodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer())
visitor->VisitMapPointer(object, object->map_slot());
JSObject::FastBodyDescriptor::IterateBody(object, size, visitor);
JSObject::FastBodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
......@@ -135,7 +135,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
int size = JSObject::BodyDescriptor::SizeOf(map, object);
if (visitor->ShouldVisitMapPointer())
visitor->VisitMapPointer(object, object->map_slot());
JSObject::BodyDescriptor::IterateBody(object, size, visitor);
JSObject::BodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
......@@ -147,7 +147,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
int size = map->instance_size();
if (visitor->ShouldVisitMapPointer())
visitor->VisitMapPointer(object, object->map_slot());
StructBodyDescriptor::IterateBody(object, size, visitor);
StructBodyDescriptor::IterateBody(map, object, size, visitor);
return static_cast<ResultType>(size);
}
......@@ -166,7 +166,7 @@ int NewSpaceVisitor<ConcreteVisitor>::VisitJSFunction(Map* map,
JSFunction* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
JSFunction::BodyDescriptorWeak::IterateBody(object, size, visitor);
JSFunction::BodyDescriptorWeak::IterateBody(map, object, size, visitor);
return size;
}
......@@ -175,7 +175,7 @@ int NewSpaceVisitor<ConcreteVisitor>::VisitNativeContext(Map* map,
Context* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
int size = Context::BodyDescriptor::SizeOf(map, object);
Context::BodyDescriptor::IterateBody(object, size, visitor);
Context::BodyDescriptor::IterateBody(map, object, size, visitor);
return size;
}
......
......@@ -103,7 +103,7 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, int size) {
is_compacting_ &&
heap()->incremental_marking()->atomic_marking_state()->IsBlack(target);
IterateAndScavengePromotedObjectsVisitor visitor(heap(), this, record_slots);
target->IterateBody(target->map()->instance_type(), size, &visitor);
target->IterateBody(target->map(), size, &visitor);
}
void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
......
......@@ -1930,7 +1930,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// All the interior pointers should be contained in the heap.
int size = object->Size();
object->IterateBody(map->instance_type(), size, visitor);
object->IterateBody(map, size, visitor);
CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
......@@ -2382,7 +2382,7 @@ void NewSpace::Verify() {
// All the interior pointers should be contained in the heap.
VerifyPointersVisitor visitor;
int size = object->Size();
object->IterateBody(map->instance_type(), size, &visitor);
object->IterateBody(map, size, &visitor);
current += size;
} else {
......@@ -3473,7 +3473,7 @@ void LargeObjectSpace::Verify() {
// Byte arrays and strings don't have interior pointers.
if (object->IsAbstractCode()) {
VerifyPointersVisitor code_visitor;
object->IterateBody(map->instance_type(), object->Size(), &code_visitor);
object->IterateBody(map, object->Size(), &code_visitor);
} else if (object->IsFixedArray()) {
FixedArray* array = FixedArray::cast(object);
for (int j = 0; j < array->length(); j++) {
......
This diff is collapsed.
......@@ -18,13 +18,13 @@ namespace internal {
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
//
// static bool IsValidSlot(HeapObject* obj, int offset);
// static bool IsValidSlot(Map* map, HeapObject* obj, int offset);
//
//
// 2) Iterate object's body using stateful object visitor.
//
// template <typename ObjectVisitor>
// static inline void IterateBody(HeapObject* obj, int object_size,
// static inline void IterateBody(Map* map, HeapObject* obj, int object_size,
// ObjectVisitor* v);
class BodyDescriptorBase BASE_EMBEDDED {
public:
......@@ -46,12 +46,13 @@ class BodyDescriptorBase BASE_EMBEDDED {
protected:
// Returns true for all header and embedder fields.
static inline bool IsValidSlotImpl(HeapObject* obj, int offset);
static inline bool IsValidSlotImpl(Map* map, HeapObject* obj, int offset);
// Treats all header and embedder fields in the range as tagged.
template <typename ObjectVisitor>
static inline void IterateBodyImpl(HeapObject* obj, int start_offset,
int end_offset, ObjectVisitor* v);
static inline void IterateBodyImpl(Map* map, HeapObject* obj,
int start_offset, int end_offset,
ObjectVisitor* v);
};
......@@ -65,19 +66,19 @@ class FixedBodyDescriptor final : public BodyDescriptorBase {
static const int kEndOffset = end_offset;
static const int kSize = size;
static bool IsValidSlot(HeapObject* obj, int offset) {
static bool IsValidSlot(Map* map, HeapObject* obj, int offset) {
return offset >= kStartOffset && offset < kEndOffset;
}
template <typename ObjectVisitor>
static inline void IterateBody(HeapObject* obj, ObjectVisitor* v) {
static inline void IterateBody(Map* map, HeapObject* obj, ObjectVisitor* v) {
IteratePointers(obj, start_offset, end_offset, v);
}
template <typename ObjectVisitor>
static inline void IterateBody(HeapObject* obj, int object_size,
static inline void IterateBody(Map* map, HeapObject* obj, int object_size,
ObjectVisitor* v) {
IterateBody(obj, v);
IterateBody(map, obj, v);
}
static inline int SizeOf(Map* map, HeapObject* object) { return kSize; }
......@@ -92,12 +93,12 @@ class FlexibleBodyDescriptor final : public BodyDescriptorBase {
public:
static const int kStartOffset = start_offset;
static bool IsValidSlot(HeapObject* obj, int offset) {
static bool IsValidSlot(Map* map, HeapObject* obj, int offset) {
return (offset >= kStartOffset);
}
template <typename ObjectVisitor>
static inline void IterateBody(HeapObject* obj, int object_size,
static inline void IterateBody(Map* map, HeapObject* obj, int object_size,
ObjectVisitor* v) {
IteratePointers(obj, start_offset, object_size, v);
}
......
......@@ -1341,8 +1341,7 @@ int JSObject::GetHeaderSize(InstanceType type,
case JS_BOUND_FUNCTION_TYPE:
return JSBoundFunction::kSize;
case JS_FUNCTION_TYPE:
return function_has_prototype_slot ? JSFunction::kSizeWithPrototype
: JSFunction::kSizeWithoutPrototype;
return JSFunction::GetHeaderSize(function_has_prototype_slot);
case JS_VALUE_TYPE:
return JSValue::kSize;
case JS_DATE_TYPE:
......@@ -3479,27 +3478,24 @@ void HeapObject::Iterate(ObjectVisitor* v) { IterateFast<ObjectVisitor>(v); }
void HeapObject::IterateBody(ObjectVisitor* v) {
Map* m = map();
IterateBodyFast<ObjectVisitor>(m->instance_type(), SizeFromMap(m), v);
IterateBodyFast<ObjectVisitor>(m, SizeFromMap(m), v);
}
void HeapObject::IterateBody(InstanceType type, int object_size,
ObjectVisitor* v) {
IterateBodyFast<ObjectVisitor>(type, object_size, v);
void HeapObject::IterateBody(Map* map, int object_size, ObjectVisitor* v) {
IterateBodyFast<ObjectVisitor>(map, object_size, v);
}
struct CallIsValidSlot {
template <typename BodyDescriptor>
static bool apply(HeapObject* obj, int offset, int) {
return BodyDescriptor::IsValidSlot(obj, offset);
static bool apply(Map* map, HeapObject* obj, int offset, int) {
return BodyDescriptor::IsValidSlot(map, obj, offset);
}
};
bool HeapObject::IsValidSlot(int offset) {
bool HeapObject::IsValidSlot(Map* map, int offset) {
DCHECK_NE(0, offset);
return BodyDescriptorApply<CallIsValidSlot, bool>(map()->instance_type(),
return BodyDescriptorApply<CallIsValidSlot, bool>(map->instance_type(), map,
this, offset, 0);
}
......
......@@ -1750,19 +1750,18 @@ class HeapObject: public Object {
// If it's not performance critical iteration use the non-templatized
// version.
void IterateBody(ObjectVisitor* v);
void IterateBody(InstanceType type, int object_size, ObjectVisitor* v);
void IterateBody(Map* map, int object_size, ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(InstanceType type, int object_size,
ObjectVisitor* v);
inline void IterateBodyFast(Map* map, int object_size, ObjectVisitor* v);
// Returns true if the object contains a tagged value at given offset.
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
bool IsValidSlot(int offset);
bool IsValidSlot(Map* map, int offset);
// Returns the heap object's size in bytes
inline int Size() const;
......@@ -3445,6 +3444,11 @@ class JSFunction: public JSObject {
// Returns if this function has been compiled to native code yet.
inline bool is_compiled();
static int GetHeaderSize(bool function_has_prototype_slot) {
return function_has_prototype_slot ? JSFunction::kSizeWithPrototype
: JSFunction::kSizeWithoutPrototype;
}
// Prints the name of the function using PrintF.
void PrintName(FILE* out = stdout);
......
......@@ -667,12 +667,12 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializeContent(Map* map,
// For code objects, output raw bytes first.
OutputCode(size);
// Then iterate references via reloc info.
object_->IterateBody(map->instance_type(), size, this);
object_->IterateBody(map, size, this);
// Finally skip to the end.
serializer_->FlushSkip(SkipTo(object_->address() + size));
} else {
// For other objects, iterate references first.
object_->IterateBody(map->instance_type(), size, this);
object_->IterateBody(map, size, this);
// Then output data payload, if any.
OutputRawData(object_->address() + size);
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment