Commit b74cfe4a authored by ishell's avatar ishell Committed by Commit bot

LayoutDescriptorHelper is now able to calculate the length of contiguous...

LayoutDescriptorHelper is now able to calculate the length of contiguous regions of tagged/non-tagged fields.
This functionality is now used by both object visitor and store buffer.

TEST=cctest/test-unboxed-doubles

Review URL: https://codereview.chromium.org/726713003

Cr-Commit-Position: refs/heads/master@{#25816}
parent 286748c9
......@@ -1806,29 +1806,28 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
promotion_queue()->remove(&target, &size);
// Promoted object might be already partially visited
// during old space pointer iteration. Thus we search specificly
// during old space pointer iteration. Thus we search specifically
// for pointers to from semispace instead of looking for pointers
// to new space.
DCHECK(!target->IsMap());
Address start_address = target->address();
Address end_address = start_address + size;
Address obj_address = target->address();
#if V8_DOUBLE_FIELDS_UNBOXING
InobjectPropertiesHelper helper(target->map());
LayoutDescriptorHelper helper(target->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
if (!has_only_tagged_fields) {
for (Address slot = start_address; slot < end_address;
slot += kPointerSize) {
if (helper.IsTagged(static_cast<int>(slot - start_address))) {
// TODO(ishell): call this once for contiguous region
// of tagged fields.
IterateAndMarkPointersToFromSpace(slot, slot + kPointerSize,
&ScavengeObject);
for (int offset = 0; offset < size;) {
int end_of_region_offset;
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
IterateAndMarkPointersToFromSpace(
obj_address + offset, obj_address + end_of_region_offset,
&ScavengeObject);
}
offset = end_of_region_offset;
}
} else {
#endif
IterateAndMarkPointersToFromSpace(start_address, end_address,
IterateAndMarkPointersToFromSpace(obj_address, obj_address + size,
&ScavengeObject);
#if V8_DOUBLE_FIELDS_UNBOXING
}
......
......@@ -2783,7 +2783,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
bool may_contain_raw_values = src->MayContainRawValues();
#if V8_DOUBLE_FIELDS_UNBOXING
InobjectPropertiesHelper helper(src->map());
LayoutDescriptorHelper helper(src->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
#endif
for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
......
......@@ -226,16 +226,14 @@ class BodyVisitorBase : public AllStatic {
DCHECK(IsAligned(start_offset, kPointerSize) &&
IsAligned(end_offset, kPointerSize));
InobjectPropertiesHelper helper(object->map());
LayoutDescriptorHelper helper(object->map());
DCHECK(!helper.all_fields_tagged());
for (int offset = start_offset; offset < end_offset;
offset += kPointerSize) {
// Visit tagged fields only.
if (helper.IsTagged(offset)) {
// TODO(ishell): call this once for contiguous region of tagged fields.
IterateRawPointers(heap, object, offset, offset + kPointerSize);
for (int offset = start_offset; offset < end_offset;) {
int end_of_region_offset;
if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
IterateRawPointers(heap, object, offset, end_of_region_offset);
}
offset = end_of_region_offset;
}
}
};
......
......@@ -512,24 +512,28 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback,
bool may_contain_raw_values = heap_object->MayContainRawValues();
if (!may_contain_raw_values) {
Address obj_address = heap_object->address();
Address start_address = obj_address + HeapObject::kHeaderSize;
Address end_address = obj_address + heap_object->Size();
const int start_offset = HeapObject::kHeaderSize;
const int end_offset = heap_object->Size();
#if V8_DOUBLE_FIELDS_UNBOXING
InobjectPropertiesHelper helper(heap_object->map());
LayoutDescriptorHelper helper(heap_object->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
if (!has_only_tagged_fields) {
for (Address slot = start_address; slot < end_address;
slot += kPointerSize) {
if (helper.IsTagged(static_cast<int>(slot - obj_address))) {
// TODO(ishell): call this once for contiguous region
// of tagged fields.
FindPointersToNewSpaceInRegion(slot, slot + kPointerSize,
slot_callback, clear_maps);
for (int offset = start_offset; offset < end_offset;) {
int end_of_region_offset;
if (helper.IsTagged(offset, end_offset,
&end_of_region_offset)) {
FindPointersToNewSpaceInRegion(
obj_address + offset,
obj_address + end_of_region_offset, slot_callback,
clear_maps);
}
offset = end_of_region_offset;
}
} else {
#endif
Address start_address = obj_address + start_offset;
Address end_address = obj_address + end_offset;
// Object has only tagged fields.
FindPointersToNewSpaceInRegion(start_address, end_address,
slot_callback, clear_maps);
......
......@@ -53,7 +53,7 @@ LayoutDescriptor* LayoutDescriptor::FastPointerLayout() {
bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
uint32_t* layout_mask) {
int* layout_bit_index) {
if (static_cast<unsigned>(field_index) >= static_cast<unsigned>(capacity())) {
return false;
}
......@@ -62,20 +62,20 @@ bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
CHECK((!IsSmi() && (*layout_word_index < length())) ||
(IsSmi() && (*layout_word_index < 1)));
int layout_bit_index = field_index % kNumberOfBits;
*layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
*layout_bit_index = field_index % kNumberOfBits;
return true;
}
LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) {
int layout_word_index;
uint32_t layout_mask;
int layout_bit_index;
if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
CHECK(false);
return this;
}
uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
if (IsSlowLayout()) {
uint32_t value = get_scalar(layout_word_index);
......@@ -102,12 +102,13 @@ bool LayoutDescriptor::IsTagged(int field_index) {
if (IsFastPointerLayout()) return true;
int layout_word_index;
uint32_t layout_mask;
int layout_bit_index;
if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
// Out of bounds queries are considered tagged.
return true;
}
uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
if (IsSlowLayout()) {
uint32_t value = get_scalar(layout_word_index);
......@@ -155,7 +156,7 @@ LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) {
// InobjectPropertiesHelper is a helper class for querying whether inobject
// property at offset is Double or not.
InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
LayoutDescriptorHelper::LayoutDescriptorHelper(Map* map)
: all_fields_tagged_(true),
header_size_(0),
layout_descriptor_(LayoutDescriptor::FastPointerLayout()) {
......@@ -175,7 +176,7 @@ InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
}
bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) {
bool LayoutDescriptorHelper::IsTagged(int offset_in_bytes) {
DCHECK(IsAligned(offset_in_bytes, kPointerSize));
if (all_fields_tagged_) return true;
// Object headers do not contain non-tagged fields.
......
......@@ -6,8 +6,11 @@
#include "src/v8.h"
#include "src/base/bits.h"
#include "src/layout-descriptor.h"
using v8::base::bits::CountTrailingZeros32;
namespace v8 {
namespace internal {
......@@ -143,5 +146,111 @@ Handle<LayoutDescriptor> LayoutDescriptor::EnsureCapacity(
return new_layout_descriptor;
}
}
// Reports whether the field at |field_index| is tagged and measures the
// contiguous run of same-typed (all tagged or all non-tagged) fields that
// starts there. Returns true if the field is tagged, false otherwise, and
// writes the run length — capped at |max_sequence_length| — to
// |out_sequence_length|.
bool LayoutDescriptor::IsTagged(int field_index, int max_sequence_length,
                                int* out_sequence_length) {
  DCHECK(max_sequence_length > 0);
  if (IsFastPointerLayout()) {
    // Fast pointer layout: every field is tagged, so the whole requested
    // range is one tagged region.
    *out_sequence_length = max_sequence_length;
    return true;
  }

  int layout_word_index;
  int layout_bit_index;

  if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
    // Out of bounds queries are considered tagged.
    *out_sequence_length = max_sequence_length;
    return true;
  }
  // Bit for |field_index| within its 32-bit layout word; a cleared bit
  // means "tagged" (see the (value & layout_mask) == 0 test below).
  uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;

  // Slow layouts store bits in an array of words; fast layouts pack them
  // into this object itself as a Smi.
  uint32_t value = IsSlowLayout()
                       ? get_scalar(layout_word_index)
                       : static_cast<uint32_t>(Smi::cast(this)->value());

  bool is_tagged = (value & layout_mask) == 0;
  if (!is_tagged) value = ~value;  // Count set bits instead of cleared bits.
  value = value & ~(layout_mask - 1);  // Clear bits we are not interested in.
  // Zeros below |layout_bit_index| were cleared above, so subtract them out.
  int sequence_length = CountTrailingZeros32(value) - layout_bit_index;

  if (layout_bit_index + sequence_length == kNumberOfBits) {
    // This is a contiguous sequence till the end of current word, proceed
    // counting in the subsequent words.
    if (IsSlowLayout()) {
      int len = length();
      ++layout_word_index;
      for (; layout_word_index < len; layout_word_index++) {
        value = get_scalar(layout_word_index);
        // Bit 0 of the next word tells whether the run continues.
        bool cur_is_tagged = (value & 1) == 0;
        if (cur_is_tagged != is_tagged) break;
        if (!is_tagged) value = ~value;  // Count set bits instead.
        int cur_sequence_length = CountTrailingZeros32(value);
        sequence_length += cur_sequence_length;
        // Stop once the run is long enough for the caller...
        if (sequence_length >= max_sequence_length) break;
        // ...or once the run ended before consuming the whole word.
        if (cur_sequence_length != kNumberOfBits) break;
      }
    }
    if (is_tagged && (field_index + sequence_length == capacity())) {
      // The contiguous sequence of tagged fields lasts till the end of the
      // layout descriptor which means that all the fields starting from
      // field_index are tagged.
      sequence_length = std::numeric_limits<int>::max();
    }
  }
  *out_sequence_length = Min(sequence_length, max_sequence_length);
  return is_tagged;
}
Handle<LayoutDescriptor> LayoutDescriptor::NewForTesting(Isolate* isolate,
int length) {
return New(isolate, length);
}
// Test-only accessor: forwards to the otherwise non-public SetTagged()
// mutator so cctest can flip individual field bits.
LayoutDescriptor* LayoutDescriptor::SetTaggedForTesting(int field_index,
                                                        bool tagged) {
  return SetTagged(field_index, tagged);
}
// Byte-offset counterpart of LayoutDescriptor::IsTagged(): reports whether
// the field at |offset_in_bytes| is tagged, and writes the end of the
// contiguous region of same-typed fields (bounded above by |end_offset|)
// to |out_end_of_contiguous_region_offset|. Offsets inside the object
// header are always treated as tagged.
bool LayoutDescriptorHelper::IsTagged(
    int offset_in_bytes, int end_offset,
    int* out_end_of_contiguous_region_offset) {
  DCHECK(IsAligned(offset_in_bytes, kPointerSize));
  DCHECK(IsAligned(end_offset, kPointerSize));
  DCHECK(offset_in_bytes < end_offset);
  if (all_fields_tagged_) {
    // Everything is tagged: the region extends to the requested end.
    *out_end_of_contiguous_region_offset = end_offset;
    DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
    return true;
  }
  int max_sequence_length = (end_offset - offset_in_bytes) / kPointerSize;
  // Header offsets clamp to field 0 (the first real inobject field).
  int field_index = Max(0, (offset_in_bytes - header_size_) / kPointerSize);
  int sequence_length;
  bool tagged = layout_descriptor_->IsTagged(field_index, max_sequence_length,
                                             &sequence_length);
  DCHECK(sequence_length > 0);
  if (offset_in_bytes < header_size_) {
    // Object headers do not contain non-tagged fields. Check if the contiguous
    // region continues after the header.
    if (tagged) {
      // First field is tagged, calculate end offset from there.
      *out_end_of_contiguous_region_offset =
          header_size_ + sequence_length * kPointerSize;
    } else {
      // First field is not tagged, so the tagged region stops at the header.
      *out_end_of_contiguous_region_offset = header_size_;
    }
    DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
    return true;
  }
  // Past the header: convert the field-count run back to a byte offset.
  *out_end_of_contiguous_region_offset =
      offset_in_bytes + sequence_length * kPointerSize;
  DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
  return tagged;
}
}
} // namespace v8::internal
......@@ -26,6 +26,14 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
public:
V8_INLINE bool IsTagged(int field_index);
// Queries the contiguous region of fields that are either tagged or not.
// Returns true if the given field is tagged or false otherwise and writes
// the length of the contiguous region to |out_sequence_length|.
// If the sequence is longer than |max_sequence_length| then
// |out_sequence_length| is set to |max_sequence_length|.
bool IsTagged(int field_index, int max_sequence_length,
int* out_sequence_length);
// Returns true if this is a layout of the object having only tagged fields.
V8_INLINE bool IsFastPointerLayout();
V8_INLINE static bool IsFastPointerLayout(Object* layout_descriptor);
......@@ -76,10 +84,8 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
// Capacity of layout descriptors in bits.
V8_INLINE int capacity();
V8_INLINE LayoutDescriptor* SetTaggedForTesting(int field_index,
bool tagged) {
return SetTagged(field_index, tagged);
}
static Handle<LayoutDescriptor> NewForTesting(Isolate* isolate, int length);
LayoutDescriptor* SetTaggedForTesting(int field_index, bool tagged);
private:
static const int kNumberOfBits = 32;
......@@ -96,7 +102,7 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
// Returns false if requested field_index is out of bounds.
V8_INLINE bool GetIndexes(int field_index, int* layout_word_index,
uint32_t* layout_mask);
int* layout_bit_index);
V8_INLINE MUST_USE_RESULT LayoutDescriptor* SetRawData(int field_index) {
return SetTagged(field_index, false);
......@@ -107,15 +113,23 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
};
// InobjectPropertiesHelper is a helper class for querying layout descriptor
// LayoutDescriptorHelper is a helper class for querying layout descriptor
// about whether the field at given offset is tagged or not.
class InobjectPropertiesHelper {
class LayoutDescriptorHelper {
public:
inline explicit InobjectPropertiesHelper(Map* map);
inline explicit LayoutDescriptorHelper(Map* map);
bool all_fields_tagged() { return all_fields_tagged_; }
inline bool IsTagged(int offset_in_bytes);
// Queries the contiguous region of fields that are either tagged or not.
// Returns true if fields starting at |offset_in_bytes| are tagged or false
// otherwise and writes the offset of the end of the contiguous region to
// |out_end_of_contiguous_region_offset|. The |end_offset| value is the
// upper bound for |out_end_of_contiguous_region_offset|.
bool IsTagged(int offset_in_bytes, int end_offset,
int* out_end_of_contiguous_region_offset);
private:
bool all_fields_tagged_;
int header_size_;
......
......@@ -7421,7 +7421,7 @@ static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
DCHECK(IsAligned(start_offset, kPointerSize) &&
IsAligned(end_offset, kPointerSize));
InobjectPropertiesHelper helper(object->map());
LayoutDescriptorHelper helper(object->map());
DCHECK(!helper.all_fields_tagged());
for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment