Commit 97c962c2 authored by rmcilroy@chromium.org

Add support for extended constant pool arrays.

This CL adds support for ConstantPoolArrays which contain an extended section.
This will be used to enable constant pools larger than can be addressed by a
single ldr instruction with an immediate offset (which is limited to a 4KB range).
Extended constant pools will have a small section, which is addressable via a
single ldr instruction, and an extended section, which will require a multi-
instruction sequence to load from.
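
For illustration, the new allocation interface introduced by this CL would be
used roughly as follows. This is only a sketch distilled from the diff below;
the entry counts and the local isolate pointer are placeholders invented for
the example, not code from this CL:

  // Sketch only: per-type entry counts for each layout section, then
  // allocation through the Factory interface added in this CL.
  ConstantPoolArray::NumberOfEntries small(/* int64 */ 2, /* code ptr */ 1,
                                           /* heap ptr */ 3, /* int32 */ 4);
  ConstantPoolArray::NumberOfEntries extended(16, 0, 8, 32);

  // Small-only pool: every entry is reachable with a single ldr/vldr.
  Handle<ConstantPoolArray> pool =
      isolate->factory()->NewConstantPoolArray(small);

  // Pool with an extended section for entries beyond that offset range.
  Handle<ConstantPoolArray> big_pool =
      isolate->factory()->NewExtendedConstantPoolArray(small, extended);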

Currently, no code uses extended ConstantPoolArrays; this change will be made
in a follow-up CL.

A number of changes are made to the ConstantPoolArray object in order to
support this:
  - Small section layout is now entirely defined by the small layout bitmaps.
  - The ConstantPoolArray no longer extends FixedArrayBase since the length
    field is not useful for extended layouts.
  - Enums are used to represent the type of an entry and the layout section.
  - An iterator can be used to iterate through all elements of a given type
    (see the sketch after this list).
  - A number of tests were added for these features.
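
As a rough illustration of the last two points, per-type iteration looks like
the following sketch, which mirrors the visitor code in this CL's diff.
VisitCodePtrEntries is a hypothetical helper written for illustration, not a
function added by this CL:

  // Sketch only: visit every CODE_PTR entry in |pool|, whichever layout
  // section it lives in, using the new iterator.
  void VisitCodePtrEntries(ConstantPoolArray* pool, ObjectVisitor* v) {
    ConstantPoolArray::Iterator code_iter(pool, ConstantPoolArray::CODE_PTR);
    while (!code_iter.is_finished()) {
      v->VisitCodeEntry(reinterpret_cast<Address>(
          pool->RawFieldOfElementAt(code_iter.next_index())));
    }
  }

  // Entry counts are likewise queried per type and per layout section, e.g.
  // pool->number_of_entries(ConstantPoolArray::INT64,
  //                         ConstantPoolArray::SMALL_SECTION).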

R=ulan@chromium.org

Review URL: https://codereview.chromium.org/304143002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21653 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b6a8f739
@@ -3554,11 +3554,15 @@ void ConstantPoolBuilder::AddEntry(Assembler* assm,
   // Check if we still have room for another entry given Arm's ldr and vldr
   // immediate offset range.
-  if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_,
-                                             count_of_code_ptr_,
-                                             count_of_heap_ptr_,
-                                             count_of_32bit_))) &&
-      is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) {
+  // TODO(rmcilroy): Avoid creating a new object here when we support
+  // extended constant pools.
+  ConstantPoolArray::NumberOfEntries total(count_of_64bit_,
+                                           count_of_code_ptr_,
+                                           count_of_heap_ptr_,
+                                           count_of_32bit_);
+  ConstantPoolArray::NumberOfEntries int64_counts(count_of_64bit_, 0, 0, 0);
+  if (!(is_uint12(ConstantPoolArray::SizeFor(total)) &&
+        is_uint10(ConstantPoolArray::SizeFor(int64_counts)))) {
     assm->set_constant_pool_full();
   }
 }
@@ -3577,20 +3581,26 @@ Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) {
   if (IsEmpty()) {
     return isolate->factory()->empty_constant_pool_array();
   } else {
-    return isolate->factory()->NewConstantPoolArray(count_of_64bit_,
-                                                    count_of_code_ptr_,
-                                                    count_of_heap_ptr_,
-                                                    count_of_32bit_);
+    ConstantPoolArray::NumberOfEntries small(count_of_64bit_,
+                                             count_of_code_ptr_,
+                                             count_of_heap_ptr_,
+                                             count_of_32bit_);
+    return isolate->factory()->NewConstantPoolArray(small);
   }
 }
 
 
 void ConstantPoolBuilder::Populate(Assembler* assm,
                                    ConstantPoolArray* constant_pool) {
-  ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_);
-  ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_);
-  ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_);
-  ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_);
+  ConstantPoolArray::LayoutSection section = ConstantPoolArray::SMALL_SECTION;
+  ASSERT(count_of_64bit_ ==
+         constant_pool->number_of_entries(ConstantPoolArray::INT64, section));
+  ASSERT(count_of_code_ptr_ ==
+         constant_pool->number_of_entries(ConstantPoolArray::CODE_PTR, section));
+  ASSERT(count_of_heap_ptr_ ==
+         constant_pool->number_of_entries(ConstantPoolArray::HEAP_PTR, section));
+  ASSERT(count_of_32bit_ ==
+         constant_pool->number_of_entries(ConstantPoolArray::INT32, section));
   ASSERT(entries_.size() == merged_indexes_.size());
 
   int index_64bit = 0;
@@ -3616,7 +3626,7 @@ void ConstantPoolBuilder::Populate(Assembler* assm,
       offset = constant_pool->OffsetOfElementAt(index_code_ptr) -
           kHeapObjectTag;
       constant_pool->set(index_code_ptr++,
-                         reinterpret_cast<Object *>(rinfo->data()));
+                         reinterpret_cast<Address>(rinfo->data()));
     } else {
       ASSERT(IsHeapPtrEntry(rmode));
       offset = constant_pool->OffsetOfElementAt(index_heap_ptr) -
......
@@ -115,18 +115,23 @@ Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(
 Handle<ConstantPoolArray> Factory::NewConstantPoolArray(
-    int number_of_int64_entries,
-    int number_of_code_ptr_entries,
-    int number_of_heap_ptr_entries,
-    int number_of_int32_entries) {
-  ASSERT(number_of_int64_entries > 0 || number_of_code_ptr_entries > 0 ||
-         number_of_heap_ptr_entries > 0 || number_of_int32_entries > 0);
+    const ConstantPoolArray::NumberOfEntries& small) {
+  ASSERT(small.total_count() > 0);
   CALL_HEAP_FUNCTION(
       isolate(),
-      isolate()->heap()->AllocateConstantPoolArray(number_of_int64_entries,
-                                                   number_of_code_ptr_entries,
-                                                   number_of_heap_ptr_entries,
-                                                   number_of_int32_entries),
+      isolate()->heap()->AllocateConstantPoolArray(small),
       ConstantPoolArray);
 }
+
+
+Handle<ConstantPoolArray> Factory::NewExtendedConstantPoolArray(
+    const ConstantPoolArray::NumberOfEntries& small,
+    const ConstantPoolArray::NumberOfEntries& extended) {
+  ASSERT(small.total_count() > 0);
+  ASSERT(extended.total_count() > 0);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateExtendedConstantPoolArray(small, extended),
+      ConstantPoolArray);
+}
......
@@ -45,10 +45,11 @@ class Factory V8_FINAL {
       PretenureFlag pretenure = NOT_TENURED);
 
   Handle<ConstantPoolArray> NewConstantPoolArray(
-      int number_of_int64_entries,
-      int number_of_code_ptr_entries,
-      int number_of_heap_ptr_entries,
-      int number_of_int32_entries);
+      const ConstantPoolArray::NumberOfEntries& small);
+
+  Handle<ConstantPoolArray> NewExtendedConstantPoolArray(
+      const ConstantPoolArray::NumberOfEntries& small,
+      const ConstantPoolArray::NumberOfEntries& extended);
 
   Handle<OrderedHashSet> NewOrderedHashSet();
   Handle<OrderedHashMap> NewOrderedHashMap();
......
@@ -1808,7 +1808,9 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
   STATIC_ASSERT((FixedDoubleArray::kHeaderSize &
                  kDoubleAlignmentMask) == 0);  // NOLINT
-  STATIC_ASSERT((ConstantPoolArray::kHeaderSize &
+  STATIC_ASSERT((ConstantPoolArray::kFirstEntryOffset &
                  kDoubleAlignmentMask) == 0);  // NOLINT
+  STATIC_ASSERT((ConstantPoolArray::kExtendedFirstOffset &
+                 kDoubleAlignmentMask) == 0);  // NOLINT
@@ -4046,23 +4048,26 @@ AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
 
 AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
                                                     Map* map) {
-  int int64_entries = src->count_of_int64_entries();
-  int code_ptr_entries = src->count_of_code_ptr_entries();
-  int heap_ptr_entries = src->count_of_heap_ptr_entries();
-  int int32_entries = src->count_of_int32_entries();
   HeapObject* obj;
-  { AllocationResult allocation =
-        AllocateConstantPoolArray(int64_entries, code_ptr_entries,
-                                  heap_ptr_entries, int32_entries);
+  if (src->is_extended_layout()) {
+    ConstantPoolArray::NumberOfEntries small(src,
+                                             ConstantPoolArray::SMALL_SECTION);
+    ConstantPoolArray::NumberOfEntries extended(src,
+                                             ConstantPoolArray::EXTENDED_SECTION);
+    AllocationResult allocation =
+        AllocateExtendedConstantPoolArray(small, extended);
     if (!allocation.To(&obj)) return allocation;
+  } else {
+    ConstantPoolArray::NumberOfEntries small(src,
+                                             ConstantPoolArray::SMALL_SECTION);
+    AllocationResult allocation = AllocateConstantPoolArray(small);
+    if (!allocation.To(&obj)) return allocation;
   }
   obj->set_map_no_write_barrier(map);
-  int size = ConstantPoolArray::SizeFor(
-      int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries);
   CopyBlock(
-      obj->address() + ConstantPoolArray::kLengthOffset,
-      src->address() + ConstantPoolArray::kLengthOffset,
-      size - ConstantPoolArray::kLengthOffset);
+      obj->address() + ConstantPoolArray::kFirstEntryOffset,
+      src->address() + ConstantPoolArray::kFirstEntryOffset,
+      src->size() - ConstantPoolArray::kFirstEntryOffset);
   return obj;
 }
@@ -4154,22 +4159,10 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
 }
 
 
-AllocationResult Heap::AllocateConstantPoolArray(int number_of_int64_entries,
-                                                 int number_of_code_ptr_entries,
-                                                 int number_of_heap_ptr_entries,
-                                                 int number_of_int32_entries) {
-  CHECK(number_of_int64_entries >= 0 &&
-        number_of_int64_entries <= ConstantPoolArray::kMaxEntriesPerType &&
-        number_of_code_ptr_entries >= 0 &&
-        number_of_code_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType &&
-        number_of_heap_ptr_entries >= 0 &&
-        number_of_heap_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType &&
-        number_of_int32_entries >= 0 &&
-        number_of_int32_entries <= ConstantPoolArray::kMaxEntriesPerType);
-  int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
-                                        number_of_code_ptr_entries,
-                                        number_of_heap_ptr_entries,
-                                        number_of_int32_entries);
+AllocationResult Heap::AllocateConstantPoolArray(
+    const ConstantPoolArray::NumberOfEntries& small) {
+  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
+  int size = ConstantPoolArray::SizeFor(small);
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
@@ -4183,39 +4176,47 @@ AllocationResult Heap::AllocateConstantPoolArray(int number_of_int64_entries,
   object->set_map_no_write_barrier(constant_pool_array_map());
   ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
-  constant_pool->Init(number_of_int64_entries,
-                      number_of_code_ptr_entries,
-                      number_of_heap_ptr_entries,
-                      number_of_int32_entries);
-  if (number_of_code_ptr_entries > 0) {
-    int offset =
-        constant_pool->OffsetOfElementAt(constant_pool->first_code_ptr_index());
-    MemsetPointer(
-        reinterpret_cast<Address*>(HeapObject::RawField(constant_pool, offset)),
-        isolate()->builtins()->builtin(Builtins::kIllegal)->entry(),
-        number_of_code_ptr_entries);
-  }
-  if (number_of_heap_ptr_entries > 0) {
-    int offset =
-        constant_pool->OffsetOfElementAt(constant_pool->first_heap_ptr_index());
-    MemsetPointer(
-        HeapObject::RawField(constant_pool, offset),
-        undefined_value(),
-        number_of_heap_ptr_entries);
-  }
+  constant_pool->Init(small);
+  constant_pool->ClearPtrEntries(isolate());
   return constant_pool;
 }
+
+
+AllocationResult Heap::AllocateExtendedConstantPoolArray(
+    const ConstantPoolArray::NumberOfEntries& small,
+    const ConstantPoolArray::NumberOfEntries& extended) {
+  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
+  CHECK(extended.are_in_range(0, kMaxInt));
+  int size = ConstantPoolArray::SizeForExtended(small, extended);
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+  HeapObject* object;
+  { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    if (!allocation.To(&object)) return allocation;
+  }
+  object = EnsureDoubleAligned(this, object, size);
+  object->set_map_no_write_barrier(constant_pool_array_map());
+
+  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
+  constant_pool->InitExtended(small, extended);
+  constant_pool->ClearPtrEntries(isolate());
+  return constant_pool;
+}
 
 
 AllocationResult Heap::AllocateEmptyConstantPoolArray() {
-  int size = ConstantPoolArray::SizeFor(0, 0, 0, 0);
+  ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0);
+  int size = ConstantPoolArray::SizeFor(small);
   HeapObject* result;
   { AllocationResult allocation =
         AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(constant_pool_array_map());
-  ConstantPoolArray::cast(result)->Init(0, 0, 0, 0);
+  ConstantPoolArray::cast(result)->Init(small);
   return result;
 }
......
@@ -1873,10 +1873,11 @@ class Heap {
       ConstantPoolArray* src, Map* map);
 
   MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
-      int number_of_int64_entries,
-      int number_of_code_ptr_entries,
-      int number_of_heap_ptr_entries,
-      int number_of_int32_entries);
+      const ConstantPoolArray::NumberOfEntries& small);
+
+  MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
+      const ConstantPoolArray::NumberOfEntries& small,
+      const ConstantPoolArray::NumberOfEntries& extended);
 
   // Allocates an external array of the specified length and type.
   MUST_USE_RESULT AllocationResult AllocateExternalArray(
......
@@ -2847,10 +2847,11 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst,
                              SlotsBuffer::IGNORE_OVERFLOW);
       }
     } else if (compacting_ && dst->IsConstantPoolArray()) {
-      ConstantPoolArray* constant_pool = ConstantPoolArray::cast(dst);
-      for (int i = 0; i < constant_pool->count_of_code_ptr_entries(); i++) {
+      ConstantPoolArray* array = ConstantPoolArray::cast(dst);
+      ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
+      while (!code_iter.is_finished()) {
         Address code_entry_slot =
-            dst_addr + constant_pool->OffsetOfElementAt(i);
+            dst_addr + array->OffsetOfElementAt(code_iter.next_index());
         Address code_entry = Memory::Address_at(code_entry_slot);
         if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
......
@@ -378,12 +378,14 @@ void FixedDoubleArray::FixedDoubleArrayVerify() {
 
 void ConstantPoolArray::ConstantPoolArrayVerify() {
   CHECK(IsConstantPoolArray());
-  for (int i = 0; i < count_of_code_ptr_entries(); i++) {
-    Address code_entry = get_code_ptr_entry(first_code_ptr_index() + i);
+  ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR);
+  while (!code_iter.is_finished()) {
+    Address code_entry = get_code_ptr_entry(code_iter.next_index());
     VerifyPointer(Code::GetCodeFromTargetAddress(code_entry));
   }
-  for (int i = 0; i < count_of_heap_ptr_entries(); i++) {
-    VerifyObjectField(OffsetOfElementAt(first_heap_ptr_index() + i));
+  ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR);
+  while (!heap_iter.is_finished()) {
+    VerifyObjectField(OffsetOfElementAt(heap_iter.next_index()));
   }
 }
......
@@ -579,19 +579,36 @@ void FixedDoubleArray::FixedDoubleArrayPrint(FILE* out) {
 
 void ConstantPoolArray::ConstantPoolArrayPrint(FILE* out) {
   HeapObject::PrintHeader(out, "ConstantPoolArray");
   PrintF(out, " - length: %d", length());
-  for (int i = 0; i < length(); i++) {
-    if (i < first_code_ptr_index()) {
+  for (int i = 0; i <= last_index(INT32, SMALL_SECTION); i++) {
+    if (i < last_index(INT64, SMALL_SECTION)) {
       PrintF(out, "\n [%d]: double: %g", i, get_int64_entry_as_double(i));
-    } else if (i < first_heap_ptr_index()) {
+    } else if (i <= last_index(CODE_PTR, SMALL_SECTION)) {
       PrintF(out, "\n [%d]: code target pointer: %p", i,
              reinterpret_cast<void*>(get_code_ptr_entry(i)));
-    } else if (i < first_int32_index()) {
+    } else if (i <= last_index(HEAP_PTR, SMALL_SECTION)) {
       PrintF(out, "\n [%d]: heap pointer: %p", i,
              reinterpret_cast<void*>(get_heap_ptr_entry(i)));
-    } else {
+    } else if (i <= last_index(INT32, SMALL_SECTION)) {
       PrintF(out, "\n [%d]: int32: %d", i, get_int32_entry(i));
     }
   }
+  if (is_extended_layout()) {
+    PrintF(out, "\n Extended section:");
+    for (int i = first_extended_section_index();
+         i <= last_index(INT32, EXTENDED_SECTION); i++) {
+      if (i < last_index(INT64, EXTENDED_SECTION)) {
+        PrintF(out, "\n [%d]: double: %g", i, get_int64_entry_as_double(i));
+      } else if (i <= last_index(CODE_PTR, EXTENDED_SECTION)) {
+        PrintF(out, "\n [%d]: code target pointer: %p", i,
+               reinterpret_cast<void*>(get_code_ptr_entry(i)));
+      } else if (i <= last_index(HEAP_PTR, EXTENDED_SECTION)) {
+        PrintF(out, "\n [%d]: heap pointer: %p", i,
+               reinterpret_cast<void*>(get_heap_ptr_entry(i)));
+      } else if (i <= last_index(INT32, EXTENDED_SECTION)) {
+        PrintF(out, "\n [%d]: int32: %d", i, get_int32_entry(i));
+      }
+    }
+  }
   PrintF(out, "\n");
 }
......
@@ -498,23 +498,24 @@ template<typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
     Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();
-  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
-  for (int i = 0; i < constant_pool->count_of_code_ptr_entries(); i++) {
-    int index = constant_pool->first_code_ptr_index() + i;
-    Address code_entry =
-        reinterpret_cast<Address>(constant_pool->RawFieldOfElementAt(index));
+  ConstantPoolArray* array = ConstantPoolArray::cast(object);
+  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
+  while (!code_iter.is_finished()) {
+    Address code_entry = reinterpret_cast<Address>(
+        array->RawFieldOfElementAt(code_iter.next_index()));
     StaticVisitor::VisitCodeEntry(heap, code_entry);
   }
-  for (int i = 0; i < constant_pool->count_of_heap_ptr_entries(); i++) {
-    int index = constant_pool->first_heap_ptr_index() + i;
-    Object** slot = constant_pool->RawFieldOfElementAt(index);
+  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
+  while (!heap_iter.is_finished()) {
+    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
     HeapObject* object = HeapObject::cast(*slot);
     heap->mark_compact_collector()->RecordSlot(slot, slot, object);
     bool is_weak_object =
-        (constant_pool->get_weak_object_state() ==
+        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
          Code::IsWeakObjectInOptimizedCode(object)) ||
-        (constant_pool->get_weak_object_state() ==
+        (array->get_weak_object_state() ==
            ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
          Code::IsWeakObjectInIC(object));
     if (!is_weak_object) {
......
@@ -9863,13 +9863,36 @@ bool Map::EquivalentToForNormalization(Map* other,
 
 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
-  for (int i = 0; i < count_of_code_ptr_entries(); i++) {
-    int index = first_code_ptr_index() + i;
-    v->VisitCodeEntry(reinterpret_cast<Address>(RawFieldOfElementAt(index)));
+  ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR);
+  while (!code_iter.is_finished()) {
+    v->VisitCodeEntry(reinterpret_cast<Address>(
+        RawFieldOfElementAt(code_iter.next_index())));
   }
-  for (int i = 0; i < count_of_heap_ptr_entries(); i++) {
-    int index = first_heap_ptr_index() + i;
-    v->VisitPointer(RawFieldOfElementAt(index));
+  ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR);
+  while (!heap_iter.is_finished()) {
+    v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
   }
 }
+
+
+void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
+  Type type[] = { CODE_PTR, HEAP_PTR };
+  Address default_value[] = {
+        isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
+        reinterpret_cast<Address>(isolate->heap()->undefined_value()) };
+
+  for (int i = 0; i < 2; ++i) {
+    for (int s = 0; s <= final_section(); ++s) {
+      LayoutSection section = static_cast<LayoutSection>(s);
+      if (number_of_entries(type[i], section) > 0) {
+        int offset = OffsetOfElementAt(first_index(type[i], section));
+        MemsetPointer(
+          reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
+          default_value[i],
+          number_of_entries(type[i], section));
+      }
+    }
+  }
+}
......