Commit 54855b67 authored by Igor Sheludko, committed by Commit Bot

[ptr-compr] Move Heap::root() to Isolate

... and Heap::root_handle() to Isolate, with the root name strings moving into RootsTable.

This is a preliminary step before moving the IsolateData object from Heap to Isolate,
which is required for a pointer-compression-friendly heap layout.

Bug: v8:8182
Change-Id: Ideacc1c9e4435be7a33db08415ac1ad46e956199
Reviewed-on: https://chromium-review.googlesource.com/c/1273238
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56552}
parent b929b52e
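
For orientation, here is a minimal sketch of the accessor move this CL performs, abbreviated from the Heap and Isolate declarations in the diff below (an editor's sketch, not a verbatim excerpt; the surrounding types are elided):

// Before (abbreviated): the root accessors lived on Heap, so callers holding
// an Isolate* had to detour through isolate->heap().
class Heap {
 public:
  Object* root(RootIndex index) { return roots_table()[index]; }
  Handle<Object> root_handle(RootIndex index) {
    return Handle<Object>(&roots_table()[index]);
  }
};

// After (abbreviated): the same accessors live directly on Isolate.
class Isolate {
 public:
  Object* root(RootIndex index) { return roots_table()[index]; }
  Handle<Object> root_handle(RootIndex index) {
    return Handle<Object>(&roots_table()[index]);
  }
};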
@@ -16,7 +16,7 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
   map_ = new HeapObjectToIndexHashMap();
   for (RootIndex root_index = RootIndex::kFirstStrongRoot;
        root_index <= RootIndex::kLastStrongRoot; ++root_index) {
-    Object* root = isolate->heap()->root(root_index);
+    Object* root = isolate->root(root_index);
     if (!root->IsHeapObject()) continue;
     // Omit root entries that can be written after initialization. They must
     // not be referenced through the root list in the snapshot.
@@ -966,7 +966,7 @@ Node* CodeAssembler::AtomicLoad(MachineType rep, Node* base, Node* offset) {
 TNode<Object> CodeAssembler::LoadRoot(RootIndex root_index) {
   if (RootsTable::IsImmortalImmovable(root_index)) {
-    Handle<Object> root = isolate()->heap()->root_handle(root_index);
+    Handle<Object> root = isolate()->root_handle(root_index);
     if (root->IsSmi()) {
       return SmiConstant(Smi::cast(*root));
     } else {
@@ -135,7 +135,7 @@ const char* V8NameConverter::RootRelativeName(int offset) const {
     HeapStringAllocator allocator;
     StringStream accumulator(&allocator);
-    isolate_->heap()->root(root_index)->ShortPrint(&accumulator);
+    isolate_->root(root_index)->ShortPrint(&accumulator);
     std::unique_ptr<char[]> obj_name = accumulator.ToCString();
     SNPrintF(v8_buffer_, "root (%s)", obj_name.get());
@@ -156,7 +156,7 @@ Object** CanonicalHandleScope::Lookup(Object* object) {
   if (object->IsHeapObject()) {
     RootIndex root_index;
     if (root_index_map_->Lookup(HeapObject::cast(object), &root_index)) {
-      return isolate_->heap()->root_handle(root_index).location();
+      return isolate_->root_handle(root_index).location();
     }
   }
   Object*** entry = identity_map_->Get(object);
@@ -294,7 +294,7 @@ Handle<FixedArray> Factory::NewFixedArrayWithFiller(RootIndex map_root_index,
                                                     PretenureFlag pretenure) {
   HeapObject* result = AllocateRawFixedArray(length, pretenure);
   DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
-  Map* map = Map::cast(isolate()->heap()->root(map_root_index));
+  Map* map = Map::cast(isolate()->root(map_root_index));
   result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
   Handle<FixedArray> array(FixedArray::cast(result), isolate());
   array->set_length(length);
@@ -326,7 +326,7 @@ Handle<T> Factory::NewWeakFixedArrayWithMap(RootIndex map_root_index,
   HeapObject* result =
       AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
-  Map* map = Map::cast(isolate()->heap()->root(map_root_index));
+  Map* map = Map::cast(isolate()->root(map_root_index));
   result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
   Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
@@ -2398,13 +2398,13 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
   if (size == 0) return nullptr;
   HeapObject* filler = HeapObject::FromAddress(addr);
   if (size == kPointerSize) {
-    filler->set_map_after_allocation(
-        reinterpret_cast<Map*>(root(RootIndex::kOnePointerFillerMap)),
-        SKIP_WRITE_BARRIER);
+    filler->set_map_after_allocation(reinterpret_cast<Map*>(isolate()->root(
+                                         RootIndex::kOnePointerFillerMap)),
+                                     SKIP_WRITE_BARRIER);
   } else if (size == 2 * kPointerSize) {
-    filler->set_map_after_allocation(
-        reinterpret_cast<Map*>(root(RootIndex::kTwoPointerFillerMap)),
-        SKIP_WRITE_BARRIER);
+    filler->set_map_after_allocation(reinterpret_cast<Map*>(isolate()->root(
+                                         RootIndex::kTwoPointerFillerMap)),
+                                     SKIP_WRITE_BARRIER);
     if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
       Memory<Address>(addr + kPointerSize) =
           static_cast<Address>(kClearedFreeMemoryValue);
@@ -2412,7 +2412,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
   } else {
     DCHECK_GT(size, 2 * kPointerSize);
     filler->set_map_after_allocation(
-        reinterpret_cast<Map*>(root(RootIndex::kFreeSpaceMap)),
+        reinterpret_cast<Map*>(isolate()->root(RootIndex::kFreeSpaceMap)),
         SKIP_WRITE_BARRIER);
     FreeSpace::cast(filler)->relaxed_write_size(size);
     if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
@@ -655,12 +655,6 @@ class Heap {
   MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR

-  // TODO(ishell): move to Isolate
-  Object* root(RootIndex index) { return roots_table()[index]; }
-  Handle<Object> root_handle(RootIndex index) {
-    return Handle<Object>(&roots_table()[index]);
-  }
-
   // Sets the stub_cache_ (only used when expanding the dictionary).
   V8_INLINE void SetRootCodeStubs(SimpleNumberDictionary* value);
   V8_INLINE void SetRootMaterializedObjects(FixedArray* objects);
@@ -123,7 +123,8 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
   // Map::cast cannot be used due to uninitialized map field.
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map_after_allocation(
-      reinterpret_cast<Map*>(root(RootIndex::kMetaMap)), SKIP_WRITE_BARRIER);
+      reinterpret_cast<Map*>(isolate()->root(RootIndex::kMetaMap)),
+      SKIP_WRITE_BARRIER);
   map->set_instance_type(instance_type);
   map->set_instance_size(instance_size);
   // Initialize to only containing tagged fields.
@@ -2973,7 +2973,8 @@ size_t FreeListCategory::SumFreeList() {
   size_t sum = 0;
   FreeSpace* cur = top();
   while (cur != nullptr) {
-    DCHECK(cur->map() == page()->heap()->root(RootIndex::kFreeSpaceMap));
+    DCHECK_EQ(cur->map(),
+              page()->heap()->isolate()->root(RootIndex::kFreeSpaceMap));
     sum += cur->relaxed_read_size();
     cur = cur->next();
   }
@@ -55,7 +55,7 @@ void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
   // TODO(jgruber, v8:6666): Support loads through the root register once it
   // exists.
   if (RootsTable::IsImmortalImmovable(index)) {
-    Handle<Object> object = isolate()->heap()->root_handle(index);
+    Handle<Object> object = isolate()->root_handle(index);
     if (object->IsSmi()) {
       mov(destination, Immediate(Smi::cast(*object)));
       return;
@@ -84,7 +84,7 @@ void MacroAssembler::CompareRoot(Register with, Register scratch,
 void MacroAssembler::CompareRoot(Register with, RootIndex index) {
   DCHECK(RootsTable::IsImmortalImmovable(index));
-  Handle<Object> object = isolate()->heap()->root_handle(index);
+  Handle<Object> object = isolate()->root_handle(index);
   if (object->IsHeapObject()) {
     cmp(with, Handle<HeapObject>::cast(object));
   } else {
@@ -94,7 +94,7 @@ void MacroAssembler::CompareRoot(Register with, RootIndex index) {
 void MacroAssembler::CompareRoot(Operand with, RootIndex index) {
   DCHECK(RootsTable::IsImmortalImmovable(index));
-  Handle<Object> object = isolate()->heap()->root_handle(index);
+  Handle<Object> object = isolate()->root_handle(index);
   if (object->IsHeapObject()) {
     cmp(with, Handle<HeapObject>::cast(object));
   } else {
@@ -104,7 +104,7 @@ void MacroAssembler::CompareRoot(Operand with, RootIndex index) {
 void MacroAssembler::PushRoot(RootIndex index) {
   DCHECK(RootsTable::IsImmortalImmovable(index));
-  Handle<Object> object = isolate()->heap()->root_handle(index);
+  Handle<Object> object = isolate()->root_handle(index);
   if (object->IsHeapObject()) {
     Push(Handle<HeapObject>::cast(object));
   } else {
@@ -3882,7 +3882,7 @@ Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
                                   Handle<String> name, bool private_symbol) {
   Handle<String> key = factory()->InternalizeString(name);
   Handle<NameDictionary> dictionary =
-      Handle<NameDictionary>::cast(heap()->root_handle(dictionary_index));
+      Handle<NameDictionary>::cast(root_handle(dictionary_index));
   int entry = dictionary->FindEntry(this, key);
   Handle<Symbol> symbol;
   if (entry == NameDictionary::kNotFound) {
@@ -1012,6 +1012,12 @@ class Isolate : private HiddenFactory {
   // offset from kRootRegister.
   inline base::AddressRegion root_register_addressable_region();

+  Object* root(RootIndex index) { return roots_table()[index]; }
+
+  Handle<Object> root_handle(RootIndex index) {
+    return Handle<Object>(&roots_table()[index]);
+  }
+
   ExternalReferenceTable* external_reference_table() {
     DCHECK(isolate_data()->external_reference_table()->is_initialized());
     return isolate_data()->external_reference_table();
@@ -2210,33 +2210,32 @@ bool TransitionsAccessor::IsConsistentWithBackPointers() {
 // Estimates if there is a path from the object to a context.
 // This function is not precise, and can return false even if
 // there is a path to a context.
-bool CanLeak(Object* obj, Heap* heap) {
+bool CanLeak(Object* obj, Isolate* isolate) {
   if (!obj->IsHeapObject()) return false;
   if (obj->IsCell()) {
-    return CanLeak(Cell::cast(obj)->value(), heap);
+    return CanLeak(Cell::cast(obj)->value(), isolate);
   }
   if (obj->IsPropertyCell()) {
-    return CanLeak(PropertyCell::cast(obj)->value(), heap);
+    return CanLeak(PropertyCell::cast(obj)->value(), isolate);
   }
   if (obj->IsContext()) return true;
   if (obj->IsMap()) {
     Map* map = Map::cast(obj);
     for (RootIndex root_index = RootIndex::kFirstStrongRoot;
          root_index <= RootIndex::kLastStrongRoot; ++root_index) {
-      if (map == heap->root(root_index)) return false;
+      if (map == isolate->root(root_index)) return false;
     }
     return true;
   }
-  return CanLeak(HeapObject::cast(obj)->map(), heap);
+  return CanLeak(HeapObject::cast(obj)->map(), isolate);
 }

 void Code::VerifyEmbeddedObjects(Isolate* isolate, VerifyMode mode) {
   if (kind() == OPTIMIZED_FUNCTION) return;
-  Heap* heap = isolate->heap();
   int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(this, mask); !it.done(); it.next()) {
     Object* target = it.rinfo()->target_object();
-    DCHECK(!CanLeak(target, heap));
+    DCHECK(!CanLeak(target, isolate));
   }
 }
@@ -1459,20 +1459,22 @@ int FreeSpace::Size() { return size(); }

 FreeSpace* FreeSpace::next() {
-  DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
-                      RootIndex::kFreeSpaceMap) ||
-         (!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
-          map() == nullptr));
+#ifdef DEBUG
+  Heap* heap = Heap::FromWritableHeapObject(this);
+  DCHECK_IMPLIES(map() != heap->isolate()->root(RootIndex::kFreeSpaceMap),
+                 !heap->deserialization_complete() && map() == nullptr);
+#endif
   DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
   return reinterpret_cast<FreeSpace*>(Memory<Address>(address() + kNextOffset));
 }

 void FreeSpace::set_next(FreeSpace* next) {
-  DCHECK(map() == Heap::FromWritableHeapObject(this)->root(
-                      RootIndex::kFreeSpaceMap) ||
-         (!Heap::FromWritableHeapObject(this)->deserialization_complete() &&
-          map() == nullptr));
+#ifdef DEBUG
+  Heap* heap = Heap::FromWritableHeapObject(this);
+  DCHECK_IMPLIES(map() != heap->isolate()->root(RootIndex::kFreeSpaceMap),
+                 !heap->deserialization_complete() && map() == nullptr);
+#endif
   DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
   base::Relaxed_Store(
       reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
@@ -916,7 +916,7 @@ static const struct {
 } native_context_names[] = {
 #define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
     NATIVE_CONTEXT_FIELDS(CONTEXT_FIELD_INDEX_NAME)
-#undef CONTEXT_FIELD_INDEX
+#undef CONTEXT_FIELD_INDEX_NAME
 };

 void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
@@ -1673,24 +1673,13 @@ void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
     SetUserGlobalReference(global);
   }

-// This static array is used to prevent excessive code-size in
-// GetStrongGcSubrootName below, which would happen if we called emplace() for
-// every root in a macro.
-static const char* root_names[] = {
-#define ROOT_NAME(type, name, CamelName) #name,
-    READ_ONLY_ROOT_LIST(ROOT_NAME) MUTABLE_ROOT_LIST(ROOT_NAME)
-#undef ROOT_NAME
-};
-STATIC_ASSERT(static_cast<uint16_t>(RootIndex::kRootListLength) ==
-              arraysize(root_names));
-
 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
   if (strong_gc_subroot_names_.empty()) {
-    for (uint16_t i = 0; i < static_cast<uint16_t>(RootIndex::kRootListLength);
-         i++) {
-      const char* name = root_names[i];
-      RootIndex index = static_cast<RootIndex>(i);
-      strong_gc_subroot_names_.emplace(heap_->root(index), name);
+    Isolate* isolate = heap_->isolate();
+    for (RootIndex root_index = RootIndex::kFirstStrongRoot;
+         root_index <= RootIndex::kLastStrongRoot; ++root_index) {
+      const char* name = RootsTable::name(root_index);
+      strong_gc_subroot_names_.emplace(isolate->root(root_index), name);
     }
     CHECK(!strong_gc_subroot_names_.empty());
   }
@@ -23,10 +23,11 @@ V8_INLINE RootIndex operator++(RootIndex& index) {
   return index;
 }

-ReadOnlyRoots::ReadOnlyRoots(Heap* heap) : roots_table_(heap->roots_table()) {}
+ReadOnlyRoots::ReadOnlyRoots(Heap* heap)
+    : roots_table_(heap->isolate()->roots_table()) {}

 ReadOnlyRoots::ReadOnlyRoots(Isolate* isolate)
-    : roots_table_(isolate->heap()->roots_table()) {}
+    : roots_table_(isolate->roots_table()) {}

 #define ROOT_ACCESSOR(type, name, CamelName) \
   type* ReadOnlyRoots::name() {              \
@@ -8,6 +8,12 @@
 namespace v8 {
 namespace internal {

+const char* RootsTable::root_names_[RootsTable::kEntriesCount] = {
+#define ROOT_NAME(type, name, CamelName) #name,
+    ROOT_LIST(ROOT_NAME)
+#undef ROOT_NAME
+};
+
 // static
 RootIndex RootsTable::RootIndexForFixedTypedArray(
     ExternalArrayType array_type) {
@@ -402,6 +402,12 @@ class RootsTable {
     return roots_[index];
   }

+  static const char* name(RootIndex root_index) {
+    size_t index = static_cast<size_t>(root_index);
+    DCHECK_LT(index, kEntriesCount);
+    return root_names_[index];
+  }
+
   static RootIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);
   static RootIndex RootIndexForFixedTypedArray(ElementsKind elements_kind);
   static RootIndex RootIndexForEmptyFixedTypedArray(ElementsKind elements_kind);
@@ -448,6 +454,7 @@ class RootsTable {
   }

   Object* roots_[kEntriesCount];
+  static const char* root_names_[kEntriesCount];

   friend class Isolate;
   friend class Heap;
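
As a usage note: the new static RootsTable::name() accessor, backed by the root_names_ table above, makes it straightforward to enumerate roots by name outside the heap snapshot code as well. A hedged sketch (a hypothetical debugging helper, not part of this CL, reusing the strong-root iteration pattern from the hunks above):

// Hypothetical helper, not part of this CL: prints every strong root with
// its name via the RootsTable::name() accessor added above.
void PrintStrongRoots(Isolate* isolate) {
  for (RootIndex root_index = RootIndex::kFirstStrongRoot;
       root_index <= RootIndex::kLastStrongRoot; ++root_index) {
    PrintF("%s: %p\n", RootsTable::name(root_index),
           static_cast<void*>(isolate->root(root_index)));
  }
}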
@@ -679,7 +679,7 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
         int id = data & kRootArrayConstantsMask;
         RootIndex root_index = static_cast<RootIndex>(id);
         MaybeObject* object =
-            MaybeObject::FromObject(isolate->heap()->root(root_index));
+            MaybeObject::FromObject(isolate->root(root_index));
         DCHECK(!Heap::InNewSpace(object));
         DCHECK(!allocator()->next_reference_is_weak());
         UnalignedCopy(current++, &object);
@@ -812,7 +812,7 @@ MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
   } else if (where == kRootArray) {
     int id = source_.GetInt();
     RootIndex root_index = static_cast<RootIndex>(id);
-    new_object = isolate->heap()->root(root_index);
+    new_object = isolate->root(root_index);
     emit_write_barrier = Heap::InNewSpace(new_object);
     hot_objects_.Add(HeapObject::cast(new_object));
   } else if (where == kPartialSnapshotCache) {
@@ -63,7 +63,6 @@ TEST(InvalidLab) {
 TEST(UnusedLabImplicitClose) {
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
-  heap->root(RootIndex::kOnePointerFillerMap);
   const int kLabSize = 4 * KB;
   Address base = AllocateLabBackingStore(heap, kLabSize);
   Address limit = base + kLabSize;