Commit 56e66b32 authored by Dan Elphick, committed by Commit Bot

[explicit isolates] Convert heap/ to use ReadOnlyRoots

In future the RO_SPACE root accessors in Heap will become private, so
instead convert them all to use ReadOnlyRoots.

Bug: v8:7786
Change-Id: I67420b74440c7620b0eb3aa7980b08c0e43a1e75
Reviewed-on: https://chromium-review.googlesource.com/1125729
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54211}
parent 2075910f
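
The conversion applied below is mechanical and takes one of two shapes, sketched here for orientation (illustrative fragments adapted from the hunks that follow; not standalone code):

// Shape 1: a one-off access constructs a temporary ReadOnlyRoots from
// whichever of Heap* / Isolate* is already in scope.
//   before: Object* undefined = heap->undefined_value();
Object* undefined = ReadOnlyRoots(heap).undefined_value();

// Shape 2: a function that touches several roots caches a named handle once
// and reuses it (as in Factory::NewScriptWithId and Factory::NewModule below).
ReadOnlyRoots roots(isolate());
script->set_name(roots.undefined_value());
script->set_context_data(roots.undefined_value());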
@@ -399,7 +399,8 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(int length,
   result->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
   Handle<FixedArray> array(FixedArray::cast(result), isolate());
   array->set_length(length);
-  MemsetPointer(array->data_start(), heap->undefined_value(), length);
+  MemsetPointer(array->data_start(), ReadOnlyRoots(heap).undefined_value(),
+                length);
   return array;
 }
@@ -1548,17 +1549,18 @@ Handle<Script> Factory::NewScriptWithId(Handle<String> source, int script_id,
   DCHECK(tenure == TENURED || tenure == TENURED_READ_ONLY);
   // Create and initialize script object.
   Heap* heap = isolate()->heap();
+  ReadOnlyRoots roots(heap);
   Handle<Script> script = Handle<Script>::cast(NewStruct(SCRIPT_TYPE, tenure));
   script->set_source(*source);
-  script->set_name(heap->undefined_value());
+  script->set_name(roots.undefined_value());
   script->set_id(script_id);
   script->set_line_offset(0);
   script->set_column_offset(0);
-  script->set_context_data(heap->undefined_value());
+  script->set_context_data(roots.undefined_value());
   script->set_type(Script::TYPE_NORMAL);
-  script->set_wrapper(heap->undefined_value());
-  script->set_line_ends(heap->undefined_value());
-  script->set_eval_from_shared_or_wrapped_arguments(heap->undefined_value());
+  script->set_wrapper(roots.undefined_value());
+  script->set_line_ends(roots.undefined_value());
+  script->set_eval_from_shared_or_wrapped_arguments(roots.undefined_value());
   script->set_eval_from_position(0);
   script->set_shared_function_infos(*empty_weak_fixed_array(),
                                     SKIP_WRITE_BARRIER);
@@ -2048,7 +2050,7 @@ Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
   for (int i = 0; i < old_len; i++) result->Set(i, src->Get(i), mode);
   HeapObjectReference* undefined_reference =
-      HeapObjectReference::Strong(isolate()->heap()->undefined_value());
+      HeapObjectReference::Strong(ReadOnlyRoots(isolate()).undefined_value());
   MemsetPointer(result->data_start() + old_len, undefined_reference, grow_by);
   return Handle<WeakFixedArray>(result, isolate());
 }
@@ -2070,7 +2072,7 @@ Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
   for (int i = 0; i < old_capacity; i++) result->Set(i, src->Get(i), mode);
   HeapObjectReference* undefined_reference =
-      HeapObjectReference::Strong(isolate()->heap()->undefined_value());
+      HeapObjectReference::Strong(ReadOnlyRoots(isolate()).undefined_value());
   MemsetPointer(result->data_start() + old_capacity, undefined_reference,
                 grow_by);
   return Handle<WeakArrayList>(result, isolate());
@@ -2975,7 +2977,7 @@ Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() {
   FieldIndex index = FieldIndex::ForDescriptor(
       *map, JSModuleNamespace::kToStringTagFieldIndex);
   module_namespace->FastPropertyAtPut(index,
-                                      isolate()->heap()->Module_string());
+                                      ReadOnlyRoots(isolate()).Module_string());
   return module_namespace;
 }
@@ -3005,18 +3007,19 @@ Handle<Module> Factory::NewModule(Handle<SharedFunctionInfo> code) {
   Handle<FixedArray> requested_modules =
       requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
                                    : empty_fixed_array();
+  ReadOnlyRoots roots(isolate());
   Handle<Module> module = Handle<Module>::cast(NewStruct(MODULE_TYPE, TENURED));
   module->set_code(*code);
   module->set_exports(*exports);
   module->set_regular_exports(*regular_exports);
   module->set_regular_imports(*regular_imports);
   module->set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
-  module->set_module_namespace(isolate()->heap()->undefined_value());
+  module->set_module_namespace(roots.undefined_value());
   module->set_requested_modules(*requested_modules);
   module->set_script(Script::cast(code->script()));
   module->set_status(Module::kUninstantiated);
-  module->set_exception(isolate()->heap()->the_hole_value());
-  module->set_import_meta(isolate()->heap()->the_hole_value());
+  module->set_exception(roots.the_hole_value());
+  module->set_import_meta(roots.the_hole_value());
   module->set_dfs_index(-1);
   module->set_dfs_ancestor_index(-1);
   return module;
@@ -3588,8 +3591,9 @@ Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
   DCHECK(!shared->HasDebugInfo());
   debug_info->set_function_identifier(
       shared->function_identifier_or_debug_info());
-  debug_info->set_original_bytecode_array(heap->undefined_value());
-  debug_info->set_break_points(heap->empty_fixed_array());
+  debug_info->set_original_bytecode_array(
+      ReadOnlyRoots(heap).undefined_value());
+  debug_info->set_break_points(ReadOnlyRoots(heap).empty_fixed_array());
 
   // Link debug info to function.
   shared->SetDebugInfo(*debug_info);
@@ -4046,7 +4050,7 @@ Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) {
                         : side_effect_call_handler_info_map();
   Handle<CallHandlerInfo> info(CallHandlerInfo::cast(New(map, TENURED)),
                                isolate());
-  Object* undefined_value = isolate()->heap()->undefined_value();
+  Object* undefined_value = ReadOnlyRoots(isolate()).undefined_value();
   info->set_callback(undefined_value);
   info->set_js_callback(undefined_value);
   info->set_data(undefined_value);
@@ -3999,9 +3999,10 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
       // We need to find a FixedArrayBase map after walking the fillers.
       while (current->IsFiller()) {
         Address next = reinterpret_cast<Address>(current);
-        if (current->map() == heap_->one_pointer_filler_map()) {
+        if (current->map() == ReadOnlyRoots(heap_).one_pointer_filler_map()) {
           next += kPointerSize;
-        } else if (current->map() == heap_->two_pointer_filler_map()) {
+        } else if (current->map() ==
+                   ReadOnlyRoots(heap_).two_pointer_filler_map()) {
           next += 2 * kPointerSize;
         } else {
           next += current->Size();
@@ -588,7 +588,7 @@ void IncrementalMarking::FinalizeIncrementally() {
 }
 
 void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
   if (!IsMarking()) return;
-  Map* filler_map = heap_->one_pointer_filler_map();
+  Map* filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map();
 #ifdef ENABLE_MINOR_MC
   MinorMarkCompactCollector::MarkingState* minor_marking_state =
@@ -432,9 +432,11 @@ template <LiveObjectIterationMode mode>
 LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                           Address start)
     : chunk_(chunk),
-      one_word_filler_map_(chunk->heap()->one_pointer_filler_map()),
-      two_word_filler_map_(chunk->heap()->two_pointer_filler_map()),
-      free_space_map_(chunk->heap()->free_space_map()),
+      one_word_filler_map_(
+          ReadOnlyRoots(chunk->heap()).one_pointer_filler_map()),
+      two_word_filler_map_(
+          ReadOnlyRoots(chunk->heap()).two_pointer_filler_map()),
+      free_space_map_(ReadOnlyRoots(chunk->heap()).free_space_map()),
       it_(chunk, bitmap) {
   it_.Advance(Bitmap::IndexToCell(
       Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
@@ -103,7 +103,7 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
     object = HeapObject::FromAddress(current);
     // One word fillers at the end of a black area can be grey.
     if (IsBlackOrGrey(object) &&
-        object->map() != heap_->one_pointer_filler_map()) {
+        object->map() != ReadOnlyRoots(heap_).one_pointer_filler_map()) {
       CHECK(IsMarked(object));
       CHECK(current >= next_object_must_be_here_or_later);
       object->Iterate(this);
@@ -949,7 +949,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
     // Visit all HeapObject pointers in [start, end).
-    Object* the_hole = heap_->the_hole_value();
+    Object* the_hole = ReadOnlyRoots(heap_).the_hole_value();
     MarkCompactCollector::NonAtomicMarkingState* marking_state =
         heap_->mark_compact_collector()->non_atomic_marking_state();
     for (Object** p = start; p < end; p++) {
@@ -993,7 +993,7 @@ class ExternalStringTableCleaner : public RootVisitor {
     // Visit all HeapObject pointers in [start, end).
     MarkCompactCollector::NonAtomicMarkingState* marking_state =
         heap_->mark_compact_collector()->non_atomic_marking_state();
-    Object* the_hole = heap_->the_hole_value();
+    Object* the_hole = ReadOnlyRoots(heap_).the_hole_value();
     for (Object** p = start; p < end; p++) {
       Object* o = *p;
       if (o->IsHeapObject()) {
@@ -1903,7 +1903,7 @@ void MarkCompactCollector::TrimDescriptorArray(Map* map,
                                                DescriptorArray* descriptors) {
   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
   if (number_of_own_descriptors == 0) {
-    DCHECK(descriptors == heap_->empty_descriptor_array());
+    DCHECK(descriptors == ReadOnlyRoots(heap_).empty_descriptor_array());
     return;
   }
@@ -3840,7 +3840,7 @@ class YoungGenerationExternalStringTableCleaner : public RootVisitor {
             DCHECK(o->IsThinString());
           }
           // Set the entry to the_hole_value (as deleted).
-          *p = heap_->the_hole_value();
+          *p = ReadOnlyRoots(heap_).the_hole_value();
         }
       }
     }
@@ -405,7 +405,7 @@ bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject* obj,
     bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
     return CanRecordFixedArray(fixed_array) && cow_check;
   }
-  if (obj == heap_->empty_property_array()) return false;
+  if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false;
   return true;
 }
@@ -721,14 +721,15 @@ void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject* obj,
 }
 
 bool ObjectStatsCollectorImpl::CanRecordFixedArray(FixedArrayBase* array) {
-  return array != heap_->empty_fixed_array() &&
-         array != heap_->empty_sloppy_arguments_elements() &&
-         array != heap_->empty_slow_element_dictionary() &&
+  ReadOnlyRoots roots(heap_);
+  return array != roots.empty_fixed_array() &&
+         array != roots.empty_sloppy_arguments_elements() &&
+         array != roots.empty_slow_element_dictionary() &&
          array != heap_->empty_property_dictionary();
 }
 
 bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase* array) {
-  return array->map() == heap_->fixed_cow_array_map();
+  return array->map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
 }
 
 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject* obj1,
@@ -741,7 +742,8 @@ void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map* map) {
   // TODO(mlippautz): map->dependent_code(): DEPENDENT_CODE_TYPE.
 
   DescriptorArray* array = map->instance_descriptors();
-  if (map->owns_descriptors() && array != heap_->empty_descriptor_array()) {
+  if (map->owns_descriptors() &&
+      array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
     // DescriptorArray has its own instance type.
     EnumCache* enum_cache = array->GetEnumCache();
     RecordSimpleVirtualObjectStats(array, enum_cache->keys(),
@@ -27,7 +27,7 @@ struct WeakListVisitor;
 
 template <class T>
 Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
-  Object* undefined = heap->undefined_value();
+  Object* undefined = ReadOnlyRoots(heap).undefined_value();
   Object* head = undefined;
   T* tail = nullptr;
   bool record_slots = MustRecordSlots(heap);
@@ -78,7 +78,7 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
 
 template <class T>
 static void ClearWeakList(Heap* heap, Object* list) {
-  Object* undefined = heap->undefined_value();
+  Object* undefined = ReadOnlyRoots(heap).undefined_value();
   while (list != undefined) {
     T* candidate = reinterpret_cast<T*>(list);
     list = WeakListVisitor<T>::WeakNext(candidate);
@@ -108,7 +108,7 @@ struct WeakListVisitor<Code> {
   static void VisitPhantomObject(Heap* heap, Code* code) {
     // Even though the code is dying, its code_data_container can still be
     // alive. Clear the next_code_link slot to avoid a dangling pointer.
-    SetWeakNext(code, heap->undefined_value());
+    SetWeakNext(code, ReadOnlyRoots(heap).undefined_value());
   }
 };
@@ -165,7 +165,7 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                           ConsString* object, int object_size) {
   DCHECK(IsShortcutCandidate(map->instance_type()));
   if (!is_incremental_marking_ &&
-      object->unchecked_second() == heap()->empty_string()) {
+      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
     HeapObject* first = HeapObject::cast(object->unchecked_first());
 
     *slot = first;
@@ -248,7 +248,7 @@ void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
   Map* map = first_word.ToMap();
 
   // AllocationMementos are unrooted and shouldn't survive a scavenge
-  DCHECK_NE(heap()->allocation_memento_map(), map);
+  DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
 
   // Call the slow part of scavenge object.
   EvacuateObject(p, map, object);
@@ -2834,9 +2834,9 @@ void FreeListCategory::RepairFreeList(Heap* heap) {
   while (n != nullptr) {
     Map** map_location = reinterpret_cast<Map**>(n->address());
     if (*map_location == nullptr) {
-      *map_location = heap->free_space_map();
+      *map_location = ReadOnlyRoots(heap).free_space_map();
     } else {
-      DCHECK(*map_location == heap->free_space_map());
+      DCHECK(*map_location == ReadOnlyRoots(heap).free_space_map());
     }
     n = n->next();
   }
@@ -3382,7 +3382,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
     // Make the object consistent so the heap can be verified in OldSpaceStep.
     // We only need to do this in debug builds or if verify_heap is on.
     reinterpret_cast<Object**>(object->address())[0] =
-        heap()->fixed_array_map();
+        ReadOnlyRoots(heap()).fixed_array_map();
     reinterpret_cast<Object**>(object->address())[1] = Smi::kZero;
   }
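
A note on the design this diff leans on: ReadOnlyRoots is a cheap value type that can be constructed on the spot from either a Heap* or an Isolate*, so no new parameter has to be threaded through call sites, and once Heap's RO_SPACE accessors become private it is the remaining public road to those roots. A minimal self-contained analog of the pattern (all types and names below are hypothetical stand-ins, not V8 code):

#include <cassert>

// Hypothetical stand-ins for Heap/Isolate, just to show the shape.
struct Heap {
  int undefined_marker = 42;  // stands in for the real read-only root storage
};
struct Isolate {
  Heap heap_;
  Heap* heap() { return &heap_; }
};

// A zero-cost handle constructible from either context object. This mirrors
// how ReadOnlyRoots lets Heap's own root accessors go private without
// changing call-site ergonomics.
class ReadOnlyRootsLike {
 public:
  explicit ReadOnlyRootsLike(Heap* heap) : heap_(heap) {}
  explicit ReadOnlyRootsLike(Isolate* isolate) : heap_(isolate->heap()) {}
  int undefined_value() const { return heap_->undefined_marker; }

 private:
  Heap* heap_;
};

int main() {
  Isolate isolate;
  // Both construction paths reach the same underlying storage.
  assert(ReadOnlyRootsLike(&isolate).undefined_value() ==
         ReadOnlyRootsLike(isolate.heap()).undefined_value());
  return 0;
}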