Commit 93d757a0 authored by Dan Elphick, committed by Commit Bot

[explicit isolates] Eliminate most GetIsolates from heap/

Removes most explicit calls to GetIsolate/GetHeap from heap/ and implicit
calls via the single argument Handle constructor and handle function by
passing it in through calling functions.

(One GetHeap remains in FreeList in heap/spaces.cc for now).

Bug: v8:7786
Change-Id: I14fd099d26f94b9afbcfad41aa873c8f843297dd
Reviewed-on: https://chromium-review.googlesource.com/1087002
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53565}
parent 352f8009
......@@ -641,10 +641,11 @@ void GlobalHandles::IterateWeakRootsForFinalizers(RootVisitor* v) {
DISABLE_CFI_PERF
void GlobalHandles::IterateWeakRootsForPhantomHandles(
WeakSlotCallback should_reset_handle) {
WeakSlotCallbackWithHeap should_reset_handle) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
Node* node = it.node();
if (node->IsWeakRetainer() && should_reset_handle(node->location())) {
if (node->IsWeakRetainer() &&
should_reset_handle(isolate()->heap(), node->location())) {
if (node->IsPhantomResetHandle()) {
node->MarkPending();
node->ResetPhantomHandle();
......@@ -658,10 +659,12 @@ void GlobalHandles::IterateWeakRootsForPhantomHandles(
}
}
void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback should_reset_handle) {
void GlobalHandles::IdentifyWeakHandles(
WeakSlotCallbackWithHeap should_reset_handle) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
Node* node = it.node();
if (node->IsWeak() && should_reset_handle(node->location())) {
if (node->IsWeak() &&
should_reset_handle(isolate()->heap(), node->location())) {
if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
node->MarkPending();
}
......
......@@ -137,11 +137,12 @@ class GlobalHandles {
// Iterates over weak roots on the heap.
void IterateWeakRootsForFinalizers(RootVisitor* v);
void IterateWeakRootsForPhantomHandles(WeakSlotCallback should_reset_handle);
void IterateWeakRootsForPhantomHandles(
WeakSlotCallbackWithHeap should_reset_handle);
// Marks all handles that should be finalized based on the predicate
// |should_reset_handle| as pending.
void IdentifyWeakHandles(WeakSlotCallback should_reset_handle);
void IdentifyWeakHandles(WeakSlotCallbackWithHeap should_reset_handle);
// NOTE: Five ...NewSpace... functions below are used during
// scavenge collections and iterate over sets of handles that are
......
......@@ -60,11 +60,9 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
bool is_turbofanned, int stack_slots,
int safepoint_table_offset, int handler_table_offset) {
DCHECK(IsAligned(code->address(), kCodeAlignment));
DCHECK(
!code->GetIsolate()->heap()->memory_allocator()->code_range()->valid() ||
code->GetIsolate()->heap()->memory_allocator()->code_range()->contains(
code->address()) ||
object_size <= code->GetIsolate()->heap()->code_space()->AreaSize());
DCHECK(!heap->memory_allocator()->code_range()->valid() ||
heap->memory_allocator()->code_range()->contains(code->address()) ||
object_size <= heap->code_space()->AreaSize());
bool has_unwinding_info = desc.unwinding_info != nullptr;
......@@ -2335,20 +2333,24 @@ Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) {
Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
// Make sure to use globals from the function's context, since the function
// can be from a different context.
Handle<Context> native_context(function->context()->native_context());
Handle<Context> native_context(function->context()->native_context(),
isolate());
Handle<Map> new_map;
if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared()->kind()))) {
new_map = handle(native_context->async_generator_object_prototype_map());
new_map = handle(native_context->async_generator_object_prototype_map(),
isolate());
} else if (IsResumableFunction(function->shared()->kind())) {
// Generator and async function prototypes can share maps since they
// don't have "constructor" properties.
new_map = handle(native_context->generator_object_prototype_map());
new_map =
handle(native_context->generator_object_prototype_map(), isolate());
} else {
// Each function prototype gets a fresh map to avoid unwanted sharing of
// maps between prototypes of different constructors.
Handle<JSFunction> object_function(native_context->object_function());
Handle<JSFunction> object_function(native_context->object_function(),
isolate());
DCHECK(object_function->has_initial_map());
new_map = handle(object_function->initial_map());
new_map = handle(object_function->initial_map(), isolate());
}
DCHECK(!new_map->is_prototype_map());
......@@ -2365,7 +2367,8 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
Handle<SharedFunctionInfo> info, Handle<Context> context,
PretenureFlag pretenure) {
Handle<Map> initial_map(
Map::cast(context->native_context()->get(info->function_map_index())));
Map::cast(context->native_context()->get(info->function_map_index())),
isolate());
return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
pretenure);
}
......@@ -2374,7 +2377,8 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
Handle<SharedFunctionInfo> info, Handle<Context> context,
Handle<FeedbackCell> feedback_cell, PretenureFlag pretenure) {
Handle<Map> initial_map(
Map::cast(context->native_context()->get(info->function_map_index())));
Map::cast(context->native_context()->get(info->function_map_index())),
isolate());
return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
feedback_cell, pretenure);
}
......@@ -2663,7 +2667,7 @@ Handle<BytecodeArray> Factory::CopyBytecodeArray(
Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
PretenureFlag pretenure) {
JSFunction::EnsureHasInitialMap(constructor);
Handle<Map> map(constructor->initial_map());
Handle<Map> map(constructor->initial_map(), isolate());
return NewJSObjectFromMap(map, pretenure);
}
......@@ -2671,7 +2675,7 @@ Handle<JSObject> Factory::NewJSObjectWithNullProto(PretenureFlag pretenure) {
Handle<JSObject> result =
NewJSObject(isolate()->object_function(), pretenure);
Handle<Map> new_map =
Map::Copy(Handle<Map>(result->map()), "ObjectWithNullProto");
Map::Copy(Handle<Map>(result->map(), isolate()), "ObjectWithNullProto");
Map::SetPrototype(new_map, null_value());
JSObject::MigrateToMap(result, new_map);
return result;
......@@ -2680,7 +2684,7 @@ Handle<JSObject> Factory::NewJSObjectWithNullProto(PretenureFlag pretenure) {
Handle<JSGlobalObject> Factory::NewJSGlobalObject(
Handle<JSFunction> constructor) {
DCHECK(constructor->has_initial_map());
Handle<Map> map(constructor->initial_map());
Handle<Map> map(constructor->initial_map(), isolate());
DCHECK(map->is_dictionary_map());
// Make sure no field properties are described in the initial map.
......@@ -2705,14 +2709,14 @@ Handle<JSGlobalObject> Factory::NewJSGlobalObject(
// The global object might be created from an object template with accessors.
// Fill these accessors into the dictionary.
Handle<DescriptorArray> descs(map->instance_descriptors());
Handle<DescriptorArray> descs(map->instance_descriptors(), isolate());
for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
PropertyDetails details = descs->GetDetails(i);
// Only accessors are expected.
DCHECK_EQ(kAccessor, details.kind());
PropertyDetails d(kAccessor, details.attributes(),
PropertyCellType::kMutable);
Handle<Name> name(descs->GetKey(i));
Handle<Name> name(descs->GetKey(i), isolate());
Handle<PropertyCell> cell = NewPropertyCell(name);
cell->set_value(descs->GetStrongValue(i));
// |dictionary| already contains enough space for all properties.
......@@ -2821,7 +2825,8 @@ Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind,
JSFunction* array_function = native_context->array_function();
map = array_function->initial_map();
}
return Handle<JSArray>::cast(NewJSObjectFromMap(handle(map), pretenure));
return Handle<JSArray>::cast(
NewJSObjectFromMap(handle(map, isolate()), pretenure));
}
Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length,
......@@ -2882,8 +2887,9 @@ void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity,
Handle<JSWeakMap> Factory::NewJSWeakMap() {
Context* native_context = isolate()->raw_native_context();
Handle<Map> map(native_context->js_weak_map_fun()->initial_map());
Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)));
Handle<Map> map(native_context->js_weak_map_fun()->initial_map(), isolate());
Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)),
isolate());
{
// Do not leak handles for the hash table, it would make entries strong.
HandleScope scope(isolate());
......@@ -2907,7 +2913,7 @@ Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
Handle<JSFunction> function) {
DCHECK(IsResumableFunction(function->shared()->kind()));
JSFunction::EnsureHasInitialMap(function);
Handle<Map> map(function->initial_map());
Handle<Map> map(function->initial_map(), isolate());
DCHECK(map->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE);
......@@ -2951,14 +2957,16 @@ Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(SharedFlag shared,
Handle<JSFunction> array_buffer_fun(
shared == SharedFlag::kShared
? isolate()->native_context()->shared_array_buffer_fun()
: isolate()->native_context()->array_buffer_fun());
: isolate()->native_context()->array_buffer_fun(),
isolate());
Handle<Map> map(array_buffer_fun->initial_map(), isolate());
return Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, pretenure));
}
Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
bool done) {
Handle<Map> map(isolate()->native_context()->iterator_result_map());
Handle<Map> map(isolate()->native_context()->iterator_result_map(),
isolate());
Handle<JSIteratorResult> js_iter_result =
Handle<JSIteratorResult>::cast(NewJSObjectFromMap(map));
js_iter_result->set_value(*value);
......@@ -2968,7 +2976,8 @@ Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
Handle<JSReceiver> sync_iterator, Handle<Object> next) {
Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map());
Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map(),
isolate());
Handle<JSAsyncFromSyncIterator> iterator =
Handle<JSAsyncFromSyncIterator>::cast(NewJSObjectFromMap(map));
......@@ -2978,14 +2987,14 @@ Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
}
Handle<JSMap> Factory::NewJSMap() {
Handle<Map> map(isolate()->native_context()->js_map_map());
Handle<Map> map(isolate()->native_context()->js_map_map(), isolate());
Handle<JSMap> js_map = Handle<JSMap>::cast(NewJSObjectFromMap(map));
JSMap::Initialize(js_map, isolate());
return js_map;
}
Handle<JSSet> Factory::NewJSSet() {
Handle<Map> map(isolate()->native_context()->js_set_map());
Handle<Map> map(isolate()->native_context()->js_set_map(), isolate());
Handle<JSSet> js_set = Handle<JSSet>::cast(NewJSObjectFromMap(map));
JSSet::Initialize(js_set, isolate());
return js_set;
......@@ -3103,7 +3112,8 @@ void SetupArrayBufferView(i::Isolate* isolate,
Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
PretenureFlag pretenure) {
Handle<JSFunction> typed_array_fun(GetTypedArrayFun(type, isolate()));
Handle<JSFunction> typed_array_fun(GetTypedArrayFun(type, isolate()),
isolate());
Handle<Map> map(typed_array_fun->initial_map(), isolate());
return Handle<JSTypedArray>::cast(NewJSObjectFromMap(map, pretenure));
}
......
......@@ -334,17 +334,20 @@ bool Heap::InNewSpace(HeapObject* heap_object) {
return result;
}
// static
// Returns true iff |object| is a heap object currently located in the
// new-space from-semispace. Smis are never in from-space.
bool Heap::InFromSpace(Object* object) {
  DCHECK(!HasWeakHeapObjectTag(object));
  if (!object->IsHeapObject()) return false;
  return InFromSpace(HeapObject::cast(object));
}
// static
// Returns true iff |object| holds a strong or weak reference to a heap
// object located in from-space. Smi and cleared references return false.
bool Heap::InFromSpace(MaybeObject* object) {
  HeapObject* heap_object;
  if (!object->ToStrongOrWeakHeapObject(&heap_object)) return false;
  return InFromSpace(heap_object);
}
// static
bool Heap::InFromSpace(HeapObject* heap_object) {
return MemoryChunk::FromHeapObject(heap_object)
->IsFlagSet(Page::IN_FROM_SPACE);
......
......@@ -1523,15 +1523,16 @@ void Heap::MoveElements(FixedArray* array, int dst_index, int src_index,
// Helper class for verifying the string table.
class StringTableVerifier : public ObjectVisitor {
public:
explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
void VisitPointers(HeapObject* host, Object** start, Object** end) override {
// Visit all HeapObject pointers in [start, end).
for (Object** p = start; p < end; p++) {
DCHECK(!HasWeakHeapObjectTag(*p));
if ((*p)->IsHeapObject()) {
HeapObject* object = HeapObject::cast(*p);
Isolate* isolate = object->GetIsolate();
// Check that the string is actually internalized.
CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) ||
CHECK(object->IsTheHole(isolate_) || object->IsUndefined(isolate_) ||
object->IsInternalizedString());
}
}
......@@ -1540,12 +1541,14 @@ class StringTableVerifier : public ObjectVisitor {
MaybeObject** end) override {
UNREACHABLE();
}
};
private:
Isolate* isolate_;
};
static void VerifyStringTable(Heap* heap) {
StringTableVerifier verifier;
heap->string_table()->IterateElements(&verifier);
// Walks every element of the isolate's string table with a
// StringTableVerifier, which CHECKs that each slot holds an internalized
// string (or a hole/undefined filler value).
static void VerifyStringTable(Isolate* isolate) {
  StringTableVerifier verifier(isolate);
  isolate->heap()->string_table()->IterateElements(&verifier);
}
#endif // VERIFY_HEAP
......@@ -1694,7 +1697,7 @@ bool Heap::PerformGarbageCollection(
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyStringTable(this);
VerifyStringTable(this->isolate());
}
#endif
......@@ -1813,7 +1816,7 @@ bool Heap::PerformGarbageCollection(
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyStringTable(this);
VerifyStringTable(this->isolate());
}
#endif
......@@ -2476,20 +2479,21 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
class ExternalStringTableVisitorAdapter : public RootVisitor {
public:
explicit ExternalStringTableVisitorAdapter(
v8::ExternalResourceVisitor* visitor)
: visitor_(visitor) {}
Isolate* isolate, v8::ExternalResourceVisitor* visitor)
: isolate_(isolate), visitor_(visitor) {}
virtual void VisitRootPointers(Root root, const char* description,
Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
DCHECK((*p)->IsExternalString());
visitor_->VisitExternalString(
Utils::ToLocal(Handle<String>(String::cast(*p))));
Utils::ToLocal(Handle<String>(String::cast(*p), isolate_)));
}
}
private:
Isolate* isolate_;
v8::ExternalResourceVisitor* visitor_;
} external_string_table_visitor(visitor);
} external_string_table_visitor(isolate(), visitor);
external_string_table_.IterateAll(&external_string_table_visitor);
}
......@@ -5812,7 +5816,7 @@ Code* GcSafeCastToCode(Heap* heap, HeapObject* object, Address inner_pointer) {
bool Heap::GcSafeCodeContains(HeapObject* code, Address addr) {
Map* map = GcSafeMapOfCodeSpaceObject(code);
DCHECK(map == code->GetHeap()->code_map());
DCHECK(map == code_map());
#ifdef V8_EMBEDDED_BUILTINS
if (InstructionStream::TryLookupCode(isolate(), addr) == code) return true;
#endif
......
......@@ -1375,9 +1375,9 @@ class Heap {
inline bool InNewSpace(Object* object);
inline bool InNewSpace(MaybeObject* object);
inline bool InNewSpace(HeapObject* heap_object);
inline bool InFromSpace(Object* object);
inline bool InFromSpace(MaybeObject* object);
inline bool InFromSpace(HeapObject* heap_object);
static inline bool InFromSpace(Object* object);
static inline bool InFromSpace(MaybeObject* object);
static inline bool InFromSpace(HeapObject* heap_object);
inline bool InToSpace(Object* object);
inline bool InToSpace(MaybeObject* object);
inline bool InToSpace(HeapObject* heap_object);
......
......@@ -486,16 +486,14 @@ void IncrementalMarking::MarkRoots() {
heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
}
bool ShouldRetainMap(Map* map, int age) {
bool IncrementalMarking::ShouldRetainMap(Map* map, int age) {
if (age == 0) {
// The map has aged. Do not retain this map.
return false;
}
Object* constructor = map->GetConstructor();
Heap* heap = map->GetHeap();
if (!constructor->IsHeapObject() ||
heap->incremental_marking()->marking_state()->IsWhite(
HeapObject::cast(constructor))) {
marking_state()->IsWhite(HeapObject::cast(constructor))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
......
......@@ -290,6 +290,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void FinishBlackAllocation();
void MarkRoots();
bool ShouldRetainMap(Map* map, int age);
// Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
// increase chances of reusing of map transition tree in future.
void RetainMaps();
......
......@@ -1427,14 +1427,12 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
Heap* heap_;
};
bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
bool MarkCompactCollector::IsUnmarkedHeapObject(Heap* heap, Object** p) {
Object* o = *p;
if (!o->IsHeapObject()) return false;
HeapObject* heap_object = HeapObject::cast(o);
return heap_object->GetHeap()
->mark_compact_collector()
->non_atomic_marking_state()
->IsWhite(HeapObject::cast(o));
return heap->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
heap_object);
}
void MarkCompactCollector::MarkStringTable(
......@@ -2016,7 +2014,7 @@ static inline SlotCallbackResult UpdateSlot(
HeapObjectReferenceType reference_type) {
MapWord map_word = heap_obj->map_word();
if (map_word.IsForwardingAddress()) {
DCHECK(heap_obj->GetHeap()->InFromSpace(heap_obj) ||
DCHECK(Heap::InFromSpace(heap_obj) ||
MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
Page::FromAddress(heap_obj->address())
->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
......@@ -2029,7 +2027,7 @@ static inline SlotCallbackResult UpdateSlot(
} else {
base::AsAtomicPointer::Release_CompareAndSwap(slot, old, target);
}
DCHECK(!heap_obj->GetHeap()->InFromSpace(target));
DCHECK(!Heap::InFromSpace(target));
DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
}
// OLD_TO_OLD slots are always removed after updating.
......
......@@ -686,7 +686,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Callback function for telling whether the object *p is an unmarked
// heap object.
static bool IsUnmarkedHeapObject(Object** p);
static bool IsUnmarkedHeapObject(Heap* heap, Object** p);
// Clear non-live references in weak cells, transition and descriptor arrays,
// and deoptimize dependent code of non-live maps.
......
......@@ -106,8 +106,8 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
// Eagerly initialize the WeakCell cache for the map as it will not be
// writable in RO_SPACE.
HandleScope handle_scope(isolate());
Handle<WeakCell> weak_cell =
isolate()->factory()->NewWeakCell(Handle<Map>(map), TENURED_READ_ONLY);
Handle<WeakCell> weak_cell = isolate()->factory()->NewWeakCell(
Handle<Map>(map, isolate()), TENURED_READ_ONLY);
map->set_weak_cell_cache(*weak_cell);
}
......@@ -160,8 +160,8 @@ void Heap::FinalizePartialMap(Map* map) {
// Eagerly initialize the WeakCell cache for the map as it will not be
// writable in RO_SPACE.
HandleScope handle_scope(isolate());
Handle<WeakCell> weak_cell =
isolate()->factory()->NewWeakCell(Handle<Map>(map), TENURED_READ_ONLY);
Handle<WeakCell> weak_cell = isolate()->factory()->NewWeakCell(
Handle<Map>(map, isolate()), TENURED_READ_ONLY);
map->set_weak_cell_cache(*weak_cell);
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment