Commit 07336053 authored by Sathya Gunasekaran, committed by Commit Bot

[heap] Make retained maps list be per context

Previously, a single retained maps list was shared across all contexts. When one context was disposed, that entire list of retained maps was dropped as well. This caused maps that were still alive to stop being retained, leading to deopts when such maps were embedded in code objects.

This patch makes the retained maps list per-context, so that only the dead maps are dropped when a context is disposed.
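A minimal standalone sketch of the idea (plain C++, not V8 internals; the Map, Context, and Heap types below are simplified stand-ins, not the classes touched by this patch): each context carries its own retained-maps list, so disposing one context no longer discards maps that other live contexts still rely on.

// Sketch only: models the per-context retained maps list.
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct Map { std::string name; };

// Stand-in for a NativeContext that now owns its own retained-maps list
// (analogous to the new RETAINED_MAPS context slot in this patch).
struct Context {
  std::vector<std::shared_ptr<Map>> retained_maps;
};

struct Heap {
  // Mirrors the new AddRetainedMap(context, map) shape: the map is recorded
  // on the given context instead of a single heap-wide list.
  void AddRetainedMap(Context* context, std::shared_ptr<Map> map) {
    context->retained_maps.push_back(std::move(map));
  }
  // Disposing a context clears only that context's retained maps; maps
  // retained by other, still-live contexts are untouched.
  void NotifyContextDisposed(Context* context) {
    context->retained_maps.clear();
  }
};

int main() {
  Heap heap;
  Context a, b;
  heap.AddRetainedMap(&a, std::make_shared<Map>(Map{"map-in-a"}));
  heap.AddRetainedMap(&b, std::make_shared<Map>(Map{"map-in-b"}));
  heap.NotifyContextDisposed(&a);
  // Prints: a retains 0 map(s), b retains 1 map(s)
  std::printf("a retains %zu map(s), b retains %zu map(s)\n",
              a.retained_maps.size(), b.retained_maps.size());
  return 0;
}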

Bug: v8:9684, v8:10431
Change-Id: I0a50f4f49c9f6d72367c62e950828a039220fdfc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2122016
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Sathya Gunasekaran <gsathya@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67225}
parent f5a31f0b
......@@ -988,7 +988,9 @@ class PipelineCompilationJob final : public OptimizedCompilationJob {
Status FinalizeJobImpl(Isolate* isolate) final;
// Registers weak object to optimized code dependencies.
void RegisterWeakObjectsInOptimizedCode(Handle<Code> code, Isolate* isolate);
void RegisterWeakObjectsInOptimizedCode(Isolate* isolate,
Handle<NativeContext> context,
Handle<Code> code);
private:
Zone zone_;
......@@ -1167,13 +1169,14 @@ PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
}
compilation_info()->SetCode(code);
compilation_info()->native_context().AddOptimizedCode(*code);
RegisterWeakObjectsInOptimizedCode(code, isolate);
Handle<NativeContext> context(compilation_info()->native_context(), isolate);
context->AddOptimizedCode(*code);
RegisterWeakObjectsInOptimizedCode(isolate, context, code);
return SUCCEEDED;
}
void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
Handle<Code> code, Isolate* isolate) {
Isolate* isolate, Handle<NativeContext> context, Handle<Code> code) {
std::vector<Handle<Map>> maps;
DCHECK(code->is_optimized_code());
{
......@@ -1191,7 +1194,7 @@ void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
}
}
for (Handle<Map> map : maps) {
isolate->heap()->AddRetainedMap(map);
isolate->heap()->AddRetainedMap(context, map);
}
code->set_can_have_weak_objects(true);
}
......
......@@ -1051,6 +1051,7 @@ Handle<NativeContext> Factory::NewNativeContext() {
context->set_serialized_objects(*empty_fixed_array());
context->set_microtask_queue(nullptr);
context->set_osr_code_cache(*empty_weak_fixed_array());
context->set_retained_maps(*empty_weak_array_list());
return context;
}
......
......@@ -1659,9 +1659,10 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);
if (!isolate()->context().is_null()) {
RemoveDirtyFinalizationRegistriesOnContext(isolate()->raw_native_context());
isolate()->raw_native_context().set_retained_maps(
ReadOnlyRoots(this).empty_weak_array_list());
}
number_of_disposed_maps_ = retained_maps().length();
tracer()->AddContextDisposalTime(MonotonicallyIncreasingTimeInMs());
return ++contexts_disposed_;
}
......@@ -5619,11 +5620,11 @@ void Heap::CompactWeakArrayLists(AllocationType allocation) {
set_script_list(*scripts);
}
void Heap::AddRetainedMap(Handle<Map> map) {
void Heap::AddRetainedMap(Handle<NativeContext> context, Handle<Map> map) {
if (map->is_in_retained_map_list()) {
return;
}
Handle<WeakArrayList> array(retained_maps(), isolate());
Handle<WeakArrayList> array(context->retained_maps(), isolate());
if (array->IsFull()) {
CompactRetainedMaps(*array);
}
......@@ -5632,17 +5633,15 @@ void Heap::AddRetainedMap(Handle<Map> map) {
array = WeakArrayList::AddToEnd(
isolate(), array,
MaybeObjectHandle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()));
if (*array != retained_maps()) {
set_retained_maps(*array);
if (*array != context->retained_maps()) {
context->set_retained_maps(*array);
}
map->set_is_in_retained_map_list(true);
}
void Heap::CompactRetainedMaps(WeakArrayList retained_maps) {
DCHECK_EQ(retained_maps, this->retained_maps());
int length = retained_maps.length();
int new_length = 0;
int new_number_of_disposed_maps = 0;
// This loop compacts the array by removing cleared weak cells.
for (int i = 0; i < length; i += 2) {
MaybeObject maybe_object = retained_maps.Get(i);
......@@ -5658,12 +5657,8 @@ void Heap::CompactRetainedMaps(WeakArrayList retained_maps) {
retained_maps.Set(new_length, maybe_object);
retained_maps.Set(new_length + 1, age);
}
if (i < number_of_disposed_maps_) {
new_number_of_disposed_maps += 2;
}
new_length += 2;
}
number_of_disposed_maps_ = new_number_of_disposed_maps;
HeapObject undefined = ReadOnlyRoots(this).undefined_value();
for (int i = new_length; i < length; i++) {
retained_maps.Set(i, HeapObjectReference::Strong(undefined));
......@@ -6281,6 +6276,17 @@ std::vector<Handle<NativeContext>> Heap::FindAllNativeContexts() {
return result;
}
std::vector<WeakArrayList> Heap::FindAllRetainedMaps() {
std::vector<WeakArrayList> result;
Object context = native_contexts_list();
while (!context.IsUndefined(isolate())) {
NativeContext native_context = NativeContext::cast(context);
result.push_back(native_context.retained_maps());
context = native_context.next_context_link();
}
return result;
}
size_t Heap::NumberOfDetachedContexts() {
// The detached_contexts() array has two entries per detached context.
return detached_contexts().length() / 2;
......
......@@ -668,7 +668,8 @@ class Heap {
void CompactWeakArrayLists(AllocationType allocation);
V8_EXPORT_PRIVATE void AddRetainedMap(Handle<Map> map);
V8_EXPORT_PRIVATE void AddRetainedMap(Handle<NativeContext> context,
Handle<Map> map);
// This event is triggered after successful allocation of a new object made
// by runtime. Allocations of target space for object evacuation do not
......@@ -1924,6 +1925,7 @@ class Heap {
#endif // DEBUG
std::vector<Handle<NativeContext>> FindAllNativeContexts();
std::vector<WeakArrayList> FindAllRetainedMaps();
MemoryMeasurement* memory_measurement() { return memory_measurement_.get(); }
// The amount of memory that has been freed concurrently.
......@@ -1980,11 +1982,6 @@ class Heap {
// For keeping track of context disposals.
int contexts_disposed_ = 0;
// The length of the retained_maps array at the time of context disposal.
// This separates maps in the retained_maps array that were created before
// and after context disposal.
int number_of_disposed_maps_ = 0;
NewSpace* new_space_ = nullptr;
OldSpace* old_space_ = nullptr;
CodeSpace* code_space_ = nullptr;
......
......@@ -435,42 +435,41 @@ void IncrementalMarking::RetainMaps() {
// - GC is requested by tests or dev-tools (abort_incremental_marking_).
bool map_retaining_is_disabled = heap()->ShouldReduceMemory() ||
FLAG_retain_maps_for_n_gc == 0;
WeakArrayList retained_maps = heap()->retained_maps();
int length = retained_maps.length();
// The number_of_disposed_maps separates maps in the retained_maps
// array that were created before and after context disposal.
// We do not age and retain disposed maps to avoid memory leaks.
int number_of_disposed_maps = heap()->number_of_disposed_maps_;
for (int i = 0; i < length; i += 2) {
MaybeObject value = retained_maps.Get(i);
HeapObject map_heap_object;
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
continue;
}
int age = retained_maps.Get(i + 1).ToSmi().value();
int new_age;
Map map = Map::cast(map_heap_object);
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
marking_state()->IsWhite(map)) {
if (ShouldRetainMap(map, age)) {
WhiteToGreyAndPush(map);
std::vector<WeakArrayList> retained_maps_list = heap()->FindAllRetainedMaps();
for (WeakArrayList retained_maps : retained_maps_list) {
int length = retained_maps.length();
for (int i = 0; i < length; i += 2) {
MaybeObject value = retained_maps.Get(i);
HeapObject map_heap_object;
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
continue;
}
Object prototype = map.prototype();
if (age > 0 && prototype.IsHeapObject() &&
marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
int age = retained_maps.Get(i + 1).ToSmi().value();
int new_age;
Map map = Map::cast(map_heap_object);
if (!map_retaining_is_disabled && marking_state()->IsWhite(map)) {
if (ShouldRetainMap(map, age)) {
WhiteToGreyAndPush(map);
}
Object prototype = map.prototype();
if (age > 0 && prototype.IsHeapObject() &&
marking_state()->IsWhite(HeapObject::cast(prototype))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
}
} else {
// The prototype and the constructor are marked, this map keeps only
// transition tree alive, not JSObjects. Do not age the map.
new_age = age;
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (new_age != age) {
retained_maps.Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
}
} else {
new_age = FLAG_retain_maps_for_n_gc;
}
// Compact the array and update the age.
if (new_age != age) {
retained_maps.Set(i + 1, MaybeObject::FromSmi(Smi::FromInt(new_age)));
}
}
}
......
......@@ -821,8 +821,6 @@ void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
ObjectStats::STRING_SPLIT_CACHE_TYPE);
RecordSimpleVirtualObjectStats(HeapObject(), heap_->regexp_multiple_cache(),
ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);
RecordSimpleVirtualObjectStats(HeapObject(), heap_->retained_maps(),
ObjectStats::RETAINED_MAPS_TYPE);
// WeakArrayList.
RecordSimpleVirtualObjectStats(HeapObject(),
......@@ -1075,6 +1073,9 @@ void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
if (context.IsNativeContext()) {
RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context.Size());
RecordSimpleVirtualObjectStats(context, context.retained_maps(),
ObjectStats::RETAINED_MAPS_TYPE);
} else if (context.IsFunctionContext()) {
RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context.Size());
} else {
......
......@@ -774,7 +774,6 @@ void Heap::CreateInitialObjects() {
}
set_detached_contexts(roots.empty_weak_array_list());
set_retained_maps(roots.empty_weak_array_list());
set_retaining_path_targets(roots.empty_weak_array_list());
set_feedback_vectors_for_profiling_tools(roots.undefined_value());
......
......@@ -366,6 +366,7 @@ enum ContextLookupFlags {
V(WEAKMAP_SET_INDEX, JSFunction, weakmap_set) \
V(WEAKMAP_GET_INDEX, JSFunction, weakmap_get) \
V(WEAKSET_ADD_INDEX, JSFunction, weakset_add) \
V(RETAINED_MAPS, WeakArrayList, retained_maps) \
V(OSR_CODE_CACHE_INDEX, WeakFixedArray, osr_code_cache)
// A table of all script contexts. Every loaded top-level script with top-level
......
......@@ -243,7 +243,6 @@ class Symbol;
V(FixedArray, materialized_objects, MaterializedObjects) \
V(WeakArrayList, detached_contexts, DetachedContexts) \
V(WeakArrayList, retaining_path_targets, RetainingPathTargets) \
V(WeakArrayList, retained_maps, RetainedMaps) \
/* Feedback vectors that we need for code coverage or type profile */ \
V(Object, feedback_vectors_for_profiling_tools, \
FeedbackVectorsForProfilingTools) \
......
......@@ -5051,7 +5051,8 @@ TEST(Regress3877) {
CHECK(weak_prototype_holder->Get(0)->IsCleared());
}
Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate,
Handle<NativeContext> context) {
HandleScope inner_scope(isolate);
Handle<Map> map = Map::Create(isolate, 1);
v8::Local<v8::Value> result =
......@@ -5059,18 +5060,24 @@ Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
Handle<JSReceiver> proto =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Map::SetPrototype(isolate, map, proto);
heap->AddRetainedMap(map);
isolate->heap()->AddRetainedMap(context, map);
Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
array->Set(0, HeapObjectReference::Weak(*map));
return inner_scope.CloseAndEscape(array);
}
void CheckMapRetainingFor(int n) {
FLAG_retain_maps_for_n_gc = n;
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
Handle<WeakFixedArray> array_with_map = AddRetainedMap(isolate, heap);
v8::Local<v8::Context> ctx = v8::Context::New(CcTest::isolate());
Handle<Context> context = Utils::OpenHandle(*ctx);
CHECK(context->IsNativeContext());
Handle<NativeContext> native_context = Handle<NativeContext>::cast(context);
ctx->Enter();
Handle<WeakFixedArray> array_with_map =
AddRetainedMap(isolate, native_context);
CHECK(array_with_map->Get(0)->IsWeak());
for (int i = 0; i < n; i++) {
heap::SimulateIncrementalMarking(heap);
......@@ -5080,6 +5087,8 @@ void CheckMapRetainingFor(int n) {
heap::SimulateIncrementalMarking(heap);
CcTest::CollectGarbage(OLD_SPACE);
CHECK(array_with_map->Get(0)->IsCleared());
ctx->Exit();
}
......@@ -5094,6 +5103,30 @@ TEST(MapRetaining) {
CheckMapRetainingFor(7);
}
TEST(RetainedMapsCleanup) {
if (!FLAG_incremental_marking) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
v8::Local<v8::Context> ctx = v8::Context::New(CcTest::isolate());
Handle<Context> context = Utils::OpenHandle(*ctx);
CHECK(context->IsNativeContext());
Handle<NativeContext> native_context = Handle<NativeContext>::cast(context);
ctx->Enter();
Handle<WeakFixedArray> array_with_map =
AddRetainedMap(isolate, native_context);
CHECK(array_with_map->Get(0)->IsWeak());
heap->NotifyContextDisposed(true);
CcTest::CollectAllGarbage();
ctx->Exit();
CHECK_EQ(ReadOnlyRoots(heap).empty_weak_array_list(),
native_context->retained_maps());
}
TEST(PreprocessStackTrace) {
// Do not automatically trigger early GC.
FLAG_gc_interval = -1;
......
......@@ -48,7 +48,6 @@ bool IsInitiallyMutable(Factory* factory, Address object_address) {
V(shared_wasm_memories) \
V(materialized_objects) \
V(public_symbol_table) \
V(retained_maps) \
V(retaining_path_targets) \
V(serialized_global_proxy_sizes) \
V(serialized_objects) \
......