Commit ff953ac0 authored by ulan@chromium.org

Make maps in monomorphic IC stubs weak.

Maps in monomorphic Load, KeyedLoad, Store, KeyedStore, and CompareNil IC
stubs are treated as weak references by the marking visitor.

During generation of an IC stub with a weak map, the stub is appended to the
dependent code array of the map. When the map dies, all stubs in its dependent
code array are invalidated by setting embedded maps to undefined.

BUG=v8:2073
LOG=Y
TEST=cctest/test-heap/WeakMapInMonomorphic*IC
R=mstarzinger@chromium.org, verwaest@chromium.org

Review URL: https://codereview.chromium.org/188783003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20679 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 2754ab26
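To make the mechanism described above concrete before diving into the diff, here is a minimal standalone sketch of the lifecycle, using toy stand-ins (the struct names and fields below are illustrative only, not V8's actual Map/Code/DependentCode classes):

```cpp
#include <cstdio>
#include <vector>

// Toy model of the weak-IC lifecycle (assumption: simplified stand-ins for
// Map, Code, and the dependent code array; names are illustrative only).
struct Stub;

struct Map {
  std::vector<Stub*> dependent_ics;  // dependent code array (weak IC group)
};

struct Stub {
  Map* embedded_map;  // weakly embedded map; not kept alive by the stub
  bool invalidated;
};

// Analogue of IC::RegisterWeakMapDependency: called at stub generation time.
void RegisterWeakMapDependency(Stub* stub) {
  stub->embedded_map->dependent_ics.push_back(stub);
}

// Analogue of invalidation when the map dies: clear every dependent stub.
void InvalidateMapDied(Map* map) {
  for (Stub* stub : map->dependent_ics) {
    stub->embedded_map = nullptr;  // "set embedded maps to undefined"
    stub->invalidated = true;
  }
  map->dependent_ics.clear();
}

int main() {
  Map map;
  Stub stub{&map, false};
  RegisterWeakMapDependency(&stub);
  InvalidateMapDied(&map);  // simulate the map being garbage collected
  std::printf("stub invalidated: %d\n", stub.invalidated);  // prints 1
}
```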
......@@ -72,10 +72,11 @@ SmartArrayPointer<const char> CodeStub::GetName() {
}
void CodeStub::RecordCodeGeneration(Code* code, Isolate* isolate) {
void CodeStub::RecordCodeGeneration(Handle<Code> code, Isolate* isolate) {
IC::RegisterWeakMapDependency(code);
SmartArrayPointer<const char> name = GetName();
PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, name.get()));
GDBJIT(AddCode(GDBJITInterface::STUB, name.get(), code));
PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, *code, name.get()));
GDBJIT(AddCode(GDBJITInterface::STUB, name.get(), *code));
Counters* counters = isolate->counters();
counters->total_stubs_code_size()->Increment(code->instruction_size());
}
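A note on the signature change above: RegisterWeakMapDependency may allocate (the slow path of Map::AddDependentIC can grow the dependent code array), and an allocation can trigger a moving GC, so the code object must be held in a Handle<Code> rather than a raw Code*. A standalone sketch of that indirection, with toy types in place of V8's handle machinery:

```cpp
#include <cstdio>

// Toy illustration (not V8's Handle implementation) of why a handle survives
// a moving GC while a raw pointer does not: the handle points at a slot the
// GC rewrites when the object is relocated.
struct Code { int instruction_size; };

struct Handle {
  Code** location;                       // slot the GC rewrites on relocation
  Code* operator->() const { return *location; }
};

int main() {
  Code original{64};
  Code* slot = &original;                // handle-table slot
  Handle handle{&slot};
  Code* raw = &original;                 // raw pointer: invisible to the GC

  Code relocated = original;             // "GC" moves the object...
  slot = &relocated;                     // ...and updates the handle slot

  std::printf("via handle: %d\n", handle->instruction_size);  // valid: 64
  (void)raw;  // 'raw' still points at the old location; using it would be a bug
}
```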
......@@ -91,7 +92,7 @@ Handle<Code> CodeStub::GetCodeCopy(Isolate* isolate,
Handle<Code> ic = GetCode(isolate);
ic = isolate->factory()->CopyCode(ic);
ic->FindAndReplace(pattern);
RecordCodeGeneration(*ic, isolate);
RecordCodeGeneration(ic, isolate);
return ic;
}
......@@ -154,7 +155,7 @@ Handle<Code> CodeStub::GetCode(Isolate* isolate) {
Handle<Code> new_object = GenerateCode(isolate);
new_object->set_major_key(MajorKey());
FinishCode(new_object);
RecordCodeGeneration(*new_object, isolate);
RecordCodeGeneration(new_object, isolate);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code_stubs) {
......
......@@ -228,7 +228,7 @@ class CodeStub BASE_EMBEDDED {
private:
// Perform bookkeeping required after code generation when stub code is
// initially generated.
void RecordCodeGeneration(Code* code, Isolate* isolate);
void RecordCodeGeneration(Handle<Code> code, Isolate* isolate);
// Finish the code object after it has been generated.
virtual void FinishCode(Handle<Code> code) { }
......
......@@ -515,6 +515,8 @@ DEFINE_bool(trace_external_memory, false,
"it is adjusted.")
DEFINE_bool(collect_maps, true,
"garbage collect maps from which no objects can be reached")
DEFINE_bool(weak_embedded_maps_in_ic, true,
"make maps embedded in inline cache stubs weak")
DEFINE_bool(weak_embedded_maps_in_optimized_code, true,
"make maps embedded in optimized code weak")
DEFINE_bool(weak_embedded_objects_in_optimized_code, true,
......
......@@ -706,7 +706,7 @@ void AddWeakObjectToCodeDependency(Heap* heap,
Handle<Code> code) {
heap->EnsureWeakObjectToCodeTable();
Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(*object));
dep = DependentCode::Insert(dep, DependentCode::kWeaklyEmbeddedGroup, code);
dep = DependentCode::Insert(dep, DependentCode::kWeakCodeGroup, code);
CALL_HEAP_FUNCTION_VOID(heap->isolate(),
heap->AddWeakObjectToCodeDependency(*object, *dep));
}
......
......@@ -1710,180 +1710,6 @@ void Heap::UpdateReferencesInExternalStringTable(
}
template <class T>
struct WeakListVisitor;
template <class T>
static Object* VisitWeakList(Heap* heap,
Object* list,
WeakObjectRetainer* retainer,
bool record_slots) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
MarkCompactCollector* collector = heap->mark_compact_collector();
while (list != undefined) {
// Check whether to keep the candidate in the list.
T* candidate = reinterpret_cast<T*>(list);
Object* retained = retainer->RetainAs(list);
if (retained != NULL) {
if (head == undefined) {
// First element in the list.
head = retained;
} else {
// Subsequent elements in the list.
ASSERT(tail != NULL);
WeakListVisitor<T>::SetWeakNext(tail, retained);
if (record_slots) {
Object** next_slot =
HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
collector->RecordSlot(next_slot, next_slot, retained);
}
}
// Retained object is new tail.
ASSERT(!retained->IsUndefined());
candidate = reinterpret_cast<T*>(retained);
tail = candidate;
// tail is a live object, visit it.
WeakListVisitor<T>::VisitLiveObject(
heap, tail, retainer, record_slots);
} else {
WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
}
// Move to next element in the list.
list = WeakListVisitor<T>::WeakNext(candidate);
}
// Terminate the list if there are one or more elements.
if (tail != NULL) {
WeakListVisitor<T>::SetWeakNext(tail, undefined);
}
return head;
}
template <class T>
static void ClearWeakList(Heap* heap,
Object* list) {
Object* undefined = heap->undefined_value();
while (list != undefined) {
T* candidate = reinterpret_cast<T*>(list);
list = WeakListVisitor<T>::WeakNext(candidate);
WeakListVisitor<T>::SetWeakNext(candidate, undefined);
}
}
template<>
struct WeakListVisitor<JSFunction> {
static void SetWeakNext(JSFunction* function, Object* next) {
function->set_next_function_link(next);
}
static Object* WeakNext(JSFunction* function) {
return function->next_function_link();
}
static int WeakNextOffset() {
return JSFunction::kNextFunctionLinkOffset;
}
static void VisitLiveObject(Heap*, JSFunction*,
WeakObjectRetainer*, bool) {
}
static void VisitPhantomObject(Heap*, JSFunction*) {
}
};
template<>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code* code, Object* next) {
code->set_next_code_link(next);
}
static Object* WeakNext(Code* code) {
return code->next_code_link();
}
static int WeakNextOffset() {
return Code::kNextCodeLinkOffset;
}
static void VisitLiveObject(Heap*, Code*,
WeakObjectRetainer*, bool) {
}
static void VisitPhantomObject(Heap*, Code*) {
}
};
template<>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context* context, Object* next) {
context->set(Context::NEXT_CONTEXT_LINK,
next,
UPDATE_WRITE_BARRIER);
}
static Object* WeakNext(Context* context) {
return context->get(Context::NEXT_CONTEXT_LINK);
}
static void VisitLiveObject(Heap* heap,
Context* context,
WeakObjectRetainer* retainer,
bool record_slots) {
// Process the three weak lists linked off the context.
DoWeakList<JSFunction>(heap, context, retainer, record_slots,
Context::OPTIMIZED_FUNCTIONS_LIST);
DoWeakList<Code>(heap, context, retainer, record_slots,
Context::OPTIMIZED_CODE_LIST);
DoWeakList<Code>(heap, context, retainer, record_slots,
Context::DEOPTIMIZED_CODE_LIST);
}
template<class T>
static void DoWeakList(Heap* heap,
Context* context,
WeakObjectRetainer* retainer,
bool record_slots,
int index) {
// Visit the weak list, removing dead intermediate elements.
Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
record_slots);
// Update the list head.
context->set(index, list_head, UPDATE_WRITE_BARRIER);
if (record_slots) {
// Record the updated slot if necessary.
Object** head_slot = HeapObject::RawField(
context, FixedArray::SizeFor(index));
heap->mark_compact_collector()->RecordSlot(
head_slot, head_slot, list_head);
}
}
static void VisitPhantomObject(Heap* heap, Context* context) {
ClearWeakList<JSFunction>(heap,
context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
}
static int WeakNextOffset() {
return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
}
};
void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
// We don't record weak slots during marking or scavenges.
// Instead we do it once when we complete a mark-compact cycle.
......@@ -1909,66 +1735,6 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
}
template<>
struct WeakListVisitor<JSArrayBufferView> {
static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(JSArrayBufferView* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap*,
JSArrayBufferView* obj,
WeakObjectRetainer* retainer,
bool record_slots) {}
static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
static int WeakNextOffset() {
return JSArrayBufferView::kWeakNextOffset;
}
};
template<>
struct WeakListVisitor<JSArrayBuffer> {
static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(JSArrayBuffer* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap* heap,
JSArrayBuffer* array_buffer,
WeakObjectRetainer* retainer,
bool record_slots) {
Object* typed_array_obj =
VisitWeakList<JSArrayBufferView>(
heap,
array_buffer->weak_first_view(),
retainer, record_slots);
array_buffer->set_weak_first_view(typed_array_obj);
if (typed_array_obj != heap->undefined_value() && record_slots) {
Object** slot = HeapObject::RawField(
array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
}
}
static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
Runtime::FreeArrayBuffer(heap->isolate(), phantom);
}
static int WeakNextOffset() {
return JSArrayBuffer::kWeakNextOffset;
}
};
void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
bool record_slots) {
Object* array_buffer_obj =
......@@ -1990,29 +1756,6 @@ void Heap::TearDownArrayBuffers() {
}
template<>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(AllocationSite* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap* heap,
AllocationSite* site,
WeakObjectRetainer* retainer,
bool record_slots) {}
static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {}
static int WeakNextOffset() {
return AllocationSite::kWeakNextOffset;
}
};
void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
bool record_slots) {
Object* allocation_site_obj =
......@@ -4163,7 +3906,11 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->set_ic_age(global_ic_age_);
code->set_prologue_offset(prologue_offset);
if (code->kind() == Code::OPTIMIZED_FUNCTION) {
code->set_marked_for_deoptimization(false);
ASSERT(!code->marked_for_deoptimization());
}
if (code->is_inline_cache_stub()) {
ASSERT(!code->is_weak_stub());
ASSERT(!code->is_invalidated_weak_stub());
}
if (FLAG_enable_ool_constant_pool) {
......
......@@ -435,6 +435,42 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) {
}
void IC::RegisterWeakMapDependency(Handle<Code> stub) {
if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_ic &&
stub->CanBeWeakStub()) {
ASSERT(!stub->is_weak_stub());
MapHandleList maps;
stub->FindAllMaps(&maps);
if (maps.length() == 1 && stub->IsWeakObjectInIC(*maps.at(0))) {
maps.at(0)->AddDependentIC(stub);
stub->mark_as_weak_stub();
if (FLAG_enable_ool_constant_pool) {
stub->constant_pool()->set_weak_object_state(
ConstantPoolArray::WEAK_OBJECTS_IN_IC);
}
}
}
}
void IC::InvalidateMaps(Code* stub) {
ASSERT(stub->is_weak_stub());
stub->mark_as_invalidated_weak_stub();
Isolate* isolate = stub->GetIsolate();
Heap* heap = isolate->heap();
Object* undefined = heap->undefined_value();
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
for (RelocIterator it(stub, mode_mask); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT &&
it.rinfo()->target_object()->IsMap()) {
it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
}
}
CPU::FlushICache(stub->instruction_start(), stub->instruction_size());
}
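A self-contained sketch of the invalidation pattern in IC::InvalidateMaps above, with a vector of object slots standing in for the EMBEDDED_OBJECT relocation entries (toy types, not V8's RelocIterator):

```cpp
#include <cstdio>
#include <vector>

// Toy stand-ins: an "object" that may or may not be a map, and a stub whose
// embedded-object slots we patch, mirroring the RelocIterator loop above.
struct Object { bool is_map; };

Object undefined_value{false};  // plays the role of heap->undefined_value()

struct Stub {
  bool invalidated;
  std::vector<Object*> embedded_objects;  // EMBEDDED_OBJECT reloc targets
};

void InvalidateMaps(Stub* stub) {
  stub->invalidated = true;  // mark_as_invalidated_weak_stub()
  for (Object*& slot : stub->embedded_objects) {
    if (slot->is_map) slot = &undefined_value;  // set_target_object(undefined)
  }
  // The real code must also flush the instruction cache after patching.
}

int main() {
  Object map{true}, other{false};
  Stub stub{false, {&map, &other}};
  InvalidateMaps(&stub);
  std::printf("slot 0 is still a map: %d\n",
              stub.embedded_objects[0]->is_map);  // prints 0
}
```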
void IC::Clear(Isolate* isolate, Address address,
ConstantPoolArray* constant_pool) {
Code* target = GetTargetAtAddress(address, constant_pool);
......
......@@ -106,6 +106,14 @@ class IC {
return false;
}
// If the stub contains weak maps then this function adds the stub to
// the dependent code array of each weak map.
static void RegisterWeakMapDependency(Handle<Code> stub);
// This function is called when a weak map in the stub is dying; it
// invalidates the stub by setting the maps embedded in it to undefined.
static void InvalidateMaps(Code* stub);
// Clear the inline cache to initial state.
static void Clear(Isolate* isolate,
Address address,
......@@ -173,6 +181,9 @@ class IC {
// Set the call-site target.
void set_target(Code* code) {
#ifdef VERIFY_HEAP
code->VerifyEmbeddedObjectsDependency();
#endif
SetTargetAtAddress(address(), code, constant_pool());
target_set_ = true;
}
......
......@@ -211,7 +211,7 @@ void LCodeGenBase::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
#endif
for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
maps.at(i)->AddDependentCode(DependentCode::kWeakCodeGroup, code);
}
for (int i = 0; i < objects.length(); i++) {
AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
......
......@@ -449,7 +449,7 @@ void MarkCompactCollector::CollectGarbage() {
#ifdef VERIFY_HEAP
if (heap()->weak_embedded_objects_verification_enabled()) {
VerifyWeakEmbeddedObjectsInOptimizedCode();
VerifyWeakEmbeddedObjectsInCode();
}
if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
VerifyOmittedMapChecks();
......@@ -510,13 +510,13 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
}
void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() {
void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
HeapObjectIterator code_iterator(heap()->code_space());
for (HeapObject* obj = code_iterator.Next();
obj != NULL;
obj = code_iterator.Next()) {
Code* code = Code::cast(obj);
if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
if (!code->is_optimized_code() && !code->is_weak_stub()) continue;
if (WillBeDeoptimized(code)) continue;
code->VerifyEmbeddedObjectsDependency();
}
......@@ -2583,7 +2583,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
if (map_mark.Get()) {
ClearNonLiveDependentCode(map->dependent_code());
} else {
ClearAndDeoptimizeDependentCode(map->dependent_code());
ClearDependentCode(map->dependent_code());
map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
}
}
......@@ -2638,7 +2638,7 @@ void MarkCompactCollector::ClearNonLiveReferences() {
}
ClearNonLiveDependentCode(DependentCode::cast(value));
} else {
ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
ClearDependentCode(DependentCode::cast(value));
table->set(key_index, heap_->the_hole_value());
table->set(value_index, heap_->the_hole_value());
}
......@@ -2708,56 +2708,102 @@ void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
}
void MarkCompactCollector::ClearAndDeoptimizeDependentCode(
void MarkCompactCollector::ClearDependentICList(Object* head) {
Object* current = head;
Object* undefined = heap()->undefined_value();
while (current != undefined) {
Code* code = Code::cast(current);
if (IsMarked(code)) {
ASSERT(code->is_weak_stub());
IC::InvalidateMaps(code);
}
current = code->next_code_link();
code->set_next_code_link(undefined);
}
}
void MarkCompactCollector::ClearDependentCode(
DependentCode* entries) {
DisallowHeapAllocation no_allocation;
DependentCode::GroupStartIndexes starts(entries);
int number_of_entries = starts.number_of_entries();
if (number_of_entries == 0) return;
for (int i = 0; i < number_of_entries; i++) {
int g = DependentCode::kWeakICGroup;
if (starts.at(g) != starts.at(g + 1)) {
int i = starts.at(g);
ASSERT(i + 1 == starts.at(g + 1));
Object* head = entries->object_at(i);
ClearDependentICList(head);
}
g = DependentCode::kWeakCodeGroup;
for (int i = starts.at(g); i < starts.at(g + 1); i++) {
// If the entry is compilation info then the map must be alive,
// and ClearAndDeoptimizeDependentCode shouldn't be called.
// and ClearDependentCode shouldn't be called.
ASSERT(entries->is_code_at(i));
Code* code = entries->code_at(i);
if (IsMarked(code) && !code->marked_for_deoptimization()) {
code->set_marked_for_deoptimization(true);
code->InvalidateEmbeddedObjects();
have_code_to_deoptimize_ = true;
}
}
for (int i = 0; i < number_of_entries; i++) {
entries->clear_at(i);
}
}
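Both ClearDependentCode above and ClearNonLiveDependentCode below address the flat dependent-code array through GroupStartIndexes, where starts.at(g)..starts.at(g + 1) delimits group g. A small sketch of that layout, assuming a plain array and only the three groups named in this diff:

```cpp
#include <cstdio>

// Toy model: entries for all dependency groups live in one flat array, and
// starts[g]..starts[g+1] delimits group g. kWeakICGroup holds at most one
// entry, the head of the dependent-IC linked list.
enum Group { kWeakICGroup = 0, kWeakCodeGroup, kTransitionGroup, kGroupCount };

int main() {
  int number_of_entries[kGroupCount] = {1, 2, 1};  // per-group counts
  int starts[kGroupCount + 1];
  starts[0] = 0;
  for (int g = 0; g < kGroupCount; g++)
    starts[g + 1] = starts[g] + number_of_entries[g];

  for (int g = 0; g < kGroupCount; g++)
    std::printf("group %d occupies [%d, %d)\n", g, starts[g], starts[g + 1]);
  return 0;
}
```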
void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
DisallowHeapAllocation no_allocation;
DependentCode::GroupStartIndexes starts(entries);
int number_of_entries = starts.number_of_entries();
if (number_of_entries == 0) return;
int new_number_of_entries = 0;
// Go through all groups, remove dead code objects, and compact.
for (int g = 0; g < DependentCode::kGroupCount; g++) {
int group_number_of_entries = 0;
for (int i = starts.at(g); i < starts.at(g + 1); i++) {
int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
DependentCode* entries, int group, int start, int end, int new_start) {
int survived = 0;
if (group == DependentCode::kWeakICGroup) {
// Dependent weak IC stubs form a linked list and only the head is stored
// in the dependent code array.
if (start != end) {
ASSERT(start + 1 == end);
Object* old_head = entries->object_at(start);
MarkCompactWeakObjectRetainer retainer;
Object* head = VisitWeakList<Code>(heap(), old_head, &retainer, true);
entries->set_object_at(new_start, head);
Object** slot = entries->slot_at(new_start);
RecordSlot(slot, slot, head);
// We do not compact this group even if the head is undefined;
// more dependent ICs are likely to be added later.
survived = 1;
}
} else {
for (int i = start; i < end; i++) {
Object* obj = entries->object_at(i);
ASSERT(obj->IsCode() || IsMarked(obj));
if (IsMarked(obj) &&
(!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
if (new_number_of_entries + group_number_of_entries != i) {
entries->set_object_at(
new_number_of_entries + group_number_of_entries, obj);
if (new_start + survived != i) {
entries->set_object_at(new_start + survived, obj);
}
Object** slot = entries->slot_at(new_number_of_entries +
group_number_of_entries);
Object** slot = entries->slot_at(new_start + survived);
RecordSlot(slot, slot, obj);
group_number_of_entries++;
survived++;
}
}
entries->set_number_of_entries(
static_cast<DependentCode::DependencyGroup>(g),
group_number_of_entries);
new_number_of_entries += group_number_of_entries;
}
entries->set_number_of_entries(
static_cast<DependentCode::DependencyGroup>(group), survived);
return survived;
}
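The compaction step factored out into ClearNonLiveDependentCodeInGroup can be exercised in isolation; a sketch with a plain vector and a liveness predicate standing in for marking (toy code, not the V8 types):

```cpp
#include <cstdio>
#include <vector>

// Toy version of the compaction loop above: entries in [start, end) that the
// predicate keeps are slid left to [new_start, new_start + survived).
int CompactGroup(std::vector<int>* entries, int start, int end, int new_start,
                 bool (*is_live)(int)) {
  int survived = 0;
  for (int i = start; i < end; i++) {
    int obj = (*entries)[i];
    if (is_live(obj)) {
      if (new_start + survived != i) (*entries)[new_start + survived] = obj;
      survived++;
    }
  }
  return survived;
}

int main() {
  std::vector<int> entries = {1, -2, 3, -4, 5};  // negative entries are "dead"
  int survived = CompactGroup(&entries, 0, 5, 0, [](int v) { return v > 0; });
  std::printf("survived=%d: %d %d %d\n", survived, entries[0], entries[1],
              entries[2]);  // survived=3: 1 3 5
}
```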
void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
DisallowHeapAllocation no_allocation;
DependentCode::GroupStartIndexes starts(entries);
int number_of_entries = starts.number_of_entries();
if (number_of_entries == 0) return;
int new_number_of_entries = 0;
// Go through all groups, remove dead code objects, and compact.
for (int g = 0; g < DependentCode::kGroupCount; g++) {
int survived = ClearNonLiveDependentCodeInGroup(
entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
new_number_of_entries += survived;
}
for (int i = new_number_of_entries; i < number_of_entries; i++) {
entries->clear_at(i);
......@@ -3414,7 +3460,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
// Return true if the given code is deoptimized or will be deoptimized.
bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
return code->marked_for_deoptimization();
return code->is_optimized_code() && code->marked_for_deoptimization();
}
......
......@@ -609,7 +609,7 @@ class MarkCompactCollector {
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
void VerifyWeakEmbeddedObjectsInOptimizedCode();
void VerifyWeakEmbeddedObjectsInCode();
void VerifyOmittedMapChecks();
#endif
......@@ -870,8 +870,11 @@ class MarkCompactCollector {
void ClearNonLivePrototypeTransitions(Map* map);
void ClearNonLiveMapTransitions(Map* map, MarkBit map_mark);
void ClearAndDeoptimizeDependentCode(DependentCode* dependent_code);
void ClearDependentCode(DependentCode* dependent_code);
void ClearDependentICList(Object* head);
void ClearNonLiveDependentCode(DependentCode* dependent_code);
int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group,
int start, int end, int new_start);
// Marking detaches initial maps from SharedFunctionInfo objects
// to make this reference weak. We need to reattach initial maps
......
......@@ -663,19 +663,21 @@ void Code::CodeVerify() {
void Code::VerifyEmbeddedObjectsDependency() {
if (!CanContainWeakObjects()) return;
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
Object* obj = it.rinfo()->target_object();
if (IsWeakObject(obj)) {
if (obj->IsMap()) {
Map* map = Map::cast(obj);
CHECK(map->dependent_code()->Contains(
DependentCode::kWeaklyEmbeddedGroup, this));
DependentCode::DependencyGroup group = is_optimized_code() ?
DependentCode::kWeakCodeGroup : DependentCode::kWeakICGroup;
CHECK(map->dependent_code()->Contains(group, this));
} else if (obj->IsJSObject()) {
Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table();
WeakHashTable* table = WeakHashTable::cast(raw_table);
CHECK(DependentCode::cast(table->Lookup(obj))->Contains(
DependentCode::kWeaklyEmbeddedGroup, this));
DependentCode::kWeakCodeGroup, this));
}
}
}
......
......@@ -4771,6 +4771,34 @@ void Code::set_marked_for_deoptimization(bool flag) {
}
bool Code::is_weak_stub() {
return CanBeWeakStub() && WeakStubField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::mark_as_weak_stub() {
ASSERT(CanBeWeakStub());
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
int updated = WeakStubField::update(previous, true);
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::is_invalidated_weak_stub() {
return is_weak_stub() && InvalidatedWeakStubField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
void Code::mark_as_invalidated_weak_stub() {
ASSERT(is_inline_cache_stub());
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
int updated = InvalidatedWeakStubField::update(previous, true);
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::is_inline_cache_stub() {
Kind kind = this->kind();
switch (kind) {
......@@ -4914,6 +4942,13 @@ class Code::FindAndReplacePattern {
};
bool Code::IsWeakObjectInIC(Object* object) {
return object->IsMap() && Map::cast(object)->CanTransition() &&
FLAG_collect_maps &&
FLAG_weak_embedded_maps_in_ic;
}
Object* Map::prototype() {
return READ_FIELD(this, kPrototypeOffset);
}
......
......@@ -312,7 +312,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
&& (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
Serializer::enabled() || target->ic_age() != heap->global_ic_age() ||
target->is_invalidated_weak_stub())) {
IC::Clear(target->GetIsolate(), rinfo->pc(),
rinfo->host()->constant_pool());
target = Code::GetCodeFromTargetAddress(rinfo->target_address());
......@@ -501,9 +502,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
Object** slot = constant_pool->RawFieldOfElementAt(index);
HeapObject* object = HeapObject::cast(*slot);
heap->mark_compact_collector()->RecordSlot(slot, slot, object);
if (!(constant_pool->get_weak_object_state() ==
bool is_weak_object =
(constant_pool->get_weak_object_state() ==
ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
Code::IsWeakObjectInOptimizedCode(object))) {
Code::IsWeakObjectInOptimizedCode(object)) ||
(constant_pool->get_weak_object_state() ==
ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
Code::IsWeakObjectInIC(object));
if (!is_weak_object) {
StaticVisitor::MarkObject(heap, object);
}
}
......
......@@ -211,4 +211,281 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
}
}
template <class T>
struct WeakListVisitor;
template <class T>
Object* VisitWeakList(Heap* heap,
Object* list,
WeakObjectRetainer* retainer,
bool record_slots) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
MarkCompactCollector* collector = heap->mark_compact_collector();
while (list != undefined) {
// Check whether to keep the candidate in the list.
T* candidate = reinterpret_cast<T*>(list);
Object* retained = retainer->RetainAs(list);
if (retained != NULL) {
if (head == undefined) {
// First element in the list.
head = retained;
} else {
// Subsequent elements in the list.
ASSERT(tail != NULL);
WeakListVisitor<T>::SetWeakNext(tail, retained);
if (record_slots) {
Object** next_slot =
HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
collector->RecordSlot(next_slot, next_slot, retained);
}
}
// Retained object is new tail.
ASSERT(!retained->IsUndefined());
candidate = reinterpret_cast<T*>(retained);
tail = candidate;
// tail is a live object, visit it.
WeakListVisitor<T>::VisitLiveObject(
heap, tail, retainer, record_slots);
} else {
WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
}
// Move to next element in the list.
list = WeakListVisitor<T>::WeakNext(candidate);
}
// Terminate the list if there are one or more elements.
if (tail != NULL) {
WeakListVisitor<T>::SetWeakNext(tail, undefined);
}
return head;
}
template <class T>
static void ClearWeakList(Heap* heap,
Object* list) {
Object* undefined = heap->undefined_value();
while (list != undefined) {
T* candidate = reinterpret_cast<T*>(list);
list = WeakListVisitor<T>::WeakNext(candidate);
WeakListVisitor<T>::SetWeakNext(candidate, undefined);
}
}
template<>
struct WeakListVisitor<JSFunction> {
static void SetWeakNext(JSFunction* function, Object* next) {
function->set_next_function_link(next);
}
static Object* WeakNext(JSFunction* function) {
return function->next_function_link();
}
static int WeakNextOffset() {
return JSFunction::kNextFunctionLinkOffset;
}
static void VisitLiveObject(Heap*, JSFunction*,
WeakObjectRetainer*, bool) {
}
static void VisitPhantomObject(Heap*, JSFunction*) {
}
};
template<>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code* code, Object* next) {
code->set_next_code_link(next);
}
static Object* WeakNext(Code* code) {
return code->next_code_link();
}
static int WeakNextOffset() {
return Code::kNextCodeLinkOffset;
}
static void VisitLiveObject(Heap*, Code*,
WeakObjectRetainer*, bool) {
}
static void VisitPhantomObject(Heap*, Code*) {
}
};
template<>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context* context, Object* next) {
context->set(Context::NEXT_CONTEXT_LINK,
next,
UPDATE_WRITE_BARRIER);
}
static Object* WeakNext(Context* context) {
return context->get(Context::NEXT_CONTEXT_LINK);
}
static void VisitLiveObject(Heap* heap,
Context* context,
WeakObjectRetainer* retainer,
bool record_slots) {
// Process the three weak lists linked off the context.
DoWeakList<JSFunction>(heap, context, retainer, record_slots,
Context::OPTIMIZED_FUNCTIONS_LIST);
DoWeakList<Code>(heap, context, retainer, record_slots,
Context::OPTIMIZED_CODE_LIST);
DoWeakList<Code>(heap, context, retainer, record_slots,
Context::DEOPTIMIZED_CODE_LIST);
}
template<class T>
static void DoWeakList(Heap* heap,
Context* context,
WeakObjectRetainer* retainer,
bool record_slots,
int index) {
// Visit the weak list, removing dead intermediate elements.
Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
record_slots);
// Update the list head.
context->set(index, list_head, UPDATE_WRITE_BARRIER);
if (record_slots) {
// Record the updated slot if necessary.
Object** head_slot = HeapObject::RawField(
context, FixedArray::SizeFor(index));
heap->mark_compact_collector()->RecordSlot(
head_slot, head_slot, list_head);
}
}
static void VisitPhantomObject(Heap* heap, Context* context) {
ClearWeakList<JSFunction>(heap,
context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
}
static int WeakNextOffset() {
return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
}
};
template<>
struct WeakListVisitor<JSArrayBufferView> {
static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(JSArrayBufferView* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap*,
JSArrayBufferView* obj,
WeakObjectRetainer* retainer,
bool record_slots) {}
static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
static int WeakNextOffset() {
return JSArrayBufferView::kWeakNextOffset;
}
};
template<>
struct WeakListVisitor<JSArrayBuffer> {
static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(JSArrayBuffer* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap* heap,
JSArrayBuffer* array_buffer,
WeakObjectRetainer* retainer,
bool record_slots) {
Object* typed_array_obj =
VisitWeakList<JSArrayBufferView>(
heap,
array_buffer->weak_first_view(),
retainer, record_slots);
array_buffer->set_weak_first_view(typed_array_obj);
if (typed_array_obj != heap->undefined_value() && record_slots) {
Object** slot = HeapObject::RawField(
array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
}
}
static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
Runtime::FreeArrayBuffer(heap->isolate(), phantom);
}
static int WeakNextOffset() {
return JSArrayBuffer::kWeakNextOffset;
}
};
template<>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* WeakNext(AllocationSite* obj) {
return obj->weak_next();
}
static void VisitLiveObject(Heap* heap,
AllocationSite* site,
WeakObjectRetainer* retainer,
bool record_slots) {}
static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {}
static int WeakNextOffset() {
return AllocationSite::kWeakNextOffset;
}
};
template Object* VisitWeakList<Code>(
Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
template Object* VisitWeakList<JSFunction>(
Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
template Object* VisitWeakList<Context>(
Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
template Object* VisitWeakList<JSArrayBuffer>(
Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
template Object* VisitWeakList<AllocationSite>(
Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
} } // namespace v8::internal
......@@ -483,6 +483,20 @@ VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
StaticMarkingVisitor<StaticVisitor>::table_;
class WeakObjectRetainer;
// A weak list is a singly linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and, if requested, records slots for the next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap,
Object* list,
WeakObjectRetainer* retainer,
bool record_slots);
} } // namespace v8::internal
#endif // V8_OBJECTS_VISITING_H_
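To make the contract concrete, a standalone sketch of the algorithm over a toy node type, with the retainer and visitor hooks reduced to their essence (illustrative code, not V8's):

```cpp
#include <cstdio>

// Toy weak list: 'alive' stands in for the retainer's liveness decision.
struct Node {
  int value;
  bool alive;
  Node* next;  // the weak "next" pointer
};

// WeakObjectRetainer analogue: live objects are returned (possibly relocated),
// dead ones yield nullptr.
Node* RetainAs(Node* node) { return node->alive ? node : nullptr; }

Node* VisitWeakList(Node* list) {
  Node* head = nullptr;
  Node* tail = nullptr;
  while (list != nullptr) {
    Node* retained = RetainAs(list);
    if (retained != nullptr) {
      if (head == nullptr) head = retained;  // first live element
      else tail->next = retained;            // splice out dead elements
      tail = retained;
    }
    list = list->next;
  }
  if (tail != nullptr) tail->next = nullptr;  // terminate the list
  return head;
}

int main() {
  Node c{3, true, nullptr}, b{2, false, &c}, a{1, true, &b};
  for (Node* n = VisitWeakList(&a); n != nullptr; n = n->next)
    std::printf("%d ", n->value);  // prints: 1 3
  std::printf("\n");
}
```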
......@@ -11538,6 +11538,21 @@ void Map::AddDependentCode(DependentCode::DependencyGroup group,
}
void Map::AddDependentIC(Handle<Code> stub) {
ASSERT(stub->next_code_link()->IsUndefined());
int n = dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
if (n == 0) {
// Slow path: insert the head of the list with possible heap allocation.
AddDependentCode(DependentCode::kWeakICGroup, stub);
} else {
// Fast path: link the stub to the existing head of the list without any
// heap allocation.
ASSERT(n == 1);
dependent_code()->AddToDependentICList(stub);
}
}
DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
Recompute(entries);
}
......@@ -11668,10 +11683,22 @@ void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
}
static bool CodeListContains(Object* head, Code* code) {
while (!head->IsUndefined()) {
if (head == code) return true;
head = Code::cast(head)->next_code_link();
}
return false;
}
bool DependentCode::Contains(DependencyGroup group, Code* code) {
GroupStartIndexes starts(this);
int start = starts.at(group);
int end = starts.at(group + 1);
if (group == kWeakICGroup) {
return CodeListContains(object_at(start), code);
}
for (int i = start; i < end; i++) {
if (object_at(i) == code) return true;
}
......@@ -11728,6 +11755,15 @@ void DependentCode::DeoptimizeDependentCodeGroup(
}
void DependentCode::AddToDependentICList(Handle<Code> stub) {
DisallowHeapAllocation no_heap_allocation;
GroupStartIndexes starts(this);
int i = starts.at(kWeakICGroup);
stub->set_next_code_link(object_at(i));
set_object_at(i, *stub);
}
Handle<Object> JSObject::SetPrototype(Handle<JSObject> object,
Handle<Object> value,
bool skip_hidden_prototypes) {
......
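The head-insertion in AddToDependentICList and the traversal in CodeListContains above fit in a few lines; a toy model of the list (not V8's DependentCode layout) showing that appending a stub needs no heap allocation:

```cpp
#include <cstdio>

// Toy model: the dependent code array stores only the list head for
// kWeakICGroup; stubs are pushed on front via next_code_link, allocation-free.
struct Stub {
  const char* name;
  Stub* next_code_link = nullptr;
};

struct DependentICs {
  Stub* head = nullptr;  // the single kWeakICGroup entry

  void AddToDependentICList(Stub* stub) {
    stub->next_code_link = head;  // stub->set_next_code_link(object_at(i))
    head = stub;                  // set_object_at(i, *stub)
  }

  bool Contains(Stub* code) const {  // mirrors CodeListContains
    for (Stub* s = head; s != nullptr; s = s->next_code_link)
      if (s == code) return true;
    return false;
  }
};

int main() {
  Stub load{"load_ic"}, store{"store_ic"};
  DependentICs deps;
  deps.AddToDependentICList(&load);
  deps.AddToDependentICList(&store);
  std::printf("head=%s contains(load)=%d\n", deps.head->name,
              deps.Contains(&load));  // head=store_ic contains(load)=1
}
```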
......@@ -3225,7 +3225,8 @@ class ConstantPoolArray: public FixedArrayBase {
public:
enum WeakObjectState {
NO_WEAK_OBJECTS,
WEAK_OBJECTS_IN_OPTIMIZED_CODE
WEAK_OBJECTS_IN_OPTIMIZED_CODE,
WEAK_OBJECTS_IN_IC
};
// Getters for the field storing the first index for different type entries.
......@@ -5494,6 +5495,17 @@ class Code: public HeapObject {
inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
inline bool is_keyed_stub();
inline bool is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
inline bool is_weak_stub();
inline void mark_as_weak_stub();
inline bool is_invalidated_weak_stub();
inline void mark_as_invalidated_weak_stub();
inline bool CanBeWeakStub() {
Kind k = kind();
return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
ic_state() == MONOMORPHIC;
}
inline void set_raw_kind_specific_flags1(int value);
inline void set_raw_kind_specific_flags2(int value);
......@@ -5751,11 +5763,17 @@ class Code: public HeapObject {
void VerifyEmbeddedObjectsDependency();
#endif
inline bool CanContainWeakObjects() {
return is_optimized_code() || is_weak_stub();
}
inline bool IsWeakObject(Object* object) {
return is_optimized_code() && IsWeakObjectInOptimizedCode(object);
return (is_optimized_code() && IsWeakObjectInOptimizedCode(object)) ||
(is_weak_stub() && IsWeakObjectInIC(object));
}
static inline bool IsWeakObjectInOptimizedCode(Object* object);
static inline bool IsWeakObjectInIC(Object* object);
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
......@@ -5818,11 +5836,17 @@ class Code: public HeapObject {
static const int kMarkedForDeoptimizationFirstBit =
kStackSlotsFirstBit + kStackSlotsBitCount + 1;
static const int kMarkedForDeoptimizationBitCount = 1;
static const int kWeakStubFirstBit =
kMarkedForDeoptimizationFirstBit + kMarkedForDeoptimizationBitCount;
static const int kWeakStubBitCount = 1;
static const int kInvalidatedWeakStubFirstBit =
kWeakStubFirstBit + kWeakStubBitCount;
static const int kInvalidatedWeakStubBitCount = 1;
STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
STATIC_ASSERT(kHasFunctionCacheFirstBit + kHasFunctionCacheBitCount <= 32);
STATIC_ASSERT(kMarkedForDeoptimizationFirstBit +
kMarkedForDeoptimizationBitCount <= 32);
STATIC_ASSERT(kInvalidatedWeakStubFirstBit +
kInvalidatedWeakStubBitCount <= 32);
class StackSlotsField: public BitField<int,
kStackSlotsFirstBit, kStackSlotsBitCount> {}; // NOLINT
......@@ -5831,6 +5855,12 @@ class Code: public HeapObject {
class MarkedForDeoptimizationField: public BitField<bool,
kMarkedForDeoptimizationFirstBit,
kMarkedForDeoptimizationBitCount> {}; // NOLINT
class WeakStubField: public BitField<bool,
kWeakStubFirstBit,
kWeakStubBitCount> {}; // NOLINT
class InvalidatedWeakStubField: public BitField<bool,
kInvalidatedWeakStubFirstBit,
kInvalidatedWeakStubBitCount> {}; // NOLINT
// KindSpecificFlags2 layout (ALL)
static const int kIsCrankshaftedBit = 0;
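The two new flags pack into the same 32-bit kind-specific word through the BitField pattern used above. A standalone sketch, simplified relative to V8's template and with hypothetical bit positions:

```cpp
#include <cstdint>
#include <cstdio>

// Simplified BitField: a kBits-wide flag at kShift inside a packed 32-bit
// flags word, with decode/update mirroring how the fields above are used.
template <int kShift, int kBits>
struct BitField {
  static constexpr uint32_t kMask = ((1u << kBits) - 1) << kShift;
  static bool decode(uint32_t value) { return (value & kMask) != 0; }
  static uint32_t update(uint32_t previous, bool value) {
    return (previous & ~kMask) | (static_cast<uint32_t>(value) << kShift);
  }
};

// Hypothetical bit positions; in the real layout they follow
// kMarkedForDeoptimizationFirstBit.
using WeakStubField = BitField<5, 1>;
using InvalidatedWeakStubField = BitField<6, 1>;

int main() {
  uint32_t flags = 0;
  flags = WeakStubField::update(flags, true);  // mark_as_weak_stub()
  flags = InvalidatedWeakStubField::update(flags, true);
  std::printf("weak=%d invalidated=%d flags=0x%02x\n",
              WeakStubField::decode(flags),
              InvalidatedWeakStubField::decode(flags),
              static_cast<unsigned>(flags));  // weak=1 invalidated=1 flags=0x60
}
```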
......@@ -5915,9 +5945,14 @@ class CompilationInfo;
class DependentCode: public FixedArray {
public:
enum DependencyGroup {
// Group of IC stubs that weakly embed this map and depend on being
// invalidated when the map is garbage collected. Dependent IC stubs form
// a linked list. This group stores only the head of the list. This means
// that number_of_entries(kWeakICGroup) is 0 or 1.
kWeakICGroup,
// Group of code that weakly embed this map and depend on being
// deoptimized when the map is garbage collected.
kWeaklyEmbeddedGroup,
kWeakCodeGroup,
// Group of code that embed a transition to this map, and depend on being
// deoptimized when the transition is replaced by a new version.
kTransitionGroup,
......@@ -5968,6 +6003,7 @@ class DependentCode: public FixedArray {
bool MarkCodeForDeoptimization(Isolate* isolate,
DependentCode::DependencyGroup group);
void AddToDependentICList(Handle<Code> stub);
// The following low-level accessors should only be used by this class
// and the mark compact collector.
......@@ -6278,7 +6314,7 @@ class Map: public HeapObject {
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
// [dependent code]: list of optimized codes that have this map embedded.
// [dependent code]: list of optimized codes that weakly embed this map.
DECL_ACCESSORS(dependent_code, DependentCode)
// [back pointer]: points back to the parent map from which a transition
......@@ -6563,6 +6599,7 @@ class Map: public HeapObject {
void AddDependentCode(DependentCode::DependencyGroup group,
Handle<Code> code);
void AddDependentIC(Handle<Code> stub);
bool IsMapInArrayPrototypeChain();
......
......@@ -1277,6 +1277,7 @@ Handle<Code> BaseLoadStoreStubCompiler::GetICCode(Code::Kind kind,
InlineCacheState state) {
Code::Flags flags = Code::ComputeFlags(kind, state, extra_state(), type);
Handle<Code> code = GetCodeWithFlags(flags, name);
IC::RegisterWeakMapDependency(code);
PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
JitEvent(name, code);
return code;
......
......@@ -3848,6 +3848,114 @@ TEST(NextCodeLinkIsWeak2) {
}
static bool weak_ic_cleared = false;
static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
printf("clear weak is called\n");
weak_ic_cleared = true;
v8::Persistent<v8::Value>* p =
reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
CHECK(p->IsNearDeath());
p->Reset();
}
// Checks that the value returned by execution of the source is weak.
void CheckWeakness(const char* source) {
i::FLAG_stress_compaction = false;
CcTest::InitializeVM();
v8::Isolate* isolate = CcTest::isolate();
v8::HandleScope scope(isolate);
v8::Persistent<v8::Object> garbage;
{
v8::HandleScope scope(isolate);
garbage.Reset(isolate, CompileRun(source)->ToObject());
}
weak_ic_cleared = false;
garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
Heap* heap = CcTest::i_isolate()->heap();
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK(weak_ic_cleared);
}
// Each of the following "weak IC" tests creates an IC that embeds a map with
// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
TEST(WeakMapInMonomorphicLoadIC) {
CheckWeakness("function loadIC(obj) {"
" return obj.name;"
"}"
" (function() {"
" var proto = {'name' : 'weak'};"
" var obj = Object.create(proto);"
" loadIC(obj);"
" loadIC(obj);"
" loadIC(obj);"
" return proto;"
" })();");
}
TEST(WeakMapInMonomorphicKeyedLoadIC) {
CheckWeakness("function keyedLoadIC(obj, field) {"
" return obj[field];"
"}"
" (function() {"
" var proto = {'name' : 'weak'};"
" var obj = Object.create(proto);"
" keyedLoadIC(obj, 'name');"
" keyedLoadIC(obj, 'name');"
" keyedLoadIC(obj, 'name');"
" return proto;"
" })();");
}
TEST(WeakMapInMonomorphicStoreIC) {
CheckWeakness("function storeIC(obj, value) {"
" obj.name = value;"
"}"
" (function() {"
" var proto = {'name' : 'weak'};"
" var obj = Object.create(proto);"
" storeIC(obj, 'x');"
" storeIC(obj, 'x');"
" storeIC(obj, 'x');"
" return proto;"
" })();");
}
TEST(WeakMapInMonomorphicKeyedStoreIC) {
CheckWeakness("function keyedStoreIC(obj, field, value) {"
" obj[field] = value;"
"}"
" (function() {"
" var proto = {'name' : 'weak'};"
" var obj = Object.create(proto);"
" keyedStoreIC(obj, 'x');"
" keyedStoreIC(obj, 'x');"
" keyedStoreIC(obj, 'x');"
" return proto;"
" })();");
}
TEST(WeakMapInMonomorphicCompareNilIC) {
CheckWeakness("function compareNilIC(obj) {"
" return obj == null;"
"}"
" (function() {"
" var proto = {'name' : 'weak'};"
" var obj = Object.create(proto);"
" compareNilIC(obj);"
" compareNilIC(obj);"
" compareNilIC(obj);"
" return proto;"
" })();");
}
#ifdef DEBUG
TEST(AddInstructionChangesNewSpacePromotion) {
i::FLAG_allow_natives_syntax = true;
......