Commit fe7bdf1f authored by vitalyr@chromium.org

Remove unmarked entries from per-context map caches.

Made "map_cache" a weak field of global context and added a pass over
all caches late in the marking phase.
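
Below is a minimal sketch of the idea, not V8 code: the types HypotheticalObject,
CacheEntry, and SweepWeakCache are illustrative names only. The point is that once
live maps have been marked, any cache entry whose value (a map) is unmarked can be
cleared in place, so the cache never keeps a dead map alive on its own.

```cpp
#include <vector>

// Not V8 code: a simplified model of sweeping a key/value cache whose
// values are held weakly.  Entries whose value did not get marked are
// cleared in place; the caller keeps the cache only if something survived.
struct HypotheticalObject {
  bool marked = false;  // set by the marker for objects reachable from strong roots
};

struct CacheEntry {
  HypotheticalObject* key = nullptr;    // e.g. a symbol naming the literal's properties
  HypotheticalObject* value = nullptr;  // e.g. the map describing the literal's shape
};

// Returns true if at least one entry survived marking.
bool SweepWeakCache(std::vector<CacheEntry>& cache) {
  int used = 0;
  for (CacheEntry& entry : cache) {
    if (entry.value != nullptr && entry.value->marked) {
      ++used;                 // live map: keep the entry
    } else {
      entry.key = nullptr;    // dead map: drop the whole entry
      entry.value = nullptr;
    }
  }
  return used > 0;
}
```

In the actual patch the same per-entry decision is made over the MapCache hash
table, and the context's map cache slot is reset to undefined when no entry survives.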

R=vegorov@chromium.org
BUG=v8:1516
TEST=cctest/test-api/Regress1516

Review URL: http://codereview.chromium.org/7285031

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8515 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 77a3c722
@@ -225,7 +225,6 @@ class Context: public FixedArray {
     OPAQUE_REFERENCE_FUNCTION_INDEX,
     CONTEXT_EXTENSION_FUNCTION_INDEX,
     OUT_OF_MEMORY_INDEX,
-    MAP_CACHE_INDEX,
     CONTEXT_DATA_INDEX,
     ALLOW_CODE_GEN_FROM_STRINGS_INDEX,
     DERIVED_GET_TRAP_INDEX,
@@ -234,6 +233,7 @@ class Context: public FixedArray {
     // Properties from here are treated as weak references by the full GC.
     // Scavenge treats them as strong references.
     OPTIMIZED_FUNCTIONS_LIST,  // Weak.
+    MAP_CACHE_INDEX,  // Weak.
     NEXT_CONTEXT_LINK,  // Weak.
 
     // Total number of slots.
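The context change above only moves MAP_CACHE_INDEX past the "weak references"
boundary: per the comment in that enum, the full GC treats slots from that point
on as weak, while the scavenger still visits them as strong. A rough sketch of
that split follows, with a hypothetical boundary constant and visitor interface
(not the actual V8 body descriptors):

```cpp
#include <cstddef>

// Illustrative only: slots below the boundary are always strong; slots at or
// past it are skipped by the full collector and handled by dedicated passes
// (such as the map cache pass added in this patch).
constexpr std::size_t kFirstWeakSlot = 3;  // hypothetical boundary index

template <typename Visitor>
void VisitContextSlots(void** slots, std::size_t length, Visitor& visitor,
                       bool full_gc) {
  // Scavenge: everything is strong.  Full GC: stop before the weak tail.
  const std::size_t end = full_gc ? kFirstWeakSlot : length;
  for (std::size_t i = 0; i < end; ++i) {
    visitor.VisitStrongSlot(&slots[i]);
  }
}
```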
@@ -1424,6 +1424,12 @@ void MarkCompactCollector::MarkLiveObjects() {
   // reachable from the weak roots.
   ProcessExternalMarking();
 
+  // Object literal map caches reference symbols (cache keys) and maps
+  // (cache values). At this point still useful maps have already been
+  // marked. Mark the keys for the alive values before we process the
+  // symbol table.
+  ProcessMapCaches();
+
   // Prune the symbol table removing all symbols only pointed to by the
   // symbol table. Cannot use symbol_table() here because the symbol
   // table is marked.
@@ -1452,6 +1458,57 @@ void MarkCompactCollector::MarkLiveObjects() {
 }
 
 
+void MarkCompactCollector::ProcessMapCaches() {
+  Object* raw_context = heap()->global_contexts_list_;
+  while (raw_context != heap()->undefined_value()) {
+    Context* context = reinterpret_cast<Context*>(raw_context);
+    if (context->IsMarked()) {
+      HeapObject* raw_map_cache =
+          HeapObject::cast(context->get(Context::MAP_CACHE_INDEX));
+      // A map cache may be reachable from the stack. In this case
+      // it's already transitively marked and it's too late to clean
+      // up its parts.
+      if (!raw_map_cache->IsMarked() &&
+          raw_map_cache != heap()->undefined_value()) {
+        MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache);
+        int existing_elements = map_cache->NumberOfElements();
+        int used_elements = 0;
+        for (int i = MapCache::kElementsStartIndex;
+             i < map_cache->length();
+             i += MapCache::kEntrySize) {
+          Object* raw_key = map_cache->get(i);
+          if (raw_key == heap()->undefined_value() ||
+              raw_key == heap()->null_value()) continue;
+          STATIC_ASSERT(MapCache::kEntrySize == 2);
+          Object* raw_map = map_cache->get(i + 1);
+          if (raw_map->IsHeapObject() &&
+              HeapObject::cast(raw_map)->IsMarked()) {
+            ++used_elements;
+          } else {
+            // Delete useless entries with unmarked maps.
+            ASSERT(raw_map->IsMap());
+            map_cache->set_null_unchecked(heap(), i);
+            map_cache->set_null_unchecked(heap(), i + 1);
+          }
+        }
+        if (used_elements == 0) {
+          context->set(Context::MAP_CACHE_INDEX, heap()->undefined_value());
+        } else {
+          // Note: we don't actually shrink the cache here to avoid
+          // extra complexity during GC. We rely on subsequent cache
+          // usages (EnsureCapacity) to do this.
+          map_cache->ElementsRemoved(existing_elements - used_elements);
+          MarkObject(map_cache);
+        }
+      }
+    }
+    // Move to next element in the list.
+    raw_context = context->get(Context::NEXT_CONTEXT_LINK);
+  }
+  ProcessMarkingStack();
+}
+
+
 #ifdef DEBUG
 void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
   live_bytes_ += obj->Size();
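ProcessMapCaches() first calls MarkObject(map_cache) on each surviving cache and
only then drains with ProcessMarkingStack(); as the comment in MarkLiveObjects()
notes, the surviving caches' symbol keys must be traced before the symbol table is
pruned. The toy model below (a generic GC sketch, not V8's marker) illustrates
that push-then-drain pattern: marking an object only sets its bit and pushes it,
and its outgoing references are traced later during the drain.

```cpp
#include <stack>
#include <vector>

// Toy marking model: Mark() only sets the bit and pushes; outgoing
// references are traced later, when the stack is drained.
struct Obj {
  bool marked = false;
  std::vector<Obj*> refs;
};

void Mark(Obj* o, std::stack<Obj*>& marking_stack) {
  if (o == nullptr || o->marked) return;
  o->marked = true;
  marking_stack.push(o);  // references are not traced here
}

void DrainMarkingStack(std::stack<Obj*>& marking_stack) {
  while (!marking_stack.empty()) {
    Obj* o = marking_stack.top();
    marking_stack.pop();
    for (Obj* ref : o->refs) Mark(ref, marking_stack);
  }
}
```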
@@ -306,6 +306,10 @@ class MarkCompactCollector {
   // flag on the marking stack.
   void RefillMarkingStack();
 
+  // After reachable maps have been marked process per context object
+  // literal map caches removing unmarked entries.
+  void ProcessMapCaches();
+
   // Callback function for telling whether the object *p is an unmarked
   // heap object.
   static bool IsUnmarkedHeapObject(Object** p);
@@ -14659,3 +14659,28 @@ THREADED_TEST(ReadOnlyIndexedProperties) {
   obj->Set(v8_str("2000000000"), v8_str("foobar"));
   CHECK_EQ(v8_str("DONT_CHANGE"), obj->Get(v8_str("2000000000")));
 }
+
+
+THREADED_TEST(Regress1516) {
+  v8::HandleScope scope;
+  LocalContext context;
+
+  { v8::HandleScope temp_scope;
+    CompileRun("({'a': 0})");
+  }
+
+  int elements;
+  { i::MapCache* map_cache =
+        i::MapCache::cast(i::Isolate::Current()->context()->map_cache());
+    elements = map_cache->NumberOfElements();
+    CHECK_LE(1, elements);
+  }
+
+  i::Isolate::Current()->heap()->CollectAllGarbage(true);
+  { i::Object* raw_map_cache = i::Isolate::Current()->context()->map_cache();
+    if (raw_map_cache != i::Isolate::Current()->heap()->undefined_value()) {
+      i::MapCache* map_cache = i::MapCache::cast(raw_map_cache);
+      CHECK_GT(elements, map_cache->NumberOfElements());
+    }
+  }
+}