Commit d4ac509f authored by mstarzinger's avatar mstarzinger Committed by Commit bot

Fix stale entries in optimized code map.

This fixes a corner-case where extending an optimized code map left
stale entries in the abandoned copy. This can cause havoc not only in
the heap verifier but also in the GC, because stale entries have not
been recorded when being treated weakly.

Note that this also pre-tenures all optimized code maps into old-space
because their lifetime is coupled to the SharedFunctionInfo anyway.

R=hpayer@chromium.org
TEST=cctest/test-heap/Regress514122
BUG=chromium:514122
LOG=N

Review URL: https://codereview.chromium.org/1277873002

Cr-Commit-Position: refs/heads/master@{#30047}
parent 32525774
......@@ -9508,6 +9508,7 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
Handle<FixedArray> literals,
BailoutId osr_ast_id) {
Isolate* isolate = shared->GetIsolate();
DCHECK(!shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code);
DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
DCHECK(native_context->IsNativeContext());
STATIC_ASSERT(kEntryLength == 4);
......@@ -9517,20 +9518,18 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
if (value->IsSmi()) {
// No optimized code map.
DCHECK_EQ(0, Smi::cast(*value)->value());
new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
old_length = kEntriesStart;
} else {
// Copy old map and append one new entry.
// Copy old optimized code map and append one new entry.
Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
DCHECK(!shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code);
new_code_map =
isolate->factory()->CopyFixedArrayAndGrow(old_code_map, kEntryLength);
new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
old_code_map, kEntryLength, TENURED);
old_length = old_code_map->length();
// Zap the old map for the sake of the heap verifier.
if (Heap::ShouldZapGarbage()) {
Object** data = old_code_map->data_start();
MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
}
// Zap the old map to avoid any stale entries. Note that this is required
// for correctness because entries are being treated weakly by the GC.
MemsetPointer(old_code_map->data_start(), isolate->heap()->the_hole_value(),
old_length);
}
new_code_map->set(old_length + kContextOffset, *native_context);
new_code_map->set(old_length + kCachedCodeOffset, *code);
......
......@@ -4557,6 +4557,94 @@ TEST(Regress513507) {
#endif // DEBUG
// Regression test for chromium:514122 — an abandoned (old) copy of an
// optimized code map could keep stale entries that the GC treats weakly
// without ever having recorded them, corrupting the heap.
// NOTE(review): this test depends on precise GC / incremental-marking
// sequencing; the code is intentionally left unchanged.
TEST(Regress514122) {
  // Keep the optimized code map alive across GCs so stale copies can linger.
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();
  // Prepare a function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");
    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    // Escape only the SharedFunctionInfo; drop the JSFunction so that
    // nothing but {shared} keeps the code map reachable.
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }
  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");
    Handle<JSFunction> g =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("g"))));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Bail out when no optimized code was produced (e.g. optimization
    // disabled in this configuration); the test would be meaningless.
    if (!code->is_optimized_code()) return;
  }
  Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
  Handle<Context> context(isolate->context());
  // Add the code several times to the optimized code map, forcing the map
  // to grow (and old copies to be abandoned).
  for (int i = 0; i < 3; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  shared->optimized_code_map()->Print();
  // Add the code with a literals array to be evacuated.
  Page* evac_page;
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure literal is placed on an old-space evacuation candidate.
    SimulateFullSpace(heap->old_space());
    Handle<FixedArray> lit = isolate->factory()->NewFixedArray(23, TENURED);
    evac_page = Page::FromAddress(lit->address());
    BailoutId id = BailoutId(100);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  // Heap is ready, force {evac_page} to become an evacuation candidate and
  // simulate incremental marking to enqueue the optimized code map.
  FLAG_manual_evacuation_candidates_selection = true;
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);
  // No matter whether reachable or not, {boomer} is doomed.
  Handle<Object> boomer(shared->optimized_code_map(), isolate);
  // Add the code several times to the optimized code map. This will leave old
  // copies of the optimized code map unreachable but still marked.
  for (int i = 3; i < 6; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  // Trigger a GC to flush out the bug: with stale entries still present in
  // the abandoned copies, this would crash or fail heap verification.
  heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
  boomer->Print();
}
class DummyVisitor : public ObjectVisitor {
public:
void VisitPointers(Object** start, Object** end) { }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment