Commit a457040c authored by vegorov@chromium.org's avatar vegorov@chromium.org

Ensure that non-optimized code objects are not flushed for inlined functions.

Collector was flushing them if optimized code was reachable only through the stack (not through the JSFunction object) which happens when you have a pending lazy deoptimization.

Also prevent v8::Script::New from leaking internal objects allocated by the compiler into outer HandleScope.

R=kmillikin@chromium.org
BUG=http://crbug.com/97116
TEST=test/mjsunit/regress/regress-97116.js

Review URL: http://codereview.chromium.org/8888011

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10215 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent f206e15c
...@@ -1462,31 +1462,35 @@ Local<Script> Script::New(v8::Handle<String> source, ...@@ -1462,31 +1462,35 @@ Local<Script> Script::New(v8::Handle<String> source,
ON_BAILOUT(isolate, "v8::Script::New()", return Local<Script>()); ON_BAILOUT(isolate, "v8::Script::New()", return Local<Script>());
LOG_API(isolate, "Script::New"); LOG_API(isolate, "Script::New");
ENTER_V8(isolate); ENTER_V8(isolate);
i::Handle<i::String> str = Utils::OpenHandle(*source); i::SharedFunctionInfo* raw_result = NULL;
i::Handle<i::Object> name_obj; { i::HandleScope scope(isolate);
int line_offset = 0; i::Handle<i::String> str = Utils::OpenHandle(*source);
int column_offset = 0; i::Handle<i::Object> name_obj;
if (origin != NULL) { int line_offset = 0;
if (!origin->ResourceName().IsEmpty()) { int column_offset = 0;
name_obj = Utils::OpenHandle(*origin->ResourceName()); if (origin != NULL) {
} if (!origin->ResourceName().IsEmpty()) {
if (!origin->ResourceLineOffset().IsEmpty()) { name_obj = Utils::OpenHandle(*origin->ResourceName());
line_offset = static_cast<int>(origin->ResourceLineOffset()->Value()); }
if (!origin->ResourceLineOffset().IsEmpty()) {
line_offset = static_cast<int>(origin->ResourceLineOffset()->Value());
}
if (!origin->ResourceColumnOffset().IsEmpty()) {
column_offset =
static_cast<int>(origin->ResourceColumnOffset()->Value());
}
} }
if (!origin->ResourceColumnOffset().IsEmpty()) { EXCEPTION_PREAMBLE(isolate);
column_offset = static_cast<int>(origin->ResourceColumnOffset()->Value()); i::ScriptDataImpl* pre_data_impl =
static_cast<i::ScriptDataImpl*>(pre_data);
// We assert that the pre-data is sane, even though we can actually
// handle it if it turns out not to be in release mode.
ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
// If the pre-data isn't sane we simply ignore it
if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
pre_data_impl = NULL;
} }
} i::Handle<i::SharedFunctionInfo> result =
EXCEPTION_PREAMBLE(isolate);
i::ScriptDataImpl* pre_data_impl = static_cast<i::ScriptDataImpl*>(pre_data);
// We assert that the pre-data is sane, even though we can actually
// handle it if it turns out not to be in release mode.
ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
// If the pre-data isn't sane we simply ignore it
if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
pre_data_impl = NULL;
}
i::Handle<i::SharedFunctionInfo> result =
i::Compiler::Compile(str, i::Compiler::Compile(str,
name_obj, name_obj,
line_offset, line_offset,
...@@ -1495,8 +1499,11 @@ Local<Script> Script::New(v8::Handle<String> source, ...@@ -1495,8 +1499,11 @@ Local<Script> Script::New(v8::Handle<String> source,
pre_data_impl, pre_data_impl,
Utils::OpenHandle(*script_data), Utils::OpenHandle(*script_data),
i::NOT_NATIVES_CODE); i::NOT_NATIVES_CODE);
has_pending_exception = result.is_null(); has_pending_exception = result.is_null();
EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>()); EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
raw_result = *result;
}
i::Handle<i::SharedFunctionInfo> result(raw_result, isolate);
return Local<Script>(ToApi<Script>(result)); return Local<Script>(ToApi<Script>(result));
} }
......
...@@ -619,8 +619,7 @@ class CodeFlusher { ...@@ -619,8 +619,7 @@ class CodeFlusher {
} }
void AddCandidate(JSFunction* function) { void AddCandidate(JSFunction* function) {
ASSERT(function->unchecked_code() == ASSERT(function->code() == function->shared()->code());
function->unchecked_shared()->unchecked_code());
SetNextCandidate(function, jsfunction_candidates_head_); SetNextCandidate(function, jsfunction_candidates_head_);
jsfunction_candidates_head_ = function; jsfunction_candidates_head_ = function;
...@@ -640,15 +639,15 @@ class CodeFlusher { ...@@ -640,15 +639,15 @@ class CodeFlusher {
while (candidate != NULL) { while (candidate != NULL) {
next_candidate = GetNextCandidate(candidate); next_candidate = GetNextCandidate(candidate);
SharedFunctionInfo* shared = candidate->unchecked_shared(); SharedFunctionInfo* shared = candidate->shared();
Code* code = shared->unchecked_code(); Code* code = shared->code();
MarkBit code_mark = Marking::MarkBitFrom(code); MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) { if (!code_mark.Get()) {
shared->set_code(lazy_compile); shared->set_code(lazy_compile);
candidate->set_code(lazy_compile); candidate->set_code(lazy_compile);
} else { } else {
candidate->set_code(shared->unchecked_code()); candidate->set_code(shared->code());
} }
// We are in the middle of a GC cycle so the write barrier in the code // We are in the middle of a GC cycle so the write barrier in the code
...@@ -674,7 +673,7 @@ class CodeFlusher { ...@@ -674,7 +673,7 @@ class CodeFlusher {
next_candidate = GetNextCandidate(candidate); next_candidate = GetNextCandidate(candidate);
SetNextCandidate(candidate, NULL); SetNextCandidate(candidate, NULL);
Code* code = candidate->unchecked_code(); Code* code = candidate->code();
MarkBit code_mark = Marking::MarkBitFrom(code); MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) { if (!code_mark.Get()) {
candidate->set_code(lazy_compile); candidate->set_code(lazy_compile);
...@@ -702,7 +701,7 @@ class CodeFlusher { ...@@ -702,7 +701,7 @@ class CodeFlusher {
static SharedFunctionInfo** GetNextCandidateField( static SharedFunctionInfo** GetNextCandidateField(
SharedFunctionInfo* candidate) { SharedFunctionInfo* candidate) {
Code* code = candidate->unchecked_code(); Code* code = candidate->code();
return reinterpret_cast<SharedFunctionInfo**>( return reinterpret_cast<SharedFunctionInfo**>(
code->address() + Code::kNextCodeFlushingCandidateOffset); code->address() + Code::kNextCodeFlushingCandidateOffset);
} }
...@@ -1037,12 +1036,12 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1037,12 +1036,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
inline static bool IsCompiled(JSFunction* function) { inline static bool IsCompiled(JSFunction* function) {
return function->unchecked_code() != return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
} }
inline static bool IsCompiled(SharedFunctionInfo* function) { inline static bool IsCompiled(SharedFunctionInfo* function) {
return function->unchecked_code() != return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile); function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
} }
...@@ -1051,8 +1050,7 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1051,8 +1050,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Code is either on stack, in compilation cache or referenced // Code is either on stack, in compilation cache or referenced
// by optimized version of function. // by optimized version of function.
MarkBit code_mark = MarkBit code_mark = Marking::MarkBitFrom(function->code());
Marking::MarkBitFrom(function->unchecked_code());
if (code_mark.Get()) { if (code_mark.Get()) {
if (!Marking::MarkBitFrom(shared_info).Get()) { if (!Marking::MarkBitFrom(shared_info).Get()) {
shared_info->set_code_age(0); shared_info->set_code_age(0);
...@@ -1061,7 +1059,7 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1061,7 +1059,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
} }
// We do not flush code for optimized functions. // We do not flush code for optimized functions.
if (function->code() != shared_info->unchecked_code()) { if (function->code() != shared_info->code()) {
return false; return false;
} }
...@@ -1072,7 +1070,7 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1072,7 +1070,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Code is either on stack, in compilation cache or referenced // Code is either on stack, in compilation cache or referenced
// by optimized version of function. // by optimized version of function.
MarkBit code_mark = MarkBit code_mark =
Marking::MarkBitFrom(shared_info->unchecked_code()); Marking::MarkBitFrom(shared_info->code());
if (code_mark.Get()) { if (code_mark.Get()) {
return false; return false;
} }
...@@ -1085,16 +1083,24 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1085,16 +1083,24 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// We never flush code for Api functions. // We never flush code for Api functions.
Object* function_data = shared_info->function_data(); Object* function_data = shared_info->function_data();
if (function_data->IsFunctionTemplateInfo()) return false; if (function_data->IsFunctionTemplateInfo()) {
return false;
}
// Only flush code for functions. // Only flush code for functions.
if (shared_info->code()->kind() != Code::FUNCTION) return false; if (shared_info->code()->kind() != Code::FUNCTION) {
return false;
}
// Function must be lazy compilable. // Function must be lazy compilable.
if (!shared_info->allows_lazy_compilation()) return false; if (!shared_info->allows_lazy_compilation()) {
return false;
}
// If this is a full script wrapped in a function we do not flush the code. if (shared_info->is_toplevel()) {
if (shared_info->is_toplevel()) return false; if (shared_info->is_toplevel()) {
return false;
}
// Age this shared function info. // Age this shared function info.
if (shared_info->code_age() < kCodeAgeThreshold) { if (shared_info->code_age() < kCodeAgeThreshold) {
...@@ -1267,30 +1273,12 @@ class StaticMarkingVisitor : public StaticVisitorBase { ...@@ -1267,30 +1273,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
} }
if (!flush_code_candidate) { if (!flush_code_candidate) {
Code* code = jsfunction->unchecked_shared()->unchecked_code(); Code* code = jsfunction->shared()->code();
MarkBit code_mark = Marking::MarkBitFrom(code); MarkBit code_mark = Marking::MarkBitFrom(code);
heap->mark_compact_collector()->MarkObject(code, code_mark); collector->MarkObject(code, code_mark);
if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) { if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
// For optimized functions we should retain both non-optimized version collector->MarkInlinedFunctionsCode(jsfunction->code());
// of its code and non-optimized version of all inlined functions.
// This is required to support bailing out from inlined code.
DeoptimizationInputData* data =
reinterpret_cast<DeoptimizationInputData*>(
jsfunction->unchecked_code()->unchecked_deoptimization_data());
FixedArray* literals = data->UncheckedLiteralArray();
for (int i = 0, count = data->InlinedFunctionCount()->value();
i < count;
i++) {
JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
Code* inlined_code = inlined->unchecked_shared()->unchecked_code();
MarkBit inlined_code_mark =
Marking::MarkBitFrom(inlined_code);
heap->mark_compact_collector()->MarkObject(
inlined_code, inlined_code_mark);
}
} }
} }
...@@ -1415,11 +1403,7 @@ class CodeMarkingVisitor : public ThreadVisitor { ...@@ -1415,11 +1403,7 @@ class CodeMarkingVisitor : public ThreadVisitor {
: collector_(collector) {} : collector_(collector) {}
void VisitThread(Isolate* isolate, ThreadLocalTop* top) { void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) { collector_->PrepareThreadForCodeFlushing(isolate, top);
Code* code = it.frame()->unchecked_code();
MarkBit code_bit = Marking::MarkBitFrom(code);
collector_->MarkObject(it.frame()->unchecked_code(), code_bit);
}
} }
private: private:
...@@ -1441,8 +1425,8 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { ...@@ -1441,8 +1425,8 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
if (obj->IsSharedFunctionInfo()) { if (obj->IsSharedFunctionInfo()) {
SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
MarkBit shared_mark = Marking::MarkBitFrom(shared); MarkBit shared_mark = Marking::MarkBitFrom(shared);
MarkBit code_mark = Marking::MarkBitFrom(shared->unchecked_code()); MarkBit code_mark = Marking::MarkBitFrom(shared->code());
collector_->MarkObject(shared->unchecked_code(), code_mark); collector_->MarkObject(shared->code(), code_mark);
collector_->MarkObject(shared, shared_mark); collector_->MarkObject(shared, shared_mark);
} }
} }
...@@ -1452,6 +1436,44 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { ...@@ -1452,6 +1436,44 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
}; };
void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
// For optimized functions we should retain both non-optimized version
// of its code and non-optimized version of all inlined functions.
// This is required to support bailing out from inlined code.
DeoptimizationInputData* data =
DeoptimizationInputData::cast(code->deoptimization_data());
FixedArray* literals = data->LiteralArray();
for (int i = 0, count = data->InlinedFunctionCount()->value();
i < count;
i++) {
JSFunction* inlined = JSFunction::cast(literals->get(i));
Code* inlined_code = inlined->shared()->code();
MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
MarkObject(inlined_code, inlined_code_mark);
}
}
void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
ThreadLocalTop* top) {
for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
// Note: for the frame that has a pending lazy deoptimization
// StackFrame::unchecked_code will return a non-optimized code object for
// the outermost function and StackFrame::LookupCode will return
// actual optimized code object.
StackFrame* frame = it.frame();
Code* code = frame->unchecked_code();
MarkBit code_mark = Marking::MarkBitFrom(code);
MarkObject(code, code_mark);
if (frame->is_optimized()) {
MarkInlinedFunctionsCode(frame->LookupCode());
}
}
}
void MarkCompactCollector::PrepareForCodeFlushing() { void MarkCompactCollector::PrepareForCodeFlushing() {
ASSERT(heap() == Isolate::Current()->heap()); ASSERT(heap() == Isolate::Current()->heap());
...@@ -1479,11 +1501,8 @@ void MarkCompactCollector::PrepareForCodeFlushing() { ...@@ -1479,11 +1501,8 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
// Make sure we are not referencing the code from the stack. // Make sure we are not referencing the code from the stack.
ASSERT(this == heap()->mark_compact_collector()); ASSERT(this == heap()->mark_compact_collector());
for (StackFrameIterator it; !it.done(); it.Advance()) { PrepareThreadForCodeFlushing(heap()->isolate(),
Code* code = it.frame()->unchecked_code(); heap()->isolate()->thread_local_top());
MarkBit code_mark = Marking::MarkBitFrom(code);
MarkObject(code, code_mark);
}
// Iterate the archived stacks in all threads to check if // Iterate the archived stacks in all threads to check if
// the code is referenced. // the code is referenced.
......
...@@ -383,6 +383,10 @@ class SlotsBuffer { ...@@ -383,6 +383,10 @@ class SlotsBuffer {
}; };
// Defined in isolate.h.
class ThreadLocalTop;
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// Mark-Compact collector // Mark-Compact collector
class MarkCompactCollector { class MarkCompactCollector {
...@@ -603,6 +607,14 @@ class MarkCompactCollector { ...@@ -603,6 +607,14 @@ class MarkCompactCollector {
friend class CodeMarkingVisitor; friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor; friend class SharedFunctionInfoMarkingVisitor;
// Mark non-optimized code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
void MarkInlinedFunctionsCode(Code* code);
// Mark code objects that are active on the stack to prevent them
// from being flushed.
void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
void PrepareForCodeFlushing(); void PrepareForCodeFlushing();
// Marking operations for objects reachable from roots. // Marking operations for objects reachable from roots.
......
...@@ -3748,11 +3748,6 @@ class DeoptimizationInputData: public FixedArray { ...@@ -3748,11 +3748,6 @@ class DeoptimizationInputData: public FixedArray {
DEFINE_ELEMENT_ACCESSORS(OsrAstId, Smi) DEFINE_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_ELEMENT_ACCESSORS(OsrPcOffset, Smi) DEFINE_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
// Unchecked accessor to be used during GC.
FixedArray* UncheckedLiteralArray() {
return reinterpret_cast<FixedArray*>(get(kLiteralArrayIndex));
}
#undef DEFINE_ELEMENT_ACCESSORS #undef DEFINE_ELEMENT_ACCESSORS
// Accessors for elements of the ith deoptimization entry. // Accessors for elements of the ith deoptimization entry.
......
...@@ -72,7 +72,7 @@ class ThreadState { ...@@ -72,7 +72,7 @@ class ThreadState {
}; };
// Defined in top.h // Defined in isolate.h.
class ThreadLocalTop; class ThreadLocalTop;
......
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-gc --allow-natives-syntax
// Check that we are not flushing code for inlined functions that
// have a pending lazy deoptimization on the stack.
// Lazily deoptimizes the optimized caller (outer) while it is still on the
// stack, then runs repeated GCs so the code flusher gets a chance to
// (incorrectly, before the fix) flush the non-optimized code of the
// inlined functions.
function deopt() {
  try { } catch (e) { } // Avoid inlining.
  %DeoptimizeFunction(outer);
  for (var i = 0; i < 10; i++) gc(); // Force code flushing.
}
// Optimization target; when optimized, inner() is presumably inlined into
// this function (that is the scenario the regression test exercises).
function outer(should_deopt) {
  inner(should_deopt);
}
// Small function intended to be inlined into outer(); triggers the
// deopt-and-flush path only when asked to.
function inner(should_deopt) {
  if (should_deopt) deopt();
}
// Warm up outer on the non-deoptimizing path so type feedback is collected.
outer(false);
outer(false);
// Optimize outer, then take the path that deoptimizes it mid-call and
// forces code flushing while the deoptimization is still pending.
%OptimizeFunctionOnNextCall(outer);
outer(true);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment