Commit 76bee806 authored by mvstanton, committed by Commit bot

Remove FLAG_cache_optimized_code because it is always on.

The data structure that holds the optimized code is becoming essential for
additional functionality, too.

R=mstarzinger@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1399033002

Cr-Commit-Position: refs/heads/master@{#31216}
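
[Context note, not part of the commit: SharedFunctionInfo::AddToOptimizedCodeMap, called in the hunks below, caches an optimized Code object together with a LiteralsArray per native context and OSR ast id. The following is a minimal standalone sketch of that keying scheme only; V8's real map is a flat FixedArray hanging off the SharedFunctionInfo, and every name here (OptimizedCodeMapModel, CacheEntry) is a hypothetical stand-in, not V8 API.]

    // Assumption-laden model of the optimized code map keying scheme.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <optional>
    #include <string>
    #include <utility>

    // Stand-in for the cached pair (optimized Code, LiteralsArray).
    struct CacheEntry {
      std::string code;
      std::string literals;
    };

    // One map per shared function, keyed by (native context, OSR ast id),
    // mirroring the parameters of SharedFunctionInfo::AddToOptimizedCodeMap.
    class OptimizedCodeMapModel {
     public:
      void Add(uint64_t native_context_id, int osr_ast_id, CacheEntry entry) {
        entries_[{native_context_id, osr_ast_id}] = std::move(entry);
      }
      std::optional<CacheEntry> Lookup(uint64_t native_context_id,
                                       int osr_ast_id) const {
        auto it = entries_.find({native_context_id, osr_ast_id});
        if (it == entries_.end()) return std::nullopt;
        return it->second;
      }

     private:
      std::map<std::pair<uint64_t, int>, CacheEntry> entries_;
    };

    int main() {
      OptimizedCodeMapModel map;
      // BailoutId::None() means "not an OSR entry"; modeled here as -1.
      map.Add(/*native_context_id=*/1, /*osr_ast_id=*/-1,
              {"<code>", "<literals>"});
      std::cout << (map.Lookup(1, -1) ? "hit" : "miss") << "\n";  // "hit"
      return 0;
    }

With the flag gone, this cache is populated unconditionally, which is what the compiler.cc and factory.cc hunks below implement.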
parent c84c96ca
@@ -811,13 +811,11 @@ static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
   if (function->shared()->bound()) return;
   // Cache optimized context-specific code.
-  if (FLAG_cache_optimized_code) {
-    Handle<SharedFunctionInfo> shared(function->shared());
-    Handle<LiteralsArray> literals(function->literals());
-    Handle<Context> native_context(function->context()->native_context());
-    SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
-                                              literals, info->osr_ast_id());
-  }
+  Handle<SharedFunctionInfo> shared(function->shared());
+  Handle<LiteralsArray> literals(function->literals());
+  Handle<Context> native_context(function->context()->native_context());
+  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
+                                            literals, info->osr_ast_id());

   // Do not cache (native) context-independent code compiled for OSR.
   if (code->is_turbofanned() && info->is_osr()) return;
......
@@ -1346,12 +1346,11 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
         LiteralsArray::New(isolate(), handle(info->feedback_vector()),
                            number_of_literals, pretenure);
     result->set_literals(*literals);
     // Cache context-specific literals.
-    if (FLAG_cache_optimized_code) {
-      Handle<Context> native_context(context->native_context());
-      SharedFunctionInfo::AddToOptimizedCodeMap(
-          info, native_context, undefined_value(), literals, BailoutId::None());
-    }
+    Handle<Context> native_context(context->native_context());
+    SharedFunctionInfo::AddToOptimizedCodeMap(
+        info, native_context, undefined_value(), literals, BailoutId::None());
   }
   return result;
......
@@ -373,7 +373,6 @@ DEFINE_INT(stress_runs, 0, "number of stress runs")
 DEFINE_BOOL(lookup_sample_by_shared, true,
             "when picking a function to optimize, watch for shared function "
             "info, not JSFunction itself")
-DEFINE_BOOL(cache_optimized_code, true, "cache optimized code for closures")
 DEFINE_BOOL(flush_optimized_code_cache, true,
             "flushes the cache of optimized code for closures on every GC")
 DEFINE_BOOL(inline_construct, true, "inline constructor calls")
......
@@ -366,7 +366,6 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
 TEST(OptimizedCodeSharing1) {
   FLAG_stress_compaction = false;
   FLAG_allow_natives_syntax = true;
-  FLAG_cache_optimized_code = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   for (int i = 0; i < 3; i++) {
@@ -399,7 +398,6 @@ TEST(OptimizedCodeSharing1) {
 TEST(OptimizedCodeSharing2) {
   if (FLAG_stress_compaction) return;
   FLAG_allow_natives_syntax = true;
-  FLAG_cache_optimized_code = true;
   FLAG_native_context_specialization = false;
   FLAG_turbo_cache_shared_code = true;
   const char* flag = "--turbo-filter=*";
@@ -455,7 +453,6 @@ TEST(OptimizedCodeSharing2) {
 TEST(OptimizedCodeSharing3) {
   if (FLAG_stress_compaction) return;
   FLAG_allow_natives_syntax = true;
-  FLAG_cache_optimized_code = true;
   FLAG_native_context_specialization = false;
   FLAG_turbo_cache_shared_code = true;
   const char* flag = "--turbo-filter=*";
......
@@ -4319,115 +4319,6 @@ TEST(Regress169928) {
 }


-TEST(Regress168801) {
-  if (i::FLAG_never_compact) return;
-  i::FLAG_always_compact = true;
-  i::FLAG_cache_optimized_code = false;
-  i::FLAG_allow_natives_syntax = true;
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  HandleScope scope(isolate);
-
-  // Perform one initial GC to enable code flushing.
-  heap->CollectAllGarbage();
-
-  // Ensure the code ends up on an evacuation candidate.
-  SimulateFullSpace(heap->code_space());
-
-  // Prepare an unoptimized function that is eligible for code flushing.
-  Handle<JSFunction> function;
-  {
-    HandleScope inner_scope(isolate);
-    CompileRun("function mkClosure() {"
-               "  return function(x) { return x + 1; };"
-               "}"
-               "var f = mkClosure();"
-               "f(1); f(2);");
-    Handle<JSFunction> f = v8::Utils::OpenHandle(
-        *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
-    CHECK(f->is_compiled());
-    const int kAgingThreshold = 6;
-    for (int i = 0; i < kAgingThreshold; i++) {
-      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
-    }
-    function = inner_scope.CloseAndEscape(handle(*f, isolate));
-  }
-
-  // Simulate incremental marking so that unoptimized function is enqueued as a
-  // candidate for code flushing. The shared function info however will not be
-  // explicitly enqueued.
-  SimulateIncrementalMarking(heap);
-
-  // Now optimize the function so that it is taken off the candidate list.
-  {
-    HandleScope inner_scope(isolate);
-    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
-  }
-
-  // This cycle will bust the heap and subsequent cycles will go ballistic.
-  heap->CollectAllGarbage();
-  heap->CollectAllGarbage();
-}
-
-
-TEST(Regress173458) {
-  if (i::FLAG_never_compact) return;
-  i::FLAG_always_compact = true;
-  i::FLAG_cache_optimized_code = false;
-  i::FLAG_allow_natives_syntax = true;
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  HandleScope scope(isolate);
-
-  // Perform one initial GC to enable code flushing.
-  heap->CollectAllGarbage();
-
-  // Ensure the code ends up on an evacuation candidate.
-  SimulateFullSpace(heap->code_space());
-
-  // Prepare an unoptimized function that is eligible for code flushing.
-  Handle<JSFunction> function;
-  {
-    HandleScope inner_scope(isolate);
-    CompileRun("function mkClosure() {"
-               "  return function(x) { return x + 1; };"
-               "}"
-               "var f = mkClosure();"
-               "f(1); f(2);");
-    Handle<JSFunction> f = v8::Utils::OpenHandle(
-        *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
-    CHECK(f->is_compiled());
-    const int kAgingThreshold = 6;
-    for (int i = 0; i < kAgingThreshold; i++) {
-      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
-    }
-    function = inner_scope.CloseAndEscape(handle(*f, isolate));
-  }
-
-  // Simulate incremental marking so that unoptimized function is enqueued as a
-  // candidate for code flushing. The shared function info however will not be
-  // explicitly enqueued.
-  SimulateIncrementalMarking(heap);
-
-  // Now enable the debugger which in turn will disable code flushing.
-  CHECK(isolate->debug()->Load());
-
-  // This cycle will bust the heap and subsequent cycles will go ballistic.
-  heap->CollectAllGarbage();
-  heap->CollectAllGarbage();
-}
-
-
 #ifdef DEBUG
 TEST(Regress513507) {
   i::FLAG_flush_optimized_code_cache = false;
......
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-// Flags: --allow-natives-syntax --cache-optimized-code
+// Flags: --allow-natives-syntax

 function bozo() {};
 function MakeClosure() {
......