Commit 76bee806 authored by mvstanton's avatar mvstanton Committed by Commit bot

Remove FLAG_cache_optimized_code because we always have it on.

The data structure that holds the optimized code is becoming essential for
additional functionality, too.

R=mstarzinger@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1399033002

Cr-Commit-Position: refs/heads/master@{#31216}
parent c84c96ca
...@@ -811,13 +811,11 @@ static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) { ...@@ -811,13 +811,11 @@ static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
if (function->shared()->bound()) return; if (function->shared()->bound()) return;
// Cache optimized context-specific code. // Cache optimized context-specific code.
if (FLAG_cache_optimized_code) { Handle<SharedFunctionInfo> shared(function->shared());
Handle<SharedFunctionInfo> shared(function->shared()); Handle<LiteralsArray> literals(function->literals());
Handle<LiteralsArray> literals(function->literals()); Handle<Context> native_context(function->context()->native_context());
Handle<Context> native_context(function->context()->native_context()); SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code, literals, info->osr_ast_id());
literals, info->osr_ast_id());
}
// Do not cache (native) context-independent code compiled for OSR. // Do not cache (native) context-independent code compiled for OSR.
if (code->is_turbofanned() && info->is_osr()) return; if (code->is_turbofanned() && info->is_osr()) return;
......
...@@ -1346,12 +1346,11 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( ...@@ -1346,12 +1346,11 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
LiteralsArray::New(isolate(), handle(info->feedback_vector()), LiteralsArray::New(isolate(), handle(info->feedback_vector()),
number_of_literals, pretenure); number_of_literals, pretenure);
result->set_literals(*literals); result->set_literals(*literals);
// Cache context-specific literals. // Cache context-specific literals.
if (FLAG_cache_optimized_code) { Handle<Context> native_context(context->native_context());
Handle<Context> native_context(context->native_context()); SharedFunctionInfo::AddToOptimizedCodeMap(
SharedFunctionInfo::AddToOptimizedCodeMap( info, native_context, undefined_value(), literals, BailoutId::None());
info, native_context, undefined_value(), literals, BailoutId::None());
}
} }
return result; return result;
......
...@@ -373,7 +373,6 @@ DEFINE_INT(stress_runs, 0, "number of stress runs") ...@@ -373,7 +373,6 @@ DEFINE_INT(stress_runs, 0, "number of stress runs")
DEFINE_BOOL(lookup_sample_by_shared, true, DEFINE_BOOL(lookup_sample_by_shared, true,
"when picking a function to optimize, watch for shared function " "when picking a function to optimize, watch for shared function "
"info, not JSFunction itself") "info, not JSFunction itself")
DEFINE_BOOL(cache_optimized_code, true, "cache optimized code for closures")
DEFINE_BOOL(flush_optimized_code_cache, true, DEFINE_BOOL(flush_optimized_code_cache, true,
"flushes the cache of optimized code for closures on every GC") "flushes the cache of optimized code for closures on every GC")
DEFINE_BOOL(inline_construct, true, "inline constructor calls") DEFINE_BOOL(inline_construct, true, "inline constructor calls")
......
...@@ -366,7 +366,6 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) { ...@@ -366,7 +366,6 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
TEST(OptimizedCodeSharing1) { TEST(OptimizedCodeSharing1) {
FLAG_stress_compaction = false; FLAG_stress_compaction = false;
FLAG_allow_natives_syntax = true; FLAG_allow_natives_syntax = true;
FLAG_cache_optimized_code = true;
CcTest::InitializeVM(); CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
for (int i = 0; i < 3; i++) { for (int i = 0; i < 3; i++) {
...@@ -399,7 +398,6 @@ TEST(OptimizedCodeSharing1) { ...@@ -399,7 +398,6 @@ TEST(OptimizedCodeSharing1) {
TEST(OptimizedCodeSharing2) { TEST(OptimizedCodeSharing2) {
if (FLAG_stress_compaction) return; if (FLAG_stress_compaction) return;
FLAG_allow_natives_syntax = true; FLAG_allow_natives_syntax = true;
FLAG_cache_optimized_code = true;
FLAG_native_context_specialization = false; FLAG_native_context_specialization = false;
FLAG_turbo_cache_shared_code = true; FLAG_turbo_cache_shared_code = true;
const char* flag = "--turbo-filter=*"; const char* flag = "--turbo-filter=*";
...@@ -455,7 +453,6 @@ TEST(OptimizedCodeSharing2) { ...@@ -455,7 +453,6 @@ TEST(OptimizedCodeSharing2) {
TEST(OptimizedCodeSharing3) { TEST(OptimizedCodeSharing3) {
if (FLAG_stress_compaction) return; if (FLAG_stress_compaction) return;
FLAG_allow_natives_syntax = true; FLAG_allow_natives_syntax = true;
FLAG_cache_optimized_code = true;
FLAG_native_context_specialization = false; FLAG_native_context_specialization = false;
FLAG_turbo_cache_shared_code = true; FLAG_turbo_cache_shared_code = true;
const char* flag = "--turbo-filter=*"; const char* flag = "--turbo-filter=*";
......
...@@ -4319,115 +4319,6 @@ TEST(Regress169928) { ...@@ -4319,115 +4319,6 @@ TEST(Regress169928) {
} }
// Regression test: optimizing a function that was already enqueued as a
// code-flushing candidate must take it off the candidate list; otherwise the
// subsequent full GCs corrupt the heap. Statement order (simulated marking
// before optimization, then two full GCs) is essential to reproduce the bug.
TEST(Regress168801) {
  // Compaction must be possible and forced so code lands on evacuation
  // candidates; the optimized-code cache is disabled to exercise flushing.
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());

    // Age the code past the flushing threshold; alternating parity mimics
    // successive mark-compact cycles. NOTE(review): kAgingThreshold = 6 is
    // presumably kept in sync with the flusher's threshold — confirm.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Escape the handle so the function stays rooted after inner_scope dies.
    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking(heap);

  // Now optimize the function so that it is taken off the candidate list.
  {
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
  }

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
}
// Regression test: loading the debugger (which disables code flushing) after a
// function has already been enqueued as a flushing candidate must not leave
// the heap in an inconsistent state. Mirrors Regress168801 except the
// candidate is invalidated by debugger load instead of optimization.
TEST(Regress173458) {
  // Compaction must be possible and forced so code lands on evacuation
  // candidates; the optimized-code cache is disabled to exercise flushing.
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());

    // Age the code past the flushing threshold; alternating parity mimics
    // successive mark-compact cycles. NOTE(review): kAgingThreshold = 6 is
    // presumably kept in sync with the flusher's threshold — confirm.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Escape the handle so the function stays rooted after inner_scope dies.
    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking(heap);

  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
}
#ifdef DEBUG #ifdef DEBUG
TEST(Regress513507) { TEST(Regress513507) {
i::FLAG_flush_optimized_code_cache = false; i::FLAG_flush_optimized_code_cache = false;
......
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --cache-optimized-code // Flags: --allow-natives-syntax
function bozo() {}; function bozo() {};
function MakeClosure() { function MakeClosure() {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment