Commit 8f6bca54 authored by mstarzinger, committed by Commit bot

Remove overzealous checking of --cache-optimized-code flag.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1206803003

Cr-Commit-Position: refs/heads/master@{#29284}
parent e21f1228
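The pattern in every hunk below is the same: call sites stop guarding lookups with FLAG_cache_optimized_code and rely on the optimized code map itself, which is simply never populated when caching is off. The following is a minimal, self-contained C++ sketch of that idea, using illustrative names only (CodeCache, Lookup, GetCachedCode are not V8 identifiers):

#include <cstdio>
#include <string>
#include <unordered_map>

// Illustrative stand-in for an optimized-code cache. When caching is
// disabled nothing is ever inserted, so the map stays empty and every
// lookup naturally misses; no separate flag check is needed at call sites.
class CodeCache {
 public:
  void Insert(int context_id, std::string code) {
    entries_[context_id] = std::move(code);
  }
  // Returns nullptr on a miss, which is also what an empty cache yields.
  const std::string* Lookup(int context_id) const {
    auto it = entries_.find(context_id);
    return it == entries_.end() ? nullptr : &it->second;
  }

 private:
  std::unordered_map<int, std::string> entries_;
};

// Before the change, call sites looked roughly like:
//   if (FLAG_cache_optimized_code) { if (auto* c = cache.Lookup(id)) ... }
// After the change, the lookup alone is sufficient:
const std::string* GetCachedCode(const CodeCache& cache, int context_id) {
  return cache.Lookup(context_id);
}

int main() {
  CodeCache cache;
  std::printf("empty cache: %s\n", GetCachedCode(cache, 1) ? "hit" : "(none)");
  cache.Insert(1, "optimized code for context 1");
  std::printf("after caching: %s\n", GetCachedCode(cache, 1)->c_str());
  return 0;
}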
@@ -1818,14 +1818,10 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                         context());
-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  if (FLAG_cache_optimized_code) {
-    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
-  } else {
-    BuildInstallCode(js_function, shared_info);
-  }
+  // Initialize the code pointer in the function to be the one found in the
+  // shared function info object. But first check if there is an optimized
+  // version for our context.
+  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
   return js_function;
 }
...
@@ -691,18 +691,16 @@ MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
-  if (FLAG_cache_optimized_code) {
-    Handle<SharedFunctionInfo> shared(function->shared());
-    DisallowHeapAllocation no_gc;
-    CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
-        function->context()->native_context(), osr_ast_id);
-    if (cached.code != nullptr) {
-      // Caching of optimized code enabled and optimized code found.
-      if (cached.literals != nullptr) function->set_literals(cached.literals);
-      DCHECK(!cached.code->marked_for_deoptimization());
-      DCHECK(function->shared()->is_compiled());
-      return Handle<Code>(cached.code);
-    }
+  Handle<SharedFunctionInfo> shared(function->shared());
+  DisallowHeapAllocation no_gc;
+  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
+      function->context()->native_context(), osr_ast_id);
+  if (cached.code != nullptr) {
+    // Caching of optimized code enabled and optimized code found.
+    if (cached.literals != nullptr) function->set_literals(cached.literals);
+    DCHECK(!cached.code->marked_for_deoptimization());
+    DCHECK(function->shared()->is_compiled());
+    return Handle<Code>(cached.code);
   }
   return MaybeHandle<Code>();
 }
...
@@ -899,7 +899,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !shared->optimized_code_map()->IsSmi()) {
         shared->ClearOptimizedCodeMap();
       }
@@ -947,7 +947,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !candidate->optimized_code_map()->IsSmi()) {
         candidate->ClearOptimizedCodeMap();
       }
...
@@ -409,14 +409,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
   if (FLAG_cleanup_code_caches_at_gc) {
     shared->ClearTypeFeedbackInfoAtGCTime();
   }
-  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+  if (FLAG_flush_optimized_code_cache &&
       !shared->optimized_code_map()->IsSmi()) {
     // Always flush the optimized code map if requested by flag.
     shared->ClearOptimizedCodeMap();
   }
   MarkCompactCollector* collector = heap->mark_compact_collector();
   if (collector->is_code_flushing_enabled()) {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Add the shared function info holding an optimized code map to
       // the code flusher for processing of code maps after marking.
       collector->code_flusher()->AddOptimizedCodeMap(shared);
@@ -438,7 +438,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
       return;
     }
   } else {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Flush optimized code map on major GCs without code flushing,
       // needed because cached code doesn't contain breakpoints.
       shared->ClearOptimizedCodeMap();
...
@@ -10647,7 +10647,6 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
     Context* native_context, BailoutId osr_ast_id) {
   DisallowHeapAllocation no_gc;
   DCHECK(native_context->IsNativeContext());
-  if (!FLAG_cache_optimized_code) return {nullptr, nullptr};
   Object* value = optimized_code_map();
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);
...
@@ -371,11 +371,9 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
 // Test that optimized code for different closures is actually shared
 // immediately by the FastNewClosureStub when run in the same context.
 TEST(OptimizedCodeSharing) {
-  // Skip test if --cache-optimized-code is not activated by default because
-  // FastNewClosureStub that is baked into the snapshot is incorrect.
-  if (!FLAG_cache_optimized_code) return;
   FLAG_stress_compaction = false;
   FLAG_allow_natives_syntax = true;
+  FLAG_cache_optimized_code = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   for (int i = 0; i < 10; i++) {
...