Commit 8f6bca54 authored by mstarzinger, committed by Commit bot

Remove overzealous checking of --cache-optimized-code flag.
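
The read-side checks are redundant as long as the write side is guarded:
with --cache-optimized-code off, the optimized code map is never populated
(it stays its initial Smi sentinel), so the IsSmi() tests and map searches
below already miss on their own. A minimal standalone sketch of that
invariant follows; SharedInfo, flag_cache_optimized_code and the std::map
"code map" are illustrative stand-ins, not V8 API.

// Standalone sketch (not V8 code) of the invariant behind this change:
// only the write side needs the flag, because a map that is never
// written to makes every read-side lookup a miss by itself.
#include <cassert>
#include <map>
#include <string>

static bool flag_cache_optimized_code = false;  // stand-in for the real flag

struct SharedInfo {
  std::map<std::string, int> optimized_code_map;  // context -> cached "code"

  void AddToOptimizedCodeMap(const std::string& context, int code) {
    // The single guard that matters: never populate the map when disabled.
    if (!flag_cache_optimized_code) return;
    optimized_code_map[context] = code;
  }

  // Read side needs no flag check: an empty map cannot produce a hit,
  // which is what lets the removed flag conjuncts and the early return
  // in the search path go away.
  const int* SearchOptimizedCodeMap(const std::string& context) const {
    auto it = optimized_code_map.find(context);
    return it == optimized_code_map.end() ? nullptr : &it->second;
  }
};

int main() {
  SharedInfo shared;
  shared.AddToOptimizedCodeMap("native_context_0", 42);  // dropped: flag off
  assert(shared.SearchOptimizedCodeMap("native_context_0") == nullptr);

  flag_cache_optimized_code = true;
  shared.AddToOptimizedCodeMap("native_context_0", 42);
  assert(*shared.SearchOptimizedCodeMap("native_context_0") == 42);
  return 0;
}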

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1206803003

Cr-Commit-Position: refs/heads/master@{#29284}
parent e21f1228
@@ -1818,14 +1818,10 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                         context());
-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  if (FLAG_cache_optimized_code) {
-    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
-  } else {
-    BuildInstallCode(js_function, shared_info);
-  }
+  // Initialize the code pointer in the function to be the one found in the
+  // shared function info object. But first check if there is an optimized
+  // version for our context.
+  BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
   return js_function;
 }
@@ -691,18 +691,16 @@ MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
-  if (FLAG_cache_optimized_code) {
-    Handle<SharedFunctionInfo> shared(function->shared());
-    DisallowHeapAllocation no_gc;
-    CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
-        function->context()->native_context(), osr_ast_id);
-    if (cached.code != nullptr) {
-      // Caching of optimized code enabled and optimized code found.
-      if (cached.literals != nullptr) function->set_literals(cached.literals);
-      DCHECK(!cached.code->marked_for_deoptimization());
-      DCHECK(function->shared()->is_compiled());
-      return Handle<Code>(cached.code);
-    }
-  }
+  Handle<SharedFunctionInfo> shared(function->shared());
+  DisallowHeapAllocation no_gc;
+  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
+      function->context()->native_context(), osr_ast_id);
+  if (cached.code != nullptr) {
+    // Caching of optimized code enabled and optimized code found.
+    if (cached.literals != nullptr) function->set_literals(cached.literals);
+    DCHECK(!cached.code->marked_for_deoptimization());
+    DCHECK(function->shared()->is_compiled());
+    return Handle<Code>(cached.code);
+  }
   return MaybeHandle<Code>();
 }
@@ -899,7 +899,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
       PrintF(" - age: %d]\n", code->GetAge());
     }
     // Always flush the optimized code map if requested by flag.
-    if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+    if (FLAG_flush_optimized_code_cache &&
         !shared->optimized_code_map()->IsSmi()) {
       shared->ClearOptimizedCodeMap();
     }
@@ -947,7 +947,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
       PrintF(" - age: %d]\n", code->GetAge());
     }
     // Always flush the optimized code map if requested by flag.
-    if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+    if (FLAG_flush_optimized_code_cache &&
         !candidate->optimized_code_map()->IsSmi()) {
       candidate->ClearOptimizedCodeMap();
     }
@@ -409,14 +409,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
   if (FLAG_cleanup_code_caches_at_gc) {
     shared->ClearTypeFeedbackInfoAtGCTime();
   }
-  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+  if (FLAG_flush_optimized_code_cache &&
       !shared->optimized_code_map()->IsSmi()) {
     // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
@@ -438,7 +438,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
       return;
     }
   } else {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Flush optimized code map on major GCs without code flushing,
       // needed because cached code doesn't contain breakpoints.
       shared->ClearOptimizedCodeMap();
@@ -10647,7 +10647,6 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
     Context* native_context, BailoutId osr_ast_id) {
   DisallowHeapAllocation no_gc;
   DCHECK(native_context->IsNativeContext());
-  if (!FLAG_cache_optimized_code) return {nullptr, nullptr};
   Object* value = optimized_code_map();
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);
@@ -371,11 +371,9 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
 // Test that optimized code for different closures is actually shared
 // immediately by the FastNewClosureStub when run in the same context.
 TEST(OptimizedCodeSharing) {
-  // Skip test if --cache-optimized-code is not activated by default because
-  // FastNewClosureStub that is baked into the snapshot is incorrect.
-  if (!FLAG_cache_optimized_code) return;
   FLAG_stress_compaction = false;
   FLAG_allow_natives_syntax = true;
+  FLAG_cache_optimized_code = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   for (int i = 0; i < 10; i++) {