Commit 685210ec authored by mstarzinger, committed by Commit bot

[interpreter] Switch profiler to use frames for OSR.

This switches the interface of the runtime profiler to use frames as
opposed to functions for performing on-stack replacement. Requests for
such replacements need to target a specific frame. This will enable us
to activate bytecode as well as baseline code for the same function.

The existing %OptimizeOsr runtime function also had to be adapted and now
takes an optional stack depth to target a specific stack frame.
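
Usage sketch (illustrative only; the function names and the loop below are
hypothetical, and --allow-natives-syntax must be enabled as in the mjsunit
tests): a depth of 0, or no argument, targets the frame of the function that
contains the %OptimizeOsr call, while 1 targets its caller, replacing the old
pattern of passing a function object such as thrower.caller.

  function helper() {
    // Arm OSR for the caller's frame (stack depth 1), not helper's own frame.
    %OptimizeOsr(1);
  }

  function hotLoop() {
    for (var i = 0; i < 100000; i++) {
      if (i == 5) helper();  // hotLoop's frame becomes the OSR target.
    }
  }

  hotLoop();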

R=bmeurer@chromium.org
BUG=v8:4764

Review-Url: https://codereview.chromium.org/2230783004
Cr-Commit-Position: refs/heads/master@{#38548}
parent 09e7c01b
@@ -127,8 +127,9 @@ void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
   function->MarkForBaseline();
 }
 
-void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
+void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
                                                 int loop_nesting_levels) {
+  JSFunction* function = frame->function();
   SharedFunctionInfo* shared = function->shared();
   if (!FLAG_use_osr || function->shared()->IsBuiltin()) {
     return;
@@ -153,13 +154,15 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
     PrintF("]\n");
   }
 
-  if (shared->code()->kind() == Code::FUNCTION) {
+  if (frame->type() == StackFrame::JAVA_SCRIPT) {
+    DCHECK(shared->HasBaselineCode());
     DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code()));
     for (int i = 0; i < loop_nesting_levels; i++) {
       BackEdgeTable::Patch(isolate_, shared->code());
     }
-  } else if (shared->HasBytecodeArray()) {
-    DCHECK(FLAG_ignition_osr);  // Should only happen when enabled.
+  } else if (frame->type() == StackFrame::INTERPRETED) {
+    DCHECK(shared->HasBytecodeArray());
+    if (!FLAG_ignition_osr) return;  // Only use this when enabled.
     int level = shared->bytecode_array()->osr_loop_nesting_level();
     shared->bytecode_array()->set_osr_loop_nesting_level(
         Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
@@ -169,17 +172,17 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
 }
 
 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
-                                               int frame_count,
-                                               bool frame_optimized) {
+                                               JavaScriptFrame* frame,
+                                               int frame_count) {
   SharedFunctionInfo* shared = function->shared();
   Code* shared_code = shared->code();
   if (shared_code->kind() != Code::FUNCTION) return;
   if (function->IsInOptimizationQueue()) return;
 
   if (FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal optimized compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
             (function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
@@ -193,7 +196,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
         ticks < Code::ProfilerTicksField::kMax) {
       shared_code->set_profiler_ticks(ticks + 1);
     } else {
-      AttemptOnStackReplacement(function);
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -265,7 +268,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
 }
 
 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
-                                            bool frame_optimized) {
+                                            JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
 
   SharedFunctionInfo* shared = function->shared();
@@ -274,10 +277,10 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
 
-  if (FLAG_ignition_osr && FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+  if (FLAG_always_osr) {
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal baseline compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
             (function->IsMarkedForBaseline() ||
              function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
@@ -287,9 +290,8 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
     int64_t allowance =
         kOSRCodeSizeAllowanceBaseIgnition +
         static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
-        shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(function);
+    if (shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -307,7 +309,7 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
 }
 
 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
-                                            bool frame_optimized) {
+                                            JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
 
   SharedFunctionInfo* shared = function->shared();
@@ -316,10 +318,10 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
 
-  if (FLAG_ignition_osr && FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+  if (FLAG_always_osr) {
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal optimized compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
             (function->IsMarkedForBaseline() ||
              function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
@@ -329,9 +331,8 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
     int64_t allowance =
         kOSRCodeSizeAllowanceBaseIgnition +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
-        shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(function);
+    if (shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -405,14 +406,14 @@ void RuntimeProfiler::MarkCandidatesForOptimization() {
         Compiler::NextCompilationTier(function);
     if (function->shared()->HasBytecodeArray()) {
       if (next_tier == Compiler::BASELINE) {
-        MaybeBaselineIgnition(function, frame->is_optimized());
+        MaybeBaselineIgnition(function, frame);
       } else {
        DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
-        MaybeOptimizeIgnition(function, frame->is_optimized());
+        MaybeOptimizeIgnition(function, frame);
      }
    } else {
      DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
-      MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
+      MaybeOptimizeFullCodegen(function, frame, frame_count);
    }
  }
  any_ic_changed_ = false;
...
@@ -11,6 +11,7 @@ namespace v8 {
 namespace internal {
 
 class Isolate;
+class JavaScriptFrame;
 class JSFunction;
 
 class RuntimeProfiler {
@@ -21,13 +22,14 @@ class RuntimeProfiler {
   void NotifyICChanged() { any_ic_changed_ = true; }
 
-  void AttemptOnStackReplacement(JSFunction* function, int nesting_levels = 1);
+  void AttemptOnStackReplacement(JavaScriptFrame* frame,
+                                 int nesting_levels = 1);
 
  private:
-  void MaybeOptimizeFullCodegen(JSFunction* function, int frame_count,
-                                bool frame_optimized);
-  void MaybeBaselineIgnition(JSFunction* function, bool frame_optimized);
-  void MaybeOptimizeIgnition(JSFunction* function, bool frame_optimized);
+  void MaybeOptimizeFullCodegen(JSFunction* function, JavaScriptFrame* frame,
+                                int frame_count);
+  void MaybeBaselineIgnition(JSFunction* function, JavaScriptFrame* frame);
+  void MaybeOptimizeIgnition(JSFunction* function, JavaScriptFrame* frame);
   void Optimize(JSFunction* function, const char* reason);
   void Baseline(JSFunction* function, const char* reason);
...
@@ -143,37 +143,27 @@ RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
 RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
   HandleScope scope(isolate);
-  // This function is used by fuzzers, ignore calls with bogus arguments count.
-  if (args.length() != 0 && args.length() != 1) {
-    return isolate->heap()->undefined_value();
-  }
+  DCHECK(args.length() == 0 || args.length() == 1);
+  Handle<JSFunction> function;
 
-  Handle<JSFunction> function = Handle<JSFunction>::null();
-  if (args.length() == 0) {
-    // Find the JavaScript function on the top of the stack.
-    JavaScriptFrameIterator it(isolate);
-    if (!it.done()) function = Handle<JSFunction>(it.frame()->function());
-    if (function.is_null()) return isolate->heap()->undefined_value();
-  } else {
-    // Function was passed as an argument.
-    CONVERT_ARG_HANDLE_CHECKED(JSFunction, arg, 0);
-    function = arg;
-  }
+  // The optional parameter determines the frame being targeted.
+  int stack_depth = args.length() == 1 ? args.smi_at(0) : 0;
 
-  // If function is interpreted but OSR hasn't been enabled, just return.
-  if (function->shared()->HasBytecodeArray() && !FLAG_ignition_osr) {
-    return isolate->heap()->undefined_value();
-  }
+  // Find the JavaScript function on the top of the stack.
+  JavaScriptFrameIterator it(isolate);
+  while (!it.done() && stack_depth--) it.Advance();
+  if (!it.done()) function = Handle<JSFunction>(it.frame()->function());
+  if (function.is_null()) return isolate->heap()->undefined_value();
 
   // If the function is already optimized, just return.
   if (function->IsOptimized()) return isolate->heap()->undefined_value();
 
   // Make the profiler arm all back edges in unoptimized code.
-  if (function->shared()->HasBytecodeArray() ||
-      function->shared()->HasBaselineCode()) {
+  if (it.frame()->type() == StackFrame::JAVA_SCRIPT ||
+      it.frame()->type() == StackFrame::INTERPRETED) {
     isolate->runtime_profiler()->AttemptOnStackReplacement(
-        *function, AbstractCode::kMaxLoopNestingMarker);
+        it.frame(), AbstractCode::kMaxLoopNestingMarker);
   }
 
   return isolate->heap()->undefined_value();
...
@@ -8,7 +8,7 @@ var global_counter = 0;
 
 function thrower() {
   var x = global_counter++;
-  if (x == 5) %OptimizeOsr(thrower.caller);
+  if (x == 5) %OptimizeOsr(1);
   if (x == 10) throw "terminate";
 }
...