Commit 685210ec authored by mstarzinger, committed by Commit bot

[interpreter] Switch profiler to use frames for OSR.

This switches the interface of the runtime profiler to use frames as
opposed to functions for performing on-stack replacement. Requests for
such replacements need to target a specific frame. This will enable us
to activate bytecode as well as baseline code for the same function.

The existing %OptimizeOsr runtime function also had to be adapted and now
takes an optional stack depth to target a specific stack frame.
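
For example, a call site can now pass a Smi stack depth instead of a function
object. A minimal sketch of the new convention (the function names below are
illustrative, not from this CL; assumes d8 with --allow-natives-syntax, plus
--ignition --ignition-osr for the interpreted case):

  function inner() {
    // Depth 1 targets the frame one level below the top of the stack,
    // i.e. the still-running loop in outer(), instead of passing a closure.
    %OptimizeOsr(1);
  }
  function outer() {
    for (var i = 0; i < 10; i++) {
      if (i == 5) inner();
    }
  }
  outer();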

R=bmeurer@chromium.org
BUG=v8:4764

Review-Url: https://codereview.chromium.org/2230783004
Cr-Commit-Position: refs/heads/master@{#38548}
parent 09e7c01b
@@ -127,8 +127,9 @@ void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
   function->MarkForBaseline();
 }
-void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
+void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
                                                 int loop_nesting_levels) {
+  JSFunction* function = frame->function();
   SharedFunctionInfo* shared = function->shared();
   if (!FLAG_use_osr || function->shared()->IsBuiltin()) {
     return;
@@ -153,13 +154,15 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
     PrintF("]\n");
   }
-  if (shared->code()->kind() == Code::FUNCTION) {
+  if (frame->type() == StackFrame::JAVA_SCRIPT) {
+    DCHECK(shared->HasBaselineCode());
     DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code()));
     for (int i = 0; i < loop_nesting_levels; i++) {
       BackEdgeTable::Patch(isolate_, shared->code());
     }
-  } else if (shared->HasBytecodeArray()) {
-    DCHECK(FLAG_ignition_osr);  // Should only happen when enabled.
+  } else if (frame->type() == StackFrame::INTERPRETED) {
+    DCHECK(shared->HasBytecodeArray());
+    if (!FLAG_ignition_osr) return;  // Only use this when enabled.
     int level = shared->bytecode_array()->osr_loop_nesting_level();
     shared->bytecode_array()->set_osr_loop_nesting_level(
         Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
@@ -169,17 +172,17 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
 }
 void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
-                                               int frame_count,
-                                               bool frame_optimized) {
+                                               JavaScriptFrame* frame,
+                                               int frame_count) {
   SharedFunctionInfo* shared = function->shared();
   Code* shared_code = shared->code();
   if (shared_code->kind() != Code::FUNCTION) return;
   if (function->IsInOptimizationQueue()) return;
   if (FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal optimized compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
              (function->IsMarkedForOptimization() ||
               function->IsMarkedForConcurrentOptimization() ||
               function->IsOptimized())) {
@@ -193,7 +196,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
         ticks < Code::ProfilerTicksField::kMax) {
       shared_code->set_profiler_ticks(ticks + 1);
     } else {
-      AttemptOnStackReplacement(function);
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -265,7 +268,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
 }
 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
-                                            bool frame_optimized) {
+                                            JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
   SharedFunctionInfo* shared = function->shared();
@@ -274,10 +277,10 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
-  if (FLAG_ignition_osr && FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+  if (FLAG_always_osr) {
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal baseline compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
             (function->IsMarkedForBaseline() ||
              function->IsMarkedForOptimization() ||
             function->IsMarkedForConcurrentOptimization() ||
@@ -287,9 +290,8 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
     int64_t allowance =
         kOSRCodeSizeAllowanceBaseIgnition +
         static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
-        shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(function);
+    if (shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -307,7 +309,7 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
 }
 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
-                                            bool frame_optimized) {
+                                            JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
   SharedFunctionInfo* shared = function->shared();
@@ -316,10 +318,10 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
-  if (FLAG_ignition_osr && FLAG_always_osr) {
-    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+  if (FLAG_always_osr) {
+    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal optimized compile as well.
-  } else if (!frame_optimized &&
+  } else if (!frame->is_optimized() &&
             (function->IsMarkedForBaseline() ||
              function->IsMarkedForOptimization() ||
             function->IsMarkedForConcurrentOptimization() ||
@@ -329,9 +331,8 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
     int64_t allowance =
         kOSRCodeSizeAllowanceBaseIgnition +
         static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
-        shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(function);
+    if (shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(frame);
     }
     return;
   }
@@ -405,14 +406,14 @@ void RuntimeProfiler::MarkCandidatesForOptimization() {
         Compiler::NextCompilationTier(function);
     if (function->shared()->HasBytecodeArray()) {
       if (next_tier == Compiler::BASELINE) {
-        MaybeBaselineIgnition(function, frame->is_optimized());
+        MaybeBaselineIgnition(function, frame);
       } else {
         DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
-        MaybeOptimizeIgnition(function, frame->is_optimized());
+        MaybeOptimizeIgnition(function, frame);
       }
     } else {
       DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
-      MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
+      MaybeOptimizeFullCodegen(function, frame, frame_count);
     }
   }
   any_ic_changed_ = false;
@@ -11,6 +11,7 @@ namespace v8 {
 namespace internal {
 class Isolate;
+class JavaScriptFrame;
 class JSFunction;
 class RuntimeProfiler {
@@ -21,13 +22,14 @@ class RuntimeProfiler {
   void NotifyICChanged() { any_ic_changed_ = true; }
-  void AttemptOnStackReplacement(JSFunction* function, int nesting_levels = 1);
+  void AttemptOnStackReplacement(JavaScriptFrame* frame,
+                                 int nesting_levels = 1);
  private:
-  void MaybeOptimizeFullCodegen(JSFunction* function, int frame_count,
-                                bool frame_optimized);
-  void MaybeBaselineIgnition(JSFunction* function, bool frame_optimized);
-  void MaybeOptimizeIgnition(JSFunction* function, bool frame_optimized);
+  void MaybeOptimizeFullCodegen(JSFunction* function, JavaScriptFrame* frame,
+                                int frame_count);
+  void MaybeBaselineIgnition(JSFunction* function, JavaScriptFrame* frame);
+  void MaybeOptimizeIgnition(JSFunction* function, JavaScriptFrame* frame);
   void Optimize(JSFunction* function, const char* reason);
   void Baseline(JSFunction* function, const char* reason);
@@ -143,37 +143,27 @@ RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
 RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
   HandleScope scope(isolate);
-  DCHECK(args.length() == 0 || args.length() == 1);
+  // This function is used by fuzzers, ignore calls with bogus arguments count.
+  if (args.length() != 0 && args.length() != 1) {
+    return isolate->heap()->undefined_value();
+  }
-  Handle<JSFunction> function;
+  Handle<JSFunction> function = Handle<JSFunction>::null();
-  if (args.length() == 0) {
-    // Find the JavaScript function on the top of the stack.
-    JavaScriptFrameIterator it(isolate);
-    if (!it.done()) function = Handle<JSFunction>(it.frame()->function());
-    if (function.is_null()) return isolate->heap()->undefined_value();
-  } else {
-    // Function was passed as an argument.
-    CONVERT_ARG_HANDLE_CHECKED(JSFunction, arg, 0);
-    function = arg;
-  }
+  // The optional parameter determines the frame being targeted.
+  int stack_depth = args.length() == 1 ? args.smi_at(0) : 0;
-  // If function is interpreted but OSR hasn't been enabled, just return.
-  if (function->shared()->HasBytecodeArray() && !FLAG_ignition_osr) {
-    return isolate->heap()->undefined_value();
-  }
+  // Find the JavaScript function on the top of the stack.
+  JavaScriptFrameIterator it(isolate);
+  while (!it.done() && stack_depth--) it.Advance();
+  if (!it.done()) function = Handle<JSFunction>(it.frame()->function());
+  if (function.is_null()) return isolate->heap()->undefined_value();
   // If the function is already optimized, just return.
   if (function->IsOptimized()) return isolate->heap()->undefined_value();
   // Make the profiler arm all back edges in unoptimized code.
-  if (function->shared()->HasBytecodeArray() ||
-      function->shared()->HasBaselineCode()) {
+  if (it.frame()->type() == StackFrame::JAVA_SCRIPT ||
+      it.frame()->type() == StackFrame::INTERPRETED) {
     isolate->runtime_profiler()->AttemptOnStackReplacement(
-        *function, AbstractCode::kMaxLoopNestingMarker);
+        it.frame(), AbstractCode::kMaxLoopNestingMarker);
   }
   return isolate->heap()->undefined_value();
@@ -8,7 +8,7 @@ var global_counter = 0;
 function thrower() {
   var x = global_counter++;
-  if (x == 5) %OptimizeOsr(thrower.caller);
+  if (x == 5) %OptimizeOsr(1);
   if (x == 10) throw "terminate";
 }