Commit 49abe45e authored by Jakob Linke, committed by V8 LUCI CQ

[tiering] Fix active tier detection in OnInterruptTick

We've previously used JSFunction::GetActiveTier, which looks at
JSFunction::code to determine the active tier. However, that may
diverge from the actually active tier (i.e. the calling frame type),
e.g. when Turbofan code is available but we haven't yet tiered up
because we're stuck in a long-running loop.

With this CL, we determine the caller code_kind by splitting the
BytecodeBudgetInterrupt runtime function up into _Ignition,
_Sparkplug, and _Maglev variants s.t. the tier is passed implicitly
without extra overhead at runtime.

Bug: v8:7700
Change-Id: I46d19c4676e3debb6d608d9fbc53495feef5cadf
Fixed: chromium:1358577
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3871196
Commit-Queue: Jakob Linke <jgruber@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83020}
parent b2576418
...@@ -573,7 +573,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -573,7 +573,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ Push(kJSFunctionRegister); __ Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size); __ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -642,7 +642,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -642,7 +642,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ masm()->PushArgument(kJSFunctionRegister); __ masm()->PushArgument(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ masm()->Pop(kInterpreterAccumulatorRegister, params_size); __ masm()->Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -606,7 +606,7 @@ void BaselineCompiler::UpdateInterruptBudgetAndJumpToLabel( ...@@ -606,7 +606,7 @@ void BaselineCompiler::UpdateInterruptBudgetAndJumpToLabel(
if (weight < 0) { if (weight < 0) {
SaveAccumulatorScope accumulator_scope(&basm_); SaveAccumulatorScope accumulator_scope(&basm_);
CallRuntime(Runtime::kBytecodeBudgetInterruptWithStackCheck, CallRuntime(Runtime::kBytecodeBudgetInterruptWithStackCheck_Sparkplug,
__ FunctionOperand()); __ FunctionOperand());
} }
} }
......
...@@ -537,7 +537,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -537,7 +537,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ Push(MemOperand(ebp, InterpreterFrameConstants::kFunctionOffset)); __ Push(MemOperand(ebp, InterpreterFrameConstants::kFunctionOffset));
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size); __ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -535,7 +535,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -535,7 +535,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ masm()->Push(kJSFunctionRegister); __ masm()->Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ masm()->Pop(params_size, kInterpreterAccumulatorRegister); __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -526,7 +526,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -526,7 +526,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ masm()->Push(kJSFunctionRegister); __ masm()->Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ masm()->Pop(params_size, kInterpreterAccumulatorRegister); __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -545,7 +545,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -545,7 +545,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ masm()->Push(kJSFunctionRegister); __ masm()->Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ masm()->Pop(params_size, kInterpreterAccumulatorRegister); __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -713,7 +713,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -713,7 +713,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ Push(kJSFunctionRegister); __ Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size); __ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -567,7 +567,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -567,7 +567,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ masm()->Push(kJSFunctionRegister); __ masm()->Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ masm()->Pop(params_size, kInterpreterAccumulatorRegister); __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -711,7 +711,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -711,7 +711,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ LoadFunction(kJSFunctionRegister); __ LoadFunction(kJSFunctionRegister);
__ Push(kJSFunctionRegister); __ Push(kJSFunctionRegister);
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size); __ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntag(params_size); __ masm()->SmiUntag(params_size);
......
...@@ -600,7 +600,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) { ...@@ -600,7 +600,7 @@ void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
__ LoadContext(kContextRegister); __ LoadContext(kContextRegister);
__ Push(MemOperand(rbp, InterpreterFrameConstants::kFunctionOffset)); __ Push(MemOperand(rbp, InterpreterFrameConstants::kFunctionOffset));
__ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
__ Pop(kInterpreterAccumulatorRegister, params_size); __ Pop(kInterpreterAccumulatorRegister, params_size);
__ masm()->SmiUntagUnsigned(params_size); __ masm()->SmiUntagUnsigned(params_size);
......
...@@ -323,8 +323,6 @@ void TieringManager::MaybeOptimizeFrame(JSFunction function, ...@@ -323,8 +323,6 @@ void TieringManager::MaybeOptimizeFrame(JSFunction function,
OptimizationDecision TieringManager::ShouldOptimize(JSFunction function, OptimizationDecision TieringManager::ShouldOptimize(JSFunction function,
CodeKind code_kind) { CodeKind code_kind) {
DCHECK_EQ(code_kind, function.GetActiveTier().value());
if (TiersUpToMaglev(code_kind) && if (TiersUpToMaglev(code_kind) &&
function.shared().PassesFilter(v8_flags.maglev_filter) && function.shared().PassesFilter(v8_flags.maglev_filter) &&
!function.shared(isolate_).maglev_compilation_failed()) { !function.shared(isolate_).maglev_compilation_failed()) {
...@@ -377,7 +375,8 @@ TieringManager::OnInterruptTickScope::~OnInterruptTickScope() { ...@@ -377,7 +375,8 @@ TieringManager::OnInterruptTickScope::~OnInterruptTickScope() {
profiler_->any_ic_changed_ = false; profiler_->any_ic_changed_ = false;
} }
void TieringManager::OnInterruptTick(Handle<JSFunction> function) { void TieringManager::OnInterruptTick(Handle<JSFunction> function,
CodeKind code_kind) {
IsCompiledScope is_compiled_scope( IsCompiledScope is_compiled_scope(
function->shared().is_compiled_scope(isolate_)); function->shared().is_compiled_scope(isolate_));
...@@ -441,7 +440,6 @@ void TieringManager::OnInterruptTick(Handle<JSFunction> function) { ...@@ -441,7 +440,6 @@ void TieringManager::OnInterruptTick(Handle<JSFunction> function) {
function_obj.feedback_vector().SaturatingIncrementProfilerTicks(); function_obj.feedback_vector().SaturatingIncrementProfilerTicks();
const CodeKind code_kind = function_obj.GetActiveTier().value();
MaybeOptimizeFrame(function_obj, code_kind); MaybeOptimizeFrame(function_obj, code_kind);
} }
......
...@@ -26,7 +26,7 @@ class TieringManager { ...@@ -26,7 +26,7 @@ class TieringManager {
public: public:
explicit TieringManager(Isolate* isolate) : isolate_(isolate) {} explicit TieringManager(Isolate* isolate) : isolate_(isolate) {}
void OnInterruptTick(Handle<JSFunction> function); void OnInterruptTick(Handle<JSFunction> function, CodeKind code_kind);
void NotifyICChanged() { any_ic_changed_ = true; } void NotifyICChanged() { any_ic_changed_ = true; }
......
...@@ -1039,8 +1039,8 @@ void InterpreterAssembler::UpdateInterruptBudget(TNode<Int32T> weight, ...@@ -1039,8 +1039,8 @@ void InterpreterAssembler::UpdateInterruptBudget(TNode<Int32T> weight,
BIND(&interrupt_check); BIND(&interrupt_check);
// JumpLoop should do a stack check as part of the interrupt. // JumpLoop should do a stack check as part of the interrupt.
CallRuntime(bytecode() == Bytecode::kJumpLoop CallRuntime(bytecode() == Bytecode::kJumpLoop
? Runtime::kBytecodeBudgetInterruptWithStackCheck ? Runtime::kBytecodeBudgetInterruptWithStackCheck_Ignition
: Runtime::kBytecodeBudgetInterrupt, : Runtime::kBytecodeBudgetInterrupt_Ignition,
GetContext(), function); GetContext(), function);
Goto(&done); Goto(&done);
......
...@@ -3242,7 +3242,8 @@ void ReduceInterruptBudget::GenerateCode(MaglevCodeGenState* code_gen_state, ...@@ -3242,7 +3242,8 @@ void ReduceInterruptBudget::GenerateCode(MaglevCodeGenState* code_gen_state,
code_gen_state, node->register_snapshot()); code_gen_state, node->register_snapshot());
__ Move(kContextRegister, code_gen_state->native_context().object()); __ Move(kContextRegister, code_gen_state->native_context().object());
__ Push(MemOperand(rbp, StandardFrameConstants::kFunctionOffset)); __ Push(MemOperand(rbp, StandardFrameConstants::kFunctionOffset));
__ CallRuntime(Runtime::kBytecodeBudgetInterruptWithStackCheck, 1); __ CallRuntime(Runtime::kBytecodeBudgetInterruptWithStackCheck_Maglev,
1);
save_register_state.DefineSafepointWithLazyDeopt( save_register_state.DefineSafepointWithLazyDeopt(
node->lazy_deopt_info()); node->lazy_deopt_info());
} }
......
...@@ -350,7 +350,11 @@ RUNTIME_FUNCTION(Runtime_StackGuardWithGap) { ...@@ -350,7 +350,11 @@ RUNTIME_FUNCTION(Runtime_StackGuardWithGap) {
return isolate->stack_guard()->HandleInterrupts(); return isolate->stack_guard()->HandleInterrupts();
} }
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck) { namespace {
Object BytecodeBudgetInterruptWithStackCheck(Isolate* isolate,
RuntimeArguments& args,
CodeKind code_kind) {
HandleScope scope(isolate); HandleScope scope(isolate);
DCHECK_EQ(1, args.length()); DCHECK_EQ(1, args.length());
Handle<JSFunction> function = args.at<JSFunction>(0); Handle<JSFunction> function = args.at<JSFunction>(0);
...@@ -372,20 +376,45 @@ RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck) { ...@@ -372,20 +376,45 @@ RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck) {
} }
} }
isolate->tiering_manager()->OnInterruptTick(function); isolate->tiering_manager()->OnInterruptTick(function, code_kind);
return ReadOnlyRoots(isolate).undefined_value(); return ReadOnlyRoots(isolate).undefined_value();
} }
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterrupt) { Object BytecodeBudgetInterrupt(Isolate* isolate, RuntimeArguments& args,
CodeKind code_kind) {
HandleScope scope(isolate); HandleScope scope(isolate);
DCHECK_EQ(1, args.length()); DCHECK_EQ(1, args.length());
Handle<JSFunction> function = args.at<JSFunction>(0); Handle<JSFunction> function = args.at<JSFunction>(0);
TRACE_EVENT0("v8.execute", "V8.BytecodeBudgetInterrupt"); TRACE_EVENT0("v8.execute", "V8.BytecodeBudgetInterrupt");
isolate->tiering_manager()->OnInterruptTick(function); isolate->tiering_manager()->OnInterruptTick(function, code_kind);
return ReadOnlyRoots(isolate).undefined_value(); return ReadOnlyRoots(isolate).undefined_value();
} }
} // namespace
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck_Ignition) {
return BytecodeBudgetInterruptWithStackCheck(isolate, args,
CodeKind::INTERPRETED_FUNCTION);
}
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterrupt_Ignition) {
return BytecodeBudgetInterrupt(isolate, args, CodeKind::INTERPRETED_FUNCTION);
}
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck_Sparkplug) {
return BytecodeBudgetInterruptWithStackCheck(isolate, args,
CodeKind::BASELINE);
}
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterrupt_Sparkplug) {
return BytecodeBudgetInterrupt(isolate, args, CodeKind::BASELINE);
}
RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck_Maglev) {
return BytecodeBudgetInterruptWithStackCheck(isolate, args, CodeKind::MAGLEV);
}
namespace { namespace {
#if V8_ENABLE_WEBASSEMBLY #if V8_ENABLE_WEBASSEMBLY
......
...@@ -224,8 +224,11 @@ namespace internal { ...@@ -224,8 +224,11 @@ namespace internal {
F(GetAndResetTurboProfilingData, 0, 1) \ F(GetAndResetTurboProfilingData, 0, 1) \
F(GetTemplateObject, 3, 1) \ F(GetTemplateObject, 3, 1) \
F(IncrementUseCounter, 1, 1) \ F(IncrementUseCounter, 1, 1) \
F(BytecodeBudgetInterrupt, 1, 1) \ F(BytecodeBudgetInterrupt_Ignition, 1, 1) \
F(BytecodeBudgetInterruptWithStackCheck, 1, 1) \ F(BytecodeBudgetInterruptWithStackCheck_Ignition, 1, 1) \
F(BytecodeBudgetInterrupt_Sparkplug, 1, 1) \
F(BytecodeBudgetInterruptWithStackCheck_Sparkplug, 1, 1) \
F(BytecodeBudgetInterruptWithStackCheck_Maglev, 1, 1) \
F(NewError, 2, 1) \ F(NewError, 2, 1) \
F(NewForeign, 0, 1) \ F(NewForeign, 0, 1) \
F(NewReferenceError, 2, 1) \ F(NewReferenceError, 2, 1) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment