Commit b022c448 authored by Mythri A, committed by Commit Bot

[turboprop] Tierup from turboprop with --turboprop-as-midtier

This CL implements tier-up support from Turboprop to TurboFan behind the
--turboprop-as-midtier flag. More specifically, it:
1. Scales down the bytecode size when updating the interrupt budget in
optimized code (TP / NCI).
2. Makes the runtime profiler tier up from TP to TF with
--turboprop-as-midtier (see the sketch after this list).
3. Looks for the correct code kind when looking up optimized code in
the feedback vector.
4. After servicing the optimization marker, continues with mid-tier
optimized code if it exists.
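
For illustration, here is a standalone C++ sketch of the tick-threshold arithmetic that the runtime-profiler change below implements. The values of kProfilerTicksBeforeOptimization and kBytecodeSizeAllowancePerTick are assumptions for the sketch, not taken from this diff; only the scale factor of 10 corresponds to the new --ticks-scale-factor-for-top-tier flag.

// Standalone model of the tier-up threshold in RuntimeProfiler::ShouldOptimize().
// All constants here are illustrative assumptions; the real values live in
// src/execution/runtime-profiler.cc and src/flags/flag-definitions.h.
#include <cstdio>

constexpr int kProfilerTicksBeforeOptimization = 2;  // assumed
constexpr int kBytecodeSizeAllowancePerTick = 1200;  // assumed
constexpr int kTicksScaleFactorForTopTier = 10;      // --ticks-scale-factor-for-top-tier

int TicksForOptimization(int bytecode_length, bool active_tier_is_midtier_turboprop) {
  int ticks = kProfilerTicksBeforeOptimization +
              bytecode_length / kBytecodeSizeAllowancePerTick;
  // When already running mid-tier Turboprop code, wait roughly 10x longer
  // before tiering up to TurboFan, compensating for Turboprop's much smaller
  // interrupt budget.
  return active_tier_is_midtier_turboprop ? ticks * kTicksScaleFactorForTopTier
                                          : ticks;
}

int main() {
  std::printf("Ignition->TP threshold for a 3000-byte function: %d ticks\n",
              TicksForOptimization(3000, false));
  std::printf("TP->TF threshold for the same function: %d ticks\n",
              TicksForOptimization(3000, true));
}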

Bug: v8:9684
Change-Id: Iaf5783e75555c50c97901504fd122f62ff30be5c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2480363
Commit-Queue: Mythri Alle <mythria@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70993}
parent fe0c98a3
......@@ -94,16 +94,15 @@ class CompilerTracer : public AllStatic {
static void PrintTracePrefix(const CodeTracer::Scope& scope,
const char* header,
OptimizedCompilationInfo* info) {
PrintF(scope.file(), "[%s ", header);
info->closure()->ShortPrint(scope.file());
PrintF(scope.file(), " (target %s)", CodeKindToString(info->code_kind()));
PrintTracePrefix(scope, header, info->closure(), info->code_kind());
}
static void PrintTracePrefix(const CodeTracer::Scope& scope,
const char* header,
Handle<JSFunction> function) {
const char* header, Handle<JSFunction> function,
CodeKind code_kind) {
PrintF(scope.file(), "[%s ", header);
function->ShortPrint(scope.file());
PrintF(scope.file(), " (target %s)", CodeKindToString(code_kind));
}
static void PrintTraceSuffix(const CodeTracer::Scope& scope) {
......@@ -152,10 +151,11 @@ class CompilerTracer : public AllStatic {
static void TraceOptimizedCodeCacheHit(Isolate* isolate,
Handle<JSFunction> function,
BailoutId osr_offset) {
BailoutId osr_offset,
CodeKind code_kind) {
if (!FLAG_trace_opt) return;
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintTracePrefix(scope, "found optimized code for", function);
PrintTracePrefix(scope, "found optimized code for", function, code_kind);
if (!osr_offset.IsNone()) {
PrintF(scope.file(), " at OSR AST id %d", osr_offset.ToInt());
}
......@@ -163,10 +163,11 @@ class CompilerTracer : public AllStatic {
}
static void TraceOptimizeForAlwaysOpt(Isolate* isolate,
Handle<JSFunction> function) {
Handle<JSFunction> function,
CodeKind code_kind) {
if (!FLAG_trace_opt) return;
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintTracePrefix(scope, "optimizing", function);
PrintTracePrefix(scope, "optimizing", function, code_kind);
PrintF(scope.file(), " because --always-opt");
PrintTraceSuffix(scope);
}
......@@ -821,7 +822,7 @@ bool FinalizeDeferredUnoptimizedCompilationJobs(
}
V8_WARN_UNUSED_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeCache(
Handle<JSFunction> function, BailoutId osr_offset) {
Handle<JSFunction> function, BailoutId osr_offset, CodeKind code_kind) {
RuntimeCallTimerScope runtimeTimer(
function->GetIsolate(),
RuntimeCallCounterId::kCompileGetFromOptimizedCodeMap);
......@@ -840,7 +841,8 @@ V8_WARN_UNUSED_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeCache(
.GetOSROptimizedCodeCache()
.GetOptimizedCode(shared, osr_offset, isolate);
}
if (!code.is_null()) {
DCHECK_IMPLIES(!code.is_null(), code.kind() <= code_kind);
if (!code.is_null() && code.kind() == code_kind) {
// Caching of optimized code enabled and optimized code found.
DCHECK(!code.marked_for_deoptimization());
DCHECK(function->shared().is_compiled());
......@@ -1032,6 +1034,18 @@ Handle<Code> ContinuationForConcurrentOptimization(
// Tiering up to Turbofan and cached optimized code exists. Continue
// execution there until TF optimization has finished.
return cached_code;
} else if (FLAG_turboprop_as_midtier &&
function->HasAvailableOptimizedCode()) {
DCHECK(function->NextTier() == CodeKind::TURBOFAN);
// It is possible that we have marked a closure for TurboFan optimization
// but the marker is processed by another closure that doesn't have
// optimized code yet. So heal the closure here and return the optimized
// code.
if (!function->HasAttachedOptimizedCode()) {
DCHECK(function->feedback_vector().has_optimized_code());
function->set_code(function->feedback_vector().optimized_code());
}
return handle(function->code(), isolate);
}
return BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
}
......@@ -1078,9 +1092,10 @@ MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
// Check the optimized code cache (stored on the SharedFunctionInfo).
if (CodeKindIsStoredInOptimizedCodeCache(code_kind)) {
Handle<Code> cached_code;
if (GetCodeFromOptimizedCodeCache(function, osr_offset)
if (GetCodeFromOptimizedCodeCache(function, osr_offset, code_kind)
.ToHandle(&cached_code)) {
CompilerTracer::TraceOptimizedCodeCacheHit(isolate, function, osr_offset);
CompilerTracer::TraceOptimizedCodeCacheHit(isolate, function, osr_offset,
code_kind);
return cached_code;
}
}
......@@ -1820,7 +1835,8 @@ bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag,
// Optimize now if --always-opt is enabled.
if (FLAG_always_opt && !function->shared().HasAsmWasmData()) {
CompilerTracer::TraceOptimizeForAlwaysOpt(isolate, function);
CompilerTracer::TraceOptimizeForAlwaysOpt(isolate, function,
CodeKindForTopTier());
Handle<Code> maybe_code;
if (GetOptimizedCode(function, ConcurrencyMode::kNotConcurrent,
......
......@@ -210,6 +210,9 @@ bool RuntimeProfiler::MaybeOSR(JSFunction function, InterpretedFrame* frame) {
function.HasAvailableOptimizedCode()) {
// Attempt OSR if we are still running interpreted code even though
// the function has long been marked or even already been optimized.
// TODO(turboprop, mythria): Currently we don't tier up from Turboprop code
// to Turbofan OSR code. When we start supporting this, the ticks have to be
// scaled accordingly.
int64_t allowance =
kOSRBytecodeSizeAllowanceBase +
static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
......@@ -226,26 +229,31 @@ OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
if (function.ActiveTierIsTurbofan()) {
return OptimizationReason::kDoNotOptimize;
}
if (V8_UNLIKELY(FLAG_turboprop) && function.ActiveTierIsTurboprop()) {
// TODO(turboprop): Implement tier up from Turboprop.
if (V8_UNLIKELY(FLAG_turboprop) && function.ActiveTierIsToptierTurboprop()) {
return OptimizationReason::kDoNotOptimize;
}
int ticks = function.feedback_vector().profiler_ticks();
int scale_factor = function.ActiveTierIsMidtierTurboprop()
? FLAG_ticks_scale_factor_for_top_tier
: 1;
int ticks_for_optimization =
kProfilerTicksBeforeOptimization +
(bytecode.length() / kBytecodeSizeAllowancePerTick);
ticks_for_optimization *= scale_factor;
if (ticks >= ticks_for_optimization) {
return OptimizationReason::kHotAndStable;
} else if (!any_ic_changed_ &&
bytecode.length() < kMaxBytecodeSizeForEarlyOpt) {
// TODO(turboprop, mythria): Do we need to support small-function
// optimization for TP->TF tier-up? If so, do we want to scale the bytecode
// size?
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
return OptimizationReason::kSmallFunction;
} else if (FLAG_trace_opt_verbose) {
PrintF("[not yet optimizing ");
function.PrintName();
PrintF(", not enough ticks: %d/%d and ", ticks,
kProfilerTicksBeforeOptimization);
PrintF(", not enough ticks: %d/%d and ", ticks, ticks_for_optimization);
if (any_ic_changed_) {
PrintF("ICs changed]\n");
} else {
......
......@@ -564,6 +564,11 @@ DEFINE_VALUE_IMPLICATION(turboprop, interrupt_budget, 15 * KB)
DEFINE_VALUE_IMPLICATION(turboprop, reuse_opt_code_count, 2)
DEFINE_UINT_READONLY(max_minimorphic_map_checks, 4,
"max number of map checks to perform in minimorphic state")
// Since Turboprop uses a much lower interrupt budget, we need to wait for a
// higher number of ticks before tiering up to Turbofan, to roughly match the
// default. The default of 10 is approximately the ratio of the TF to TP
// interrupt budgets.
DEFINE_INT(ticks_scale_factor_for_top_tier, 10,
"scale factor for profiler ticks when tiering up from midtier")
// Flags for concurrent recompilation.
DEFINE_BOOL(concurrent_recompilation, true,
......
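
As a rough sanity check of that factor: the implication above forces the Turboprop interrupt budget to 15 * KB, and assuming the default interrupt budget of this V8 version is 0x24000 bytes (about 144 KB; an assumption, not visible in this diff), the ratio works out to roughly 10.

// Quick check of the budget ratio behind --ticks-scale-factor-for-top-tier.
// kDefaultInterruptBudget is an assumed value, not taken from this diff.
#include <cstdio>

constexpr int KB = 1024;
constexpr int kTurbopropInterruptBudget = 15 * KB;  // from the implication above
constexpr int kDefaultInterruptBudget = 0x24000;    // assumed default (~144 KB)

int main() {
  std::printf("budget ratio: %.1f\n",
              static_cast<double>(kDefaultInterruptBudget) /
                  kTurbopropInterruptBudget);  // prints 9.6, i.e. roughly 10
}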
......@@ -11,8 +11,10 @@
namespace v8 {
namespace internal {
// The order of the kinds from INTERPRETED_FUNCTION up to TURBOFAN is
// important: we use it to check the relative ordering of the tiers when
// fetching / installing optimized code.
#define CODE_KIND_LIST(V) \
V(TURBOFAN) \
V(BYTECODE_HANDLER) \
V(FOR_TESTING) \
V(BUILTIN) \
......@@ -25,13 +27,19 @@ namespace internal {
V(C_WASM_ENTRY) \
V(INTERPRETED_FUNCTION) \
V(NATIVE_CONTEXT_INDEPENDENT) \
V(TURBOPROP)
V(TURBOPROP) \
V(TURBOFAN)
enum class CodeKind {
#define DEFINE_CODE_KIND_ENUM(name) name,
CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
};
STATIC_ASSERT(CodeKind::INTERPRETED_FUNCTION < CodeKind::TURBOPROP &&
CodeKind::INTERPRETED_FUNCTION <
CodeKind::NATIVE_CONTEXT_INDEPENDENT);
STATIC_ASSERT(CodeKind::TURBOPROP < CodeKind::TURBOFAN &&
CodeKind::NATIVE_CONTEXT_INDEPENDENT < CodeKind::TURBOFAN);
#define V(...) +1
static constexpr int kCodeKindCount = CODE_KIND_LIST(V);
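
To make the intent of that ordering concrete, here is a minimal standalone model of how the relative ordering is used by the cache-lookup change in compiler.cc above. The enum below is a trimmed stand-in, not the full CODE_KIND_LIST.

// Trimmed stand-in for CodeKind: lower enum values are lower tiers.
#include <cassert>
#include <cstdio>

enum class Kind { INTERPRETED_FUNCTION, NATIVE_CONTEXT_INDEPENDENT, TURBOPROP, TURBOFAN };

// Mirrors GetCodeFromOptimizedCodeCache(): cached code is only reused when it
// is exactly the requested kind, and the cache is expected never to hold a
// higher tier than the one being requested (the DCHECK_IMPLIES above relies on
// the enum ordering for this comparison).
bool UseCachedCode(Kind cached, Kind requested) {
  assert(cached <= requested);
  return cached == requested;
}

int main() {
  // A cached Turboprop entry does not satisfy a request for TurboFan code,
  // so the TP->TF tier-up falls through to an actual TurboFan compile.
  std::printf("%d\n", UseCachedCode(Kind::TURBOPROP, Kind::TURBOFAN));  // 0
  std::printf("%d\n", UseCachedCode(Kind::TURBOFAN, Kind::TURBOFAN));   // 1
}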
......@@ -94,7 +102,10 @@ inline constexpr bool CodeKindIsStoredInOptimizedCodeCache(CodeKind kind) {
inline OptimizationTier GetTierForCodeKind(CodeKind kind) {
if (kind == CodeKind::TURBOFAN) return OptimizationTier::kTopTier;
if (kind == CodeKind::TURBOPROP) return OptimizationTier::kTopTier;
if (kind == CodeKind::TURBOPROP) {
return FLAG_turboprop_as_midtier ? OptimizationTier::kMidTier
: OptimizationTier::kTopTier;
}
if (kind == CodeKind::NATIVE_CONTEXT_INDEPENDENT) {
return FLAG_turbo_nci_as_midtier ? OptimizationTier::kMidTier
: OptimizationTier::kTopTier;
......@@ -103,7 +114,14 @@ inline OptimizationTier GetTierForCodeKind(CodeKind kind) {
}
inline CodeKind CodeKindForTopTier() {
return V8_UNLIKELY(FLAG_turboprop) ? CodeKind::TURBOPROP : CodeKind::TURBOFAN;
// TODO(turboprop, mythria): We should make FLAG_turboprop mean that Turboprop
// is the mid-tier compiler and replace FLAG_turboprop_as_midtier with
// FLAG_turboprop_as_top_tier to only tier up to Turboprop, once
// FLAG_turboprop_as_midtier is stable and major regressions are addressed.
if (V8_UNLIKELY(FLAG_turboprop)) {
return FLAG_turboprop_as_midtier ? CodeKind::TURBOFAN : CodeKind::TURBOPROP;
}
return CodeKind::TURBOFAN;
}
// The dedicated CodeKindFlag enum represents all code kinds in a format
......
......@@ -72,7 +72,7 @@ void JSFunction::MarkForOptimization(ConcurrencyMode mode) {
}
DCHECK(!is_compiled() || ActiveTierIsIgnition() || ActiveTierIsNCI() ||
(ActiveTierIsTurboprop() && FLAG_turboprop_as_midtier));
ActiveTierIsMidtierTurboprop());
DCHECK(!ActiveTierIsTurbofan());
DCHECK(shared().IsInterpreted());
DCHECK(shared().allows_lazy_compilation() ||
......
......@@ -131,19 +131,27 @@ bool JSFunction::ActiveTierIsNCI() const {
return highest_tier == CodeKind::NATIVE_CONTEXT_INDEPENDENT;
}
bool JSFunction::ActiveTierIsTurboprop() const {
bool JSFunction::ActiveTierIsToptierTurboprop() const {
CodeKind highest_tier;
if (!FLAG_turboprop) return false;
if (!HighestTierOf(GetAvailableCodeKinds(), &highest_tier)) return false;
return highest_tier == CodeKind::TURBOPROP;
return highest_tier == CodeKind::TURBOPROP && !FLAG_turboprop_as_midtier;
}
bool JSFunction::ActiveTierIsMidtierTurboprop() const {
CodeKind highest_tier;
if (!FLAG_turboprop_as_midtier) return false;
if (!HighestTierOf(GetAvailableCodeKinds(), &highest_tier)) return false;
return highest_tier == CodeKind::TURBOPROP && FLAG_turboprop_as_midtier;
}
CodeKind JSFunction::NextTier() const {
if (V8_UNLIKELY(FLAG_turbo_nci_as_midtier && ActiveTierIsIgnition())) {
return CodeKind::NATIVE_CONTEXT_INDEPENDENT;
} else if (V8_UNLIKELY(FLAG_turboprop_as_midtier &&
ActiveTierIsTurboprop())) {
} else if (V8_UNLIKELY(FLAG_turboprop) && ActiveTierIsMidtierTurboprop()) {
return CodeKind::TURBOFAN;
} else if (V8_UNLIKELY(FLAG_turboprop) && ActiveTierIsIgnition()) {
} else if (V8_UNLIKELY(FLAG_turboprop)) {
DCHECK(ActiveTierIsIgnition());
return CodeKind::TURBOPROP;
}
return CodeKind::TURBOFAN;
......
......@@ -116,7 +116,8 @@ class JSFunction : public JSFunctionOrBoundFunction {
V8_EXPORT_PRIVATE bool ActiveTierIsIgnition() const;
bool ActiveTierIsTurbofan() const;
bool ActiveTierIsNCI() const;
bool ActiveTierIsTurboprop() const;
bool ActiveTierIsMidtierTurboprop() const;
bool ActiveTierIsToptierTurboprop() const;
CodeKind NextTier() const;
......
......@@ -1182,12 +1182,12 @@ UNINITIALIZED_TEST(BuiltinsNotLoggedAsLazyCompile) {
i::SNPrintF(buffer, ",0x%" V8PRIxPTR ",%d,BooleanConstructor",
builtin->InstructionStart(), builtin->InstructionSize());
CHECK(logger.ContainsLine(
{"code-creation,Builtin,3,", std::string(buffer.begin())}));
{"code-creation,Builtin,2,", std::string(buffer.begin())}));
i::SNPrintF(buffer, ",0x%" V8PRIxPTR ",%d,", builtin->InstructionStart(),
builtin->InstructionSize());
CHECK(!logger.ContainsLine(
{"code-creation,LazyCompile,3,", std::string(buffer.begin())}));
{"code-creation,LazyCompile,2,", std::string(buffer.begin())}));
}
isolate->Dispose();
}