Commit 1096e031 authored by Jakob Gruber, committed by Commit Bot

[nci] Implement tier-up, part 2 (marking)

This is part two of the implementation (part 1: heuristics in NCI code
to call the runtime profiler, part 2: heuristics in the runtime
profiler to mark the function for optimization, part 3: the final
part, recognizing and acting upon the marked function).

The runtime profiler heuristics added here remain very similar to what
we have for ignition, except that we now inspect optimized frames with
NCI code, and that we (currently) do not OSR from NCI to TF.

Bug: v8:8888
Change-Id: Ie88b0a0dcee16334cea585c771a4b505035f2291
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2358748
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Mythri Alle <mythria@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69484}
parent 8f87753f
......@@ -74,24 +74,52 @@ std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
return os << OptimizationReasonToString(reason);
}
RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
: isolate_(isolate), any_ic_changed_(false) {}
namespace {
// Under --trace-opt-verbose, logs that `function` is being skipped by the
// profiler because a concurrent optimization job for it is already queued.
void TraceInOptimizationQueue(JSFunction function) {
  if (FLAG_trace_opt_verbose) {
    PrintF("[function ");
    function.PrintName();
    // Bug fix: this helper traces the already-in-queue case; the message had
    // been copy-pasted from TraceHeuristicOptimizationDisallowed. The inline
    // code this helper was extracted from printed the line below.
    PrintF(" is already in optimization queue]\n");
  }
}
// Under --trace-opt-verbose, logs that heuristic optimization of `function`
// is not allowed because it was marked manually for optimization (used with
// --testing-d8-test-runner, where the pending-optimization table controls
// which functions the heuristics may touch).
void TraceHeuristicOptimizationDisallowed(JSFunction function) {
if (FLAG_trace_opt_verbose) {
PrintF("[function ");
function.PrintName();
PrintF(" has been marked manually for optimization]\n");
}
}
static void TraceRecompile(JSFunction function, const char* reason,
const char* type, Isolate* isolate) {
void TraceRecompile(JSFunction function, OptimizationReason reason,
Isolate* isolate) {
if (FLAG_trace_opt) {
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintF(scope.file(), "[marking ");
function.ShortPrint(scope.file());
PrintF(scope.file(), " for %s recompilation, reason: %s", type, reason);
PrintF(scope.file(), " for optimized recompilation, reason: %s",
OptimizationReasonToString(reason));
PrintF(scope.file(), "]\n");
}
}
// Under --trace-turbo-nci, logs that `function` (currently running
// native-context-independent code) has been marked for tier-up, together
// with the optimization reason.
void TraceNCIRecompile(JSFunction function, OptimizationReason reason) {
if (FLAG_trace_turbo_nci) {
StdoutStream os;
os << "NCI tierup mark: " << Brief(function) << ", "
<< OptimizationReasonToString(reason) << std::endl;
}
}
} // namespace
// any_ic_changed_ starts out false; it records whether any inline cache
// changed state since the last marking pass (it is reset when a
// MarkCandidatesForOptimizationScope closes).
RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
: isolate_(isolate), any_ic_changed_(false) {}
void RuntimeProfiler::Optimize(JSFunction function, OptimizationReason reason) {
DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
TraceRecompile(function, OptimizationReasonToString(reason), "optimized",
isolate_);
TraceRecompile(function, reason, isolate_);
function.MarkForOptimization(ConcurrencyMode::kConcurrent);
}
......@@ -122,28 +150,21 @@ void RuntimeProfiler::AttemptOnStackReplacement(InterpretedFrame* frame,
Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
}
void RuntimeProfiler::MaybeOptimize(JSFunction function,
InterpretedFrame* frame) {
void RuntimeProfiler::MaybeOptimizeInterpretedFrame(JSFunction function,
InterpretedFrame* frame) {
if (function.IsInOptimizationQueue()) {
if (FLAG_trace_opt_verbose) {
PrintF("[function ");
function.PrintName();
PrintF(" is already in optimization queue]\n");
}
TraceInOptimizationQueue(function);
return;
}
if (FLAG_testing_d8_test_runner) {
if (!PendingOptimizationTable::IsHeuristicOptimizationAllowed(isolate_,
function)) {
if (FLAG_trace_opt_verbose) {
PrintF("[function ");
function.PrintName();
PrintF(" has been marked manually for optimization]\n");
}
return;
}
if (FLAG_testing_d8_test_runner &&
!PendingOptimizationTable::IsHeuristicOptimizationAllowed(isolate_,
function)) {
TraceHeuristicOptimizationDisallowed(function);
return;
}
if (function.shared().optimization_disabled()) return;
if (FLAG_always_osr) {
AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
// Fall through and do a normal optimized compile as well.
......@@ -151,12 +172,38 @@ void RuntimeProfiler::MaybeOptimize(JSFunction function,
return;
}
OptimizationReason reason =
ShouldOptimize(function, function.shared().GetBytecodeArray());
if (reason != OptimizationReason::kDoNotOptimize) {
Optimize(function, reason);
}
}
// Decides whether `function`, currently executing native-context-independent
// (NCI) code, should be marked for Turbofan optimization, and marks it if so.
// Mirrors the gating in MaybeOptimizeInterpretedFrame, minus the OSR paths.
void RuntimeProfiler::MaybeOptimizeNCIFrame(JSFunction function) {
DCHECK_EQ(function.code().kind(), CodeKind::NATIVE_CONTEXT_INDEPENDENT);
// Bail out if a concurrent optimization job is already pending.
if (function.IsInOptimizationQueue()) {
TraceInOptimizationQueue(function);
return;
}
// In d8 test-runner mode, only functions explicitly registered in the
// pending-optimization table may be optimized heuristically.
if (FLAG_testing_d8_test_runner &&
!PendingOptimizationTable::IsHeuristicOptimizationAllowed(isolate_,
function)) {
TraceHeuristicOptimizationDisallowed(function);
return;
}
if (function.shared().optimization_disabled()) return;
// Note: NCI code does not OSR except when FLAG_turbo_nci_as_highest_tier
// is enabled, in which case we do not tier up from NCI code.
// Reuse the interpreter heuristics, driven by the same bytecode/ticks data.
OptimizationReason reason =
ShouldOptimize(function, function.shared().GetBytecodeArray());
if (reason != OptimizationReason::kDoNotOptimize) {
TraceNCIRecompile(function, reason);
Optimize(function, reason);
}
}
......@@ -190,7 +237,7 @@ bool RuntimeProfiler::MaybeOSR(JSFunction function, InterpretedFrame* frame) {
OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
BytecodeArray bytecode) {
if (function.HasAvailableOptimizedCode()) {
if (function.ActiveTierIsTurbofan()) {
return OptimizationReason::kDoNotOptimize;
}
int ticks = function.feedback_vector().profiler_ticks();
......@@ -219,22 +266,24 @@ OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
return OptimizationReason::kDoNotOptimize;
}
void RuntimeProfiler::MarkCandidatesForOptimizationFromBytecode() {
HandleScope scope(isolate_);
if (!isolate_->use_optimizer()) return;
DisallowHeapAllocation no_gc;
// Opens a marking pass: creates a HandleScope for the profiler's isolate and
// emits a (disabled-by-default) trace event covering the whole pass.
RuntimeProfiler::MarkCandidatesForOptimizationScope::
MarkCandidatesForOptimizationScope(RuntimeProfiler* profiler)
: handle_scope_(profiler->isolate_), profiler_(profiler) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.MarkCandidatesForOptimization");
}
// Closes a marking pass: resets the IC-change flag so the next pass observes
// only changes made after this point.
RuntimeProfiler::MarkCandidatesForOptimizationScope::
~MarkCandidatesForOptimizationScope() {
profiler_->any_ic_changed_ = false;
}
// Run through the JavaScript frames and collect them. If we already
// have a sample of the function, we mark it for optimizations
// (eagerly or lazily).
int frame_count = 0;
int frame_count_limit = FLAG_frame_count;
for (JavaScriptFrameIterator it(isolate_);
frame_count++ < frame_count_limit && !it.done(); it.Advance()) {
void RuntimeProfiler::MarkCandidatesForOptimizationFromBytecode() {
if (!isolate_->use_optimizer()) return;
MarkCandidatesForOptimizationScope scope(this);
int i = 0;
for (JavaScriptFrameIterator it(isolate_); i < FLAG_frame_count && !it.done();
i++, it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (!frame->is_interpreted()) continue;
......@@ -244,24 +293,35 @@ void RuntimeProfiler::MarkCandidatesForOptimizationFromBytecode() {
if (!function.has_feedback_vector()) continue;
MaybeOptimize(function, InterpretedFrame::cast(frame));
MaybeOptimizeInterpretedFrame(function, InterpretedFrame::cast(frame));
// TODO(leszeks): Move this increment to before the maybe optimize checks,
// and update the tests to assume the increment has already happened.
int ticks = function.feedback_vector().profiler_ticks();
if (ticks < Smi::kMaxValue) {
function.feedback_vector().set_profiler_ticks(ticks + 1);
}
function.feedback_vector().SaturatingIncrementProfilerTicks();
}
any_ic_changed_ = false;
}
void RuntimeProfiler::MarkCandidatesForOptimizationFromCode() {
if (FLAG_trace_turbo_nci) {
StdoutStream os;
os << "NCI tier-up: Marking candidates for optimization" << std::endl;
if (!isolate_->use_optimizer()) return;
MarkCandidatesForOptimizationScope scope(this);
int i = 0;
for (JavaScriptFrameIterator it(isolate_); i < FLAG_frame_count && !it.done();
i++, it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (!frame->is_optimized()) continue;
JSFunction function = frame->function();
if (function.code().kind() != CodeKind::NATIVE_CONTEXT_INDEPENDENT) {
continue;
}
DCHECK(function.shared().is_compiled());
DCHECK(function.has_feedback_vector());
function.feedback_vector().SaturatingIncrementProfilerTicks();
MaybeOptimizeNCIFrame(function);
}
// TODO(jgruber,v8:8888): Implement.
}
} // namespace internal
......
......@@ -5,6 +5,8 @@
#ifndef V8_EXECUTION_RUNTIME_PROFILER_H_
#define V8_EXECUTION_RUNTIME_PROFILER_H_
#include "src/common/assert-scope.h"
#include "src/handles/handles.h"
#include "src/utils/allocation.h"
namespace v8 {
......@@ -31,7 +33,11 @@ class RuntimeProfiler {
int nesting_levels = 1);
private:
void MaybeOptimize(JSFunction function, InterpretedFrame* frame);
// Make the decision whether to optimize the given function, and mark it for
// optimization if the decision was 'yes'.
void MaybeOptimizeNCIFrame(JSFunction function);
void MaybeOptimizeInterpretedFrame(JSFunction function,
InterpretedFrame* frame);
// Potentially attempts OSR from and returns whether no other
// optimization attempts should be made.
bool MaybeOSR(JSFunction function, InterpretedFrame* frame);
......@@ -40,6 +46,17 @@ class RuntimeProfiler {
void Optimize(JSFunction function, OptimizationReason reason);
void Baseline(JSFunction function, OptimizationReason reason);
// RAII helper bracketing one candidate-marking pass: holds a HandleScope for
// the profiler's isolate, forbids heap allocation for its lifetime, and on
// destruction resets the profiler's any_ic_changed_ flag.
class MarkCandidatesForOptimizationScope final {
public:
explicit MarkCandidatesForOptimizationScope(RuntimeProfiler* profiler);
~MarkCandidatesForOptimizationScope();
private:
HandleScope handle_scope_;
RuntimeProfiler* const profiler_;
DisallowHeapAllocation no_gc;
};
Isolate* isolate_;
bool any_ic_changed_;
};
......
......@@ -376,7 +376,8 @@ inline bool Code::is_interpreter_trampoline_builtin() const {
inline bool Code::checks_optimization_marker() const {
bool checks_marker =
(builtin_index() == Builtins::kCompileLazy ||
builtin_index() == Builtins::kInterpreterEntryTrampoline);
builtin_index() == Builtins::kInterpreterEntryTrampoline ||
CodeKindChecksOptimizationMarker(kind()));
return checks_marker ||
(CodeKindCanDeoptimize(kind()) && marked_for_deoptimization());
}
......
......@@ -72,6 +72,11 @@ inline constexpr bool CodeKindCanDeoptimize(CodeKind kind) {
return CodeKindIsOptimizedJSFunction(kind);
}
// Kinds of code whose entry path checks the optimization marker (see
// Code::checks_optimization_marker): interpreted bytecode and
// native-context-independent code; all other kinds do not.
inline constexpr bool CodeKindChecksOptimizationMarker(CodeKind kind) {
  switch (kind) {
    case CodeKind::INTERPRETED_FUNCTION:
    case CodeKind::NATIVE_CONTEXT_INDEPENDENT:
      return true;
    default:
      return false;
  }
}
inline CodeKind CodeKindForTopTier() {
return FLAG_turbo_nci_as_highest_tier ? CodeKind::NATIVE_CONTEXT_INDEPENDENT
: CodeKind::OPTIMIZED_FUNCTION;
......
......@@ -374,6 +374,11 @@ void FeedbackVector::AddToVectorsForProfilingTools(
isolate->SetFeedbackVectorsForProfilingTools(*list);
}
// Bumps the profiler tick counter by one, saturating at Smi::kMaxValue so
// the stored value always stays in Smi range.
void FeedbackVector::SaturatingIncrementProfilerTicks() {
  const int current = profiler_ticks();
  if (current >= Smi::kMaxValue) return;  // Already saturated; leave as-is.
  set_profiler_ticks(current + 1);
}
// static
void FeedbackVector::SetOptimizedCode(Handle<FeedbackVector> vector,
Handle<Code> code) {
......
......@@ -213,6 +213,9 @@ class FeedbackVector : public HeapObject {
// runtime profiler.
DECL_INT32_ACCESSORS(profiler_ticks)
// Increment profiler ticks, saturating at the maximal value.
void SaturatingIncrementProfilerTicks();
// Initialize the padding if necessary.
inline void clear_padding();
......
......@@ -132,17 +132,13 @@ bool JSFunction::ActiveTierIsIgnition() const {
bool JSFunction::ActiveTierIsTurbofan() const {
CodeKind highest_tier;
if (!HighestTierOf(GetAvailableCodeKinds(), &highest_tier)) return false;
bool result = highest_tier == CodeKind::OPTIMIZED_FUNCTION;
DCHECK_IMPLIES(result, !code().marked_for_deoptimization());
return result;
return highest_tier == CodeKind::OPTIMIZED_FUNCTION;
}
bool JSFunction::ActiveTierIsNCI() const {
CodeKind highest_tier;
if (!HighestTierOf(GetAvailableCodeKinds(), &highest_tier)) return false;
bool result = highest_tier == CodeKind::NATIVE_CONTEXT_INDEPENDENT;
DCHECK_IMPLIES(result, !code().marked_for_deoptimization());
return result;
return highest_tier == CodeKind::NATIVE_CONTEXT_INDEPENDENT;
}
bool JSFunction::HasOptimizationMarker() {
......@@ -237,7 +233,7 @@ void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
DCHECK(has_feedback_vector());
DCHECK(ChecksOptimizationMarker());
DCHECK(!HasAvailableOptimizedCode());
DCHECK(!ActiveTierIsTurbofan());
feedback_vector().SetOptimizationMarker(marker);
}
......@@ -458,9 +454,9 @@ void JSFunction::MarkForOptimization(ConcurrencyMode mode) {
mode = ConcurrencyMode::kNotConcurrent;
}
DCHECK(!is_compiled() || ActiveTierIsIgnition());
DCHECK(!is_compiled() || ActiveTierIsIgnition() || ActiveTierIsNCI());
DCHECK(!ActiveTierIsTurbofan());
DCHECK(shared().IsInterpreted());
DCHECK(!HasAvailableOptimizedCode());
DCHECK(shared().allows_lazy_compilation() ||
!shared().optimization_disabled());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment