Commit 60e127d4 authored by Mythri, committed by Commit Bot

Remove type feedback check from tiering up decisions for Ignition.

This cl: https://chromium-review.googlesource.com/c/538614/ changes the
number of ticks required for tiering up based on the size of the function.
An earlier cl: https://chromium-review.googlesource.com/c/529165/ also
resets ticks when type feedback changes. So, it is reasonable to assume
that a function which has the necessary number of ticks also has the type
feedback required for optimizing. Hence, this removes the type feedback
check from the tier-up decision.

Bug: 
Change-Id: Ia350ad4dfba5f93f1a17bdc0c309bf6b41b0c1c9
Reviewed-on: https://chromium-review.googlesource.com/647851
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Mythri Alle <mythria@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47816}
parent b8cdefb1
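
For reference, below is a minimal, self-contained sketch of how the tier-up decision reads after this change. The constant values and the free function ShouldOptimizeSketch are assumptions made for illustration only, not V8's actual implementation; the real logic is RuntimeProfiler::ShouldOptimize in the diff that follows.

// Sketch of the simplified tier-up decision after this CL: the choice depends
// only on ticks, bytecode size, and whether any IC changed since the last
// tick. The type-feedback percentage check is gone, because ticks are reset
// whenever type feedback changes, so enough ticks implies stable feedback.
#include <cstdio>

enum class OptimizationReason { kDoNotOptimize, kHotAndStable, kSmallFunction };

// Illustrative constants; the values are hypothetical, not V8's actual numbers.
constexpr int kProfilerTicksBeforeOptimization = 2;
constexpr int kCodeSizeAllowancePerTick = 50 * 1024;
constexpr int kMaxSizeEarlyOpt = 5 * 1024;

OptimizationReason ShouldOptimizeSketch(int ticks, int bytecode_size,
                                        bool any_ic_changed) {
  // Larger functions need proportionally more ticks before tiering up.
  int ticks_for_optimization =
      kProfilerTicksBeforeOptimization +
      (bytecode_size / kCodeSizeAllowancePerTick);
  if (ticks >= ticks_for_optimization) {
    return OptimizationReason::kHotAndStable;
  } else if (!any_ic_changed && bytecode_size < kMaxSizeEarlyOpt) {
    // Small function and no IC patched since the last tick: optimize early.
    return OptimizationReason::kSmallFunction;
  }
  return OptimizationReason::kDoNotOptimize;
}

int main() {
  // A 100 KB function with 4 ticks tiers up as hot and stable (prints 1);
  // a tiny function with stable ICs tiers up immediately (prints 2).
  std::printf("%d\n", static_cast<int>(ShouldOptimizeSketch(4, 100 * 1024, true)));
  std::printf("%d\n", static_cast<int>(ShouldOptimizeSketch(0, 2 * 1024, false)));
}

The simplification is visible here: once ticks are reset on type-feedback changes, accumulating enough ticks already implies the feedback is reasonably stable, so a separate type-info percentage check is redundant.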
@@ -25,14 +25,9 @@ static const int kProfilerTicksBeforeOptimization = 2;
 // but the function is hot and has been seen on the stack this number of times,
 // then we try to reenable optimization for this function.
 static const int kProfilerTicksBeforeReenablingOptimization = 250;
-// If a function does not have enough type info (according to
-// FLAG_type_info_threshold), but has seen a huge number of ticks,
-// optimize it as it is.
-static const int kTicksWhenNotEnoughTypeInfo = 100;
 // We only have one byte to store the number of ticks.
 STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
-STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);
 
 // The number of ticks required for optimizing a function increases with
 // the size of the bytecode. This is in addition to the
@@ -60,7 +55,6 @@ static const int kMaxSizeOpt = 60 * KB;
 #define OPTIMIZATION_REASON_LIST(V)                            \
   V(DoNotOptimize, "do not optimize")                          \
   V(HotAndStable, "hot and stable")                            \
-  V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
   V(SmallFunction, "small function")
 
 enum class OptimizationReason : uint8_t {
@@ -247,42 +241,11 @@ OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction* function,
       kProfilerTicksBeforeOptimization +
       (shared->bytecode_array()->Size() / kCodeSizeAllowancePerTick);
   if (ticks >= ticks_for_optimization) {
-    int typeinfo, generic, total, type_percentage, generic_percentage;
-    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
-                &generic_percentage);
-    if (type_percentage >= FLAG_type_info_threshold) {
-      // If this particular function hasn't had any ICs patched for enough
-      // ticks, optimize it now.
-      return OptimizationReason::kHotAndStable;
-    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      return OptimizationReason::kHotWithoutMuchTypeInfo;
-    } else {
-      if (FLAG_trace_opt_verbose) {
-        PrintF("[not yet optimizing ");
-        function->PrintName();
-        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
-               type_percentage);
-      }
-      return OptimizationReason::kDoNotOptimize;
-    }
+    return OptimizationReason::kHotAndStable;
   } else if (!any_ic_changed_ &&
              shared->bytecode_array()->Size() < kMaxSizeEarlyOpt) {
     // If no IC was patched since the last tick and this function is very
     // small, optimistically optimize it now.
-    int typeinfo, generic, total, type_percentage, generic_percentage;
-    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
-                &generic_percentage);
-    if (type_percentage < FLAG_type_info_threshold) {
-      if (FLAG_trace_opt_verbose) {
-        PrintF("[not yet optimizing ");
-        function->PrintName();
-        PrintF(
-            ", not enough type info for small function optimization: %d/%d "
-            "(%d%%)]\n",
-            typeinfo, total, type_percentage);
-      }
-      return OptimizationReason::kDoNotOptimize;
-    }
     return OptimizationReason::kSmallFunction;
   } else if (FLAG_trace_opt_verbose) {
     PrintF("[not yet optimizing ");