Commit 3530998c authored by Ross McIlroy, committed by Commit Bot

Reland "Get BytecodeArray via current frame where possible."

This is a reland of 7350e7b2

Disabled LayoutTest that was causing issues and will rebaseline once this has rolled.

Original change's description:
> Get BytecodeArray via current frame where possible.
>
> With BytecodeArray flushing the SFI->BytecodeArray pointer will become pseudo weak.
> Instead of getting the bytecode array from the SFI, get it from the frame instead
> (which is a strong pointer). Note: This won't actually change behaviour since the
> fact that the bytecode array was on the frame will retain it strongly, however it
> makes the contract that the BytecodeArray must exist at these points more explicit.
>
> Updates code in runtime-profiler.cc, frames.cc and runtime-test.cc to do this.
>
> BUG=v8:8395
>
> Cq-Include-Trybots: luci.chromium.try:linux_chromium_headless_rel;master.tryserver.blink:linux_trusty_blink_rel
> Change-Id: Id7a3e6857abd0e89bf238e9b0b01de4461df54e1
> Reviewed-on: https://chromium-review.googlesource.com/c/1310193
> Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
> Reviewed-by: Mythri Alle <mythria@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#57198}

TBR=mythria@chromium.org

Bug: v8:8395
Change-Id: I63044138f876a1cdfb8bb71499732a257f30d29a
Cq-Include-Trybots: luci.chromium.try:linux_chromium_headless_rel;master.tryserver.blink:linux_trusty_blink_rel
Reviewed-on: https://chromium-review.googlesource.com/c/1314336
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57219}
parent 70e6ffcc
......@@ -1655,7 +1655,7 @@ int InterpretedFrame::position() const {
int InterpretedFrame::LookupExceptionHandlerInTable(
int* context_register, HandlerTable::CatchPrediction* prediction) {
HandlerTable table(function()->shared()->GetBytecodeArray());
HandlerTable table(GetBytecodeArray());
return table.LookupRange(GetBytecodeOffset(), context_register, prediction);
}
......@@ -1723,8 +1723,7 @@ void InterpretedFrame::WriteInterpreterRegister(int register_index,
void InterpretedFrame::Summarize(std::vector<FrameSummary>* functions) const {
DCHECK(functions->empty());
AbstractCode* abstract_code =
AbstractCode::cast(function()->shared()->GetBytecodeArray());
AbstractCode* abstract_code = AbstractCode::cast(GetBytecodeArray());
FrameSummary::JavaScriptFrameSummary summary(
isolate(), receiver(), function(), abstract_code, GetBytecodeOffset(),
IsConstructor());
......
......@@ -112,7 +112,7 @@ void RuntimeProfiler::Optimize(JSFunction* function,
function->MarkForOptimization(ConcurrencyMode::kConcurrent);
}
void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
void RuntimeProfiler::AttemptOnStackReplacement(InterpretedFrame* frame,
int loop_nesting_levels) {
JSFunction* function = frame->function();
SharedFunctionInfo* shared = function->shared();
......@@ -133,14 +133,13 @@ void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
}
DCHECK_EQ(StackFrame::INTERPRETED, frame->type());
DCHECK(shared->HasBytecodeArray());
int level = shared->GetBytecodeArray()->osr_loop_nesting_level();
shared->GetBytecodeArray()->set_osr_loop_nesting_level(
int level = frame->GetBytecodeArray()->osr_loop_nesting_level();
frame->GetBytecodeArray()->set_osr_loop_nesting_level(
Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
}
void RuntimeProfiler::MaybeOptimize(JSFunction* function,
JavaScriptFrame* frame) {
InterpretedFrame* frame) {
if (function->IsInOptimizationQueue()) {
if (FLAG_trace_opt_verbose) {
PrintF("[function ");
......@@ -159,32 +158,28 @@ void RuntimeProfiler::MaybeOptimize(JSFunction* function,
if (function->shared()->optimization_disabled()) return;
if (frame->is_optimized()) return;
OptimizationReason reason = ShouldOptimize(function, frame);
OptimizationReason reason =
ShouldOptimize(function, function->shared()->GetBytecodeArray());
if (reason != OptimizationReason::kDoNotOptimize) {
Optimize(function, reason);
}
}
bool RuntimeProfiler::MaybeOSR(JSFunction* function, JavaScriptFrame* frame) {
SharedFunctionInfo* shared = function->shared();
bool RuntimeProfiler::MaybeOSR(JSFunction* function, InterpretedFrame* frame) {
int ticks = function->feedback_vector()->profiler_ticks();
// TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
// than kMaxToplevelSourceSize.
if (!frame->is_optimized() &&
(function->IsMarkedForOptimization() ||
function->IsMarkedForConcurrentOptimization() ||
function->HasOptimizedCode())) {
if (function->IsMarkedForOptimization() ||
function->IsMarkedForConcurrentOptimization() ||
function->HasOptimizedCode()) {
// Attempt OSR if we are still running interpreted code even though the
// the function has long been marked or even already been optimized.
int64_t allowance =
kOSRBytecodeSizeAllowanceBase +
static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
if (shared->GetBytecodeArray()->length() <= allowance) {
if (function->shared()->GetBytecodeArray()->length() <= allowance) {
AttemptOnStackReplacement(frame);
}
return true;
......@@ -193,21 +188,19 @@ bool RuntimeProfiler::MaybeOSR(JSFunction* function, JavaScriptFrame* frame) {
}
OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction* function,
JavaScriptFrame* frame) {
SharedFunctionInfo* shared = function->shared();
BytecodeArray* bytecode) {
int ticks = function->feedback_vector()->profiler_ticks();
if (shared->GetBytecodeArray()->length() > kMaxBytecodeSizeForOpt) {
if (bytecode->length() > kMaxBytecodeSizeForOpt) {
return OptimizationReason::kDoNotOptimize;
}
int ticks_for_optimization =
kProfilerTicksBeforeOptimization +
(shared->GetBytecodeArray()->length() / kBytecodeSizeAllowancePerTick);
(bytecode->length() / kBytecodeSizeAllowancePerTick);
if (ticks >= ticks_for_optimization) {
return OptimizationReason::kHotAndStable;
} else if (!any_ic_changed_ && shared->GetBytecodeArray()->length() <
kMaxBytecodeSizeForEarlyOpt) {
} else if (!any_ic_changed_ &&
bytecode->length() < kMaxBytecodeSizeForEarlyOpt) {
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
return OptimizationReason::kSmallFunction;
......@@ -220,7 +213,7 @@ OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction* function,
PrintF("ICs changed]\n");
} else {
PrintF(" too large for small function optimization: %d/%d]\n",
shared->GetBytecodeArray()->length(), kMaxBytecodeSizeForEarlyOpt);
bytecode->length(), kMaxBytecodeSizeForEarlyOpt);
}
}
return OptimizationReason::kDoNotOptimize;
......@@ -242,13 +235,13 @@ void RuntimeProfiler::MarkCandidatesForOptimization() {
frame_count++ < frame_count_limit && !it.done();
it.Advance()) {
JavaScriptFrame* frame = it.frame();
if (frame->is_optimized()) continue;
if (!frame->is_interpreted()) continue;
JSFunction* function = frame->function();
DCHECK(function->shared()->is_compiled());
if (!function->shared()->IsInterpreted()) continue;
MaybeOptimize(function, frame);
MaybeOptimize(function, InterpretedFrame::cast(frame));
// TODO(leszeks): Move this increment to before the maybe optimize checks,
// and update the tests to assume the increment has already happened.
......
......@@ -10,8 +10,9 @@
namespace v8 {
namespace internal {
class BytecodeArray;
class Isolate;
class JavaScriptFrame;
class InterpretedFrame;
class JSFunction;
enum class OptimizationReason : uint8_t;
......@@ -23,16 +24,16 @@ class RuntimeProfiler {
void NotifyICChanged() { any_ic_changed_ = true; }
void AttemptOnStackReplacement(JavaScriptFrame* frame,
void AttemptOnStackReplacement(InterpretedFrame* frame,
int nesting_levels = 1);
private:
void MaybeOptimize(JSFunction* function, JavaScriptFrame* frame);
void MaybeOptimize(JSFunction* function, InterpretedFrame* frame);
// Potentially attempts OSR from and returns whether no other
// optimization attempts should be made.
bool MaybeOSR(JSFunction* function, JavaScriptFrame* frame);
bool MaybeOSR(JSFunction* function, InterpretedFrame* frame);
OptimizationReason ShouldOptimize(JSFunction* function,
JavaScriptFrame* frame);
BytecodeArray* bytecode_array);
void Optimize(JSFunction* function, OptimizationReason reason);
void Baseline(JSFunction* function, OptimizationReason reason);
......
......@@ -299,7 +299,8 @@ RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
// Make the profiler arm all back edges in unoptimized code.
if (it.frame()->type() == StackFrame::INTERPRETED) {
isolate->runtime_profiler()->AttemptOnStackReplacement(
it.frame(), AbstractCode::kMaxLoopNestingMarker);
InterpretedFrame::cast(it.frame()),
AbstractCode::kMaxLoopNestingMarker);
}
return ReadOnlyRoots(isolate).undefined_value();
......
......@@ -26,7 +26,7 @@ return x + b;
---
Break location after LiveEdit:
var x = a;
var x = #3;
#var x = 3;
debugger;
stackChanged: true
......@@ -62,12 +62,12 @@ var x = 1;
---
Break location after LiveEdit:
function boo() {
var x = #3;
#var x = 3;
debugger;
stackChanged: true
Protocol.Debugger.stepInto
var x = 3;
#debugger;
var x = 1;
function boo() {
var x = #3;
debugger;
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment