Commit 961a2c88 authored by Ross McIlroy, committed by Commit Bot

[fullcodegen] Remove ability to compile with Full-Codegen.

Removes the pathways to use Full-Codegen from compiler.cc. Also removes all
paths to optimize using AstGraphBuilder, which relies on Full-Codegen.
Cleans up ast-numbering, runtime-profiler and some runtime functions to
remove now-dead code.
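
To illustrate the runtime-profiler cleanup, a minimal sketch of the private
RuntimeProfiler interface that survives this CL (condensed from the
RuntimeProfiler hunk below; other members elided):

  // Condensed from the RuntimeProfiler hunk in this CL: the
  // MaybeOptimizeFullCodegen and MaybeBaselineIgnition entry points are
  // gone, and the Ignition suffix is dropped from the remaining methods.
  void MaybeOptimize(JSFunction* function, JavaScriptFrame* frame);
  // Potentially attempts OSR and returns whether no other optimization
  // attempts should be made.
  bool MaybeOSR(JSFunction* function, JavaScriptFrame* frame);
  OptimizationReason ShouldOptimize(JSFunction* function,
                                    JavaScriptFrame* frame);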

This makes Full-Codegen and AstGraphBuilder dead, but doesn't remove their
code yet; that will be done in a follow-up CL to keep things reviewable.
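
For reference, a minimal sketch of what unoptimized job creation reduces
to after this CL (condensed from the compiler.cc hunks below; asm.js
handling and tracing elided):

  // Condensed from compiler.cc in this CL: with ShouldUseFullCodegen()
  // and GetUnoptimizedCompilationJob() removed, callers create the
  // Ignition bytecode compilation job directly.
  std::unique_ptr<CompilationJob> job(
      interpreter::Interpreter::NewCompilationJob(parse_info, literal,
                                                  isolate));
  if (job->PrepareJob() == CompilationJob::SUCCEEDED &&
      job->ExecuteJob() == CompilationJob::SUCCEEDED) {
    return job;
  }
  return std::unique_ptr<CompilationJob>();  // Compilation failed.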

BUG=v8:6409

Change-Id: I3901ff17d960b2bb084cef0cb39fa16cb8419881
Reviewed-on: https://chromium-review.googlesource.com/583328
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47277}
parent 45b4522e
@@ -24,9 +24,7 @@ class AstNumberingVisitor final : public AstVisitor<AstNumberingVisitor> {
properties_(zone),
language_mode_(SLOPPY),
slot_cache_(zone),
disable_fullcodegen_reason_(kNoReason),
dont_optimize_reason_(kNoReason),
dont_self_optimize_(false),
collect_type_profile_(collect_type_profile) {
InitializeAstVisitor(stack_limit);
}
@@ -57,13 +55,8 @@ class AstNumberingVisitor final : public AstVisitor<AstNumberingVisitor> {
return tmp;
}
void DisableSelfOptimization() { dont_self_optimize_ = true; }
void DisableOptimization(BailoutReason reason) {
dont_optimize_reason_ = reason;
DisableSelfOptimization();
}
void DisableFullCodegen(BailoutReason reason) {
disable_fullcodegen_reason_ = reason;
}
template <typename Node>
@@ -98,9 +91,7 @@ class AstNumberingVisitor final : public AstVisitor<AstNumberingVisitor> {
FunctionKind function_kind_;
// The slot cache allows us to reuse certain feedback slots.
FeedbackSlotCache slot_cache_;
BailoutReason disable_fullcodegen_reason_;
BailoutReason dont_optimize_reason_;
bool dont_self_optimize_;
bool collect_type_profile_;
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
@@ -132,7 +123,6 @@ void AstNumberingVisitor::VisitBreakStatement(BreakStatement* node) {
void AstNumberingVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
DisableFullCodegen(kDebuggerStatement);
}
@@ -159,16 +149,6 @@ void AstNumberingVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
void AstNumberingVisitor::VisitVariableProxyReference(VariableProxy* node) {
switch (node->var()->location()) {
case VariableLocation::LOOKUP:
DisableFullCodegen(kReferenceToAVariableWhichRequiresDynamicLookup);
break;
case VariableLocation::MODULE:
DisableFullCodegen(kReferenceToModuleVariable);
break;
default:
break;
}
}
void AstNumberingVisitor::VisitVariableProxy(VariableProxy* node,
@@ -188,14 +168,12 @@ void AstNumberingVisitor::VisitThisFunction(ThisFunction* node) {
void AstNumberingVisitor::VisitSuperPropertyReference(
SuperPropertyReference* node) {
DisableFullCodegen(kSuperReference);
Visit(node->this_var());
Visit(node->home_object());
}
void AstNumberingVisitor::VisitSuperCallReference(SuperCallReference* node) {
DisableFullCodegen(kSuperReference);
Visit(node->this_var());
Visit(node->new_target_var());
Visit(node->this_function_var());
@@ -209,8 +187,6 @@ void AstNumberingVisitor::VisitExpressionStatement(ExpressionStatement* node) {
void AstNumberingVisitor::VisitReturnStatement(ReturnStatement* node) {
Visit(node->expression());
DCHECK(!node->is_async_return() || disable_fullcodegen_reason_ != kNoReason);
}
void AstNumberingVisitor::VisitSuspend(Suspend* node) {
@@ -278,14 +254,12 @@ void AstNumberingVisitor::VisitCallRuntime(CallRuntime* node) {
void AstNumberingVisitor::VisitWithStatement(WithStatement* node) {
DisableFullCodegen(kWithStatement);
Visit(node->expression());
Visit(node->statement());
}
void AstNumberingVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
DisableSelfOptimization();
node->set_osr_id(ReserveId());
node->set_first_suspend_id(suspend_count_);
Visit(node->body());
@@ -295,7 +269,6 @@ void AstNumberingVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
void AstNumberingVisitor::VisitWhileStatement(WhileStatement* node) {
DisableSelfOptimization();
node->set_osr_id(ReserveId());
node->set_first_suspend_id(suspend_count_);
Visit(node->cond());
@@ -306,14 +279,12 @@ void AstNumberingVisitor::VisitWhileStatement(WhileStatement* node) {
void AstNumberingVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
DCHECK(node->scope() == nullptr || !node->scope()->HasBeenRemoved());
DisableFullCodegen(kTryCatchStatement);
Visit(node->try_block());
Visit(node->catch_block());
}
void AstNumberingVisitor::VisitTryFinallyStatement(TryFinallyStatement* node) {
DisableFullCodegen(kTryFinallyStatement);
Visit(node->try_block());
Visit(node->finally_block());
}
@@ -363,8 +334,6 @@ void AstNumberingVisitor::VisitCompareOperation(CompareOperation* node) {
}
void AstNumberingVisitor::VisitSpread(Spread* node) {
// We can only get here from spread calls currently.
DisableFullCodegen(kSpreadCall);
Visit(node->expression());
}
@@ -373,19 +342,16 @@ void AstNumberingVisitor::VisitEmptyParentheses(EmptyParentheses* node) {
}
void AstNumberingVisitor::VisitGetIterator(GetIterator* node) {
DisableFullCodegen(kGetIterator);
Visit(node->iterable());
ReserveFeedbackSlots(node);
}
void AstNumberingVisitor::VisitImportCallExpression(
ImportCallExpression* node) {
DisableFullCodegen(kDynamicImport);
Visit(node->argument());
}
void AstNumberingVisitor::VisitForInStatement(ForInStatement* node) {
DisableSelfOptimization();
node->set_osr_id(ReserveId());
Visit(node->enumerable()); // Not part of loop.
node->set_first_suspend_id(suspend_count_);
@@ -397,7 +363,6 @@ void AstNumberingVisitor::VisitForInStatement(ForInStatement* node) {
void AstNumberingVisitor::VisitForOfStatement(ForOfStatement* node) {
DisableFullCodegen(kForOfStatement);
node->set_osr_id(ReserveId());
Visit(node->assign_iterator()); // Not part of loop.
node->set_first_suspend_id(suspend_count_);
@@ -442,7 +407,6 @@ void AstNumberingVisitor::VisitCaseClause(CaseClause* node) {
void AstNumberingVisitor::VisitForStatement(ForStatement* node) {
DisableSelfOptimization();
node->set_osr_id(ReserveId());
if (node->init() != NULL) Visit(node->init()); // Not part of loop.
node->set_first_suspend_id(suspend_count_);
@@ -454,7 +418,6 @@ void AstNumberingVisitor::VisitForStatement(ForStatement* node) {
void AstNumberingVisitor::VisitClassLiteral(ClassLiteral* node) {
DisableFullCodegen(kClassLiteral);
LanguageModeScope language_mode_scope(this, STRICT);
if (node->extends()) Visit(node->extends());
if (node->constructor()) Visit(node->constructor());
@@ -481,7 +444,6 @@ void AstNumberingVisitor::VisitObjectLiteral(ObjectLiteral* node) {
}
void AstNumberingVisitor::VisitLiteralProperty(LiteralProperty* node) {
if (node->is_computed_name()) DisableFullCodegen(kComputedPropertyName);
Visit(node->key());
Visit(node->value());
}
@@ -496,9 +458,6 @@ void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
void AstNumberingVisitor::VisitCall(Call* node) {
if (node->is_possibly_eval()) {
DisableFullCodegen(kFunctionCallsEval);
}
ReserveFeedbackSlots(node);
Visit(node->expression());
VisitArguments(node->arguments());
@@ -559,29 +518,6 @@ void AstNumberingVisitor::VisitRewritableExpression(
bool AstNumberingVisitor::Renumber(FunctionLiteral* node) {
DeclarationScope* scope = node->scope();
DCHECK(!scope->HasBeenRemoved());
if (scope->new_target_var() != nullptr ||
scope->this_function_var() != nullptr) {
DisableFullCodegen(kSuperReference);
}
if (scope->arguments() != nullptr &&
!scope->arguments()->IsStackAllocated()) {
DisableFullCodegen(kContextAllocatedArguments);
}
if (scope->rest_parameter() != nullptr) {
DisableFullCodegen(kRestParameter);
}
if (IsResumableFunction(node->kind())) {
DisableFullCodegen(kGenerator);
}
if (IsClassConstructor(node->kind())) {
DisableFullCodegen(kClassConstructorFunction);
}
function_kind_ = node->kind();
LanguageModeScope language_mode_scope(this, node->language_mode());
@@ -596,25 +532,6 @@ bool AstNumberingVisitor::Renumber(FunctionLiteral* node) {
node->set_dont_optimize_reason(dont_optimize_reason());
node->set_suspend_count(suspend_count_);
if (dont_self_optimize_) {
node->set_dont_self_optimize();
}
if (disable_fullcodegen_reason_ != kNoReason) {
node->set_must_use_ignition();
if (FLAG_trace_opt && scope->asm_function()) {
// TODO(leszeks): This is a quick'n'dirty fix to allow the debug name of
// the function to be accessed in the below print. This DCHECK will fail
// if we move ast numbering off the main thread, but that won't be before
// we remove FCG, in which case this entire check isn't necessary anyway.
AllowHandleDereference allow_deref;
DCHECK(!node->debug_name().is_null());
PrintF("[enforcing Ignition for %s because: %s\n",
node->debug_name()->ToCString().get(),
GetBailoutReason(disable_fullcodegen_reason_));
}
}
return !HasStackOverflow();
}
......
@@ -2512,16 +2512,6 @@ class FunctionLiteral final : public Expression {
return ast_properties_.get_spec();
}
bool must_use_ignition() { return MustUseIgnitionField::decode(bit_field_); }
void set_must_use_ignition() {
bit_field_ = MustUseIgnitionField::update(bit_field_, true);
}
bool dont_self_optimize() { return DontSelfOptimize::decode(bit_field_); }
void set_dont_self_optimize() {
bit_field_ = DontSelfOptimize::update(bit_field_, true);
}
bool dont_optimize() { return dont_optimize_reason() != kNoReason; }
BailoutReason dont_optimize_reason() {
return DontOptimizeReasonField::decode(bit_field_);
@@ -2593,13 +2583,9 @@ class FunctionLiteral final : public Expression {
class HasDuplicateParameters : public BitField<bool, Pretenure::kNext, 1> {};
class ShouldNotBeUsedOnceHintField
: public BitField<bool, HasDuplicateParameters::kNext, 1> {};
class MustUseIgnitionField
: public BitField<bool, ShouldNotBeUsedOnceHintField::kNext, 1> {};
// TODO(6409): Remove when Full-Codegen dies.
class DontSelfOptimize
: public BitField<bool, MustUseIgnitionField::kNext, 1> {};
class DontOptimizeReasonField
: public BitField<BailoutReason, DontSelfOptimize::kNext, 8> {};
: public BitField<BailoutReason, ShouldNotBeUsedOnceHintField::kNext, 8> {
};
int expected_property_count_;
int parameter_count_;
......
@@ -126,11 +126,7 @@ bool CompilationInfo::is_this_defined() const { return !IsStub(); }
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
// TODO(6409) Remove when Full-Codegen dies.
bool CompilationInfo::ShouldSelfOptimize() {
return FLAG_opt && !literal()->dont_self_optimize() &&
!literal()->dont_optimize() &&
literal()->scope()->AllowsLazyCompilation();
}
bool CompilationInfo::ShouldSelfOptimize() { return false; }
void CompilationInfo::set_deferred_handles(
std::shared_ptr<DeferredHandles> deferred_handles) {
......
@@ -23,7 +23,6 @@
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/globals.h"
#include "src/heap/heap.h"
#include "src/interpreter/interpreter.h"
@@ -256,22 +255,6 @@ void EnsureFeedbackMetadata(CompilationInfo* compilation_info) {
compilation_info->literal()->feedback_vector_spec()));
}
bool ShouldUseFullCodegen(FunctionLiteral* literal) {
// Code which can't be supported by the old pipeline should use Ignition.
if (literal->must_use_ignition()) return false;
// Resumable functions are not supported by {FullCodeGenerator}, suspended
// activations stored as {JSGeneratorObject} on the heap always assume the
// underlying code to be based on the bytecode array.
DCHECK(!IsResumableFunction(literal->kind()));
// Use full-codegen for asm.js functions.
if (literal->scope()->asm_function()) return true;
// Disabled by default.
return false;
}
bool UseAsmWasm(FunctionLiteral* literal,
Handle<SharedFunctionInfo> shared_info, bool is_debug) {
// Check whether asm.js validation is enabled.
@@ -291,22 +274,6 @@ bool UseAsmWasm(FunctionLiteral* literal,
return literal->scope()->asm_module();
}
CompilationJob* GetUnoptimizedCompilationJob(ParseInfo* parse_info,
FunctionLiteral* literal,
Isolate* isolate) {
// Function should have been parsed and analyzed before creating a compilation
// job.
DCHECK_NOT_NULL(literal);
DCHECK_NOT_NULL(parse_info->scope());
if (ShouldUseFullCodegen(literal)) {
return FullCodeGenerator::NewCompilationJob(parse_info, literal, isolate);
} else {
return interpreter::Interpreter::NewCompilationJob(parse_info, literal,
isolate);
}
}
void InstallUnoptimizedCode(CompilationInfo* compilation_info) {
Handle<SharedFunctionInfo> shared = compilation_info->shared_info();
DCHECK_EQ(compilation_info->shared_info()->language_mode(),
@@ -433,7 +400,8 @@ std::unique_ptr<CompilationJob> PrepareAndExecuteUnoptimizedCompileJob(
// through to standard unoptimized compile.
}
std::unique_ptr<CompilationJob> job(
GetUnoptimizedCompilationJob(parse_info, literal, isolate));
interpreter::Interpreter::NewCompilationJob(parse_info, literal,
isolate));
if (job->PrepareJob() == CompilationJob::SUCCEEDED &&
job->ExecuteJob() == CompilationJob::SUCCEEDED) {
return job;
@@ -441,15 +409,6 @@ std::unique_ptr<CompilationJob> PrepareAndExecuteUnoptimizedCompileJob(
return std::unique_ptr<CompilationJob>(); // Compilation failed, return null.
}
bool InnerFunctionShouldUseFullCodegen(
ThreadedList<ThreadedListZoneEntry<FunctionLiteral*>>* literals) {
for (auto it : *literals) {
FunctionLiteral* literal = it->value();
if (ShouldUseFullCodegen(literal)) return true;
}
return false;
}
Handle<SharedFunctionInfo> CompileUnoptimizedCode(
ParseInfo* parse_info, Handle<SharedFunctionInfo> shared_info,
Isolate* isolate) {
@@ -461,23 +420,6 @@ Handle<SharedFunctionInfo> CompileUnoptimizedCode(
return Handle<SharedFunctionInfo>::null();
}
if (ShouldUseFullCodegen(parse_info->literal()) ||
InnerFunctionShouldUseFullCodegen(&inner_literals)) {
// If we might compile with full-codegen internalize now, otherwise
// we internalize when finalizing compilation.
parse_info->ast_value_factory()->Internalize(isolate);
// Full-codegen needs to access ScopeInfos when compiling, so allocate now.
DeclarationScope::AllocateScopeInfos(parse_info, isolate,
AnalyzeMode::kRegular);
if (parse_info->is_toplevel()) {
// Full-codegen needs to access SFI when compiling, so allocate the array
// now.
EnsureSharedFunctionInfosArrayOnScript(parse_info, isolate);
}
}
// Prepare and execute compilation of the outer-most function.
std::unique_ptr<CompilationJob> outer_job(
PrepareAndExecuteUnoptimizedCompileJob(parse_info, parse_info->literal(),
@@ -638,21 +580,6 @@ void InsertCodeIntoOptimizedCodeCache(CompilationInfo* compilation_info) {
bool GetOptimizedCodeNow(CompilationJob* job) {
CompilationInfo* compilation_info = job->compilation_info();
Isolate* isolate = compilation_info->isolate();
// Parsing is not required when optimizing from existing bytecode.
if (!compilation_info->is_optimizing_from_bytecode()) {
ParseInfo* parse_info = job->parse_info();
if (!Compiler::ParseAndAnalyze(parse_info, compilation_info->shared_info(),
isolate)) {
return false;
}
compilation_info->set_literal(parse_info->literal());
parse_info->ast_value_factory()->Internalize(isolate);
DeclarationScope::AllocateScopeInfos(parse_info, isolate,
AnalyzeMode::kRegular);
EnsureFeedbackMetadata(compilation_info);
}
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
RuntimeCallTimerScope runtimeTimer(isolate,
&RuntimeCallStats::RecompileSynchronous);
@@ -702,19 +629,6 @@ bool GetOptimizedCodeLater(CompilationJob* job) {
return false;
}
// Parsing is not required when optimizing from existing bytecode.
if (!compilation_info->is_optimizing_from_bytecode()) {
ParseInfo* parse_info = job->parse_info();
if (!Compiler::ParseAndAnalyze(parse_info, compilation_info->shared_info(),
isolate)) {
return false;
}
compilation_info->set_literal(parse_info->literal());
DeclarationScope::AllocateScopeInfos(parse_info, isolate,
AnalyzeMode::kRegular);
EnsureFeedbackMetadata(compilation_info);
}
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
RuntimeCallTimerScope runtimeTimer(isolate,
&RuntimeCallStats::RecompileSynchronous);
@@ -739,11 +653,6 @@ MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
Isolate* isolate = function->GetIsolate();
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
bool ignition_osr = osr_frame && osr_frame->is_interpreted();
USE(ignition_osr);
DCHECK_IMPLIES(ignition_osr, !osr_ast_id.IsNone());
DCHECK_IMPLIES(ignition_osr, FLAG_ignition_osr);
// Make sure we clear the optimization marker on the function so that we
// don't try to re-optimize.
if (function->HasOptimizationMarker()) {
@@ -806,20 +715,8 @@ MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeCode");
// TurboFan can optimize directly from existing bytecode.
if (shared->HasBytecodeArray()) {
compilation_info->MarkAsOptimizeFromBytecode();
}
// Verify that OSR compilations are delegated to the correct graph builder.
// Depending on the underlying frame the semantics of the {BailoutId} differ
// and the various graph builders hard-code a certain semantic:
// - Interpreter : The BailoutId represents a bytecode offset.
// - FullCodegen : The BailoutId represents the id of an AST node.
DCHECK_IMPLIES(compilation_info->is_osr() && ignition_osr,
compilation_info->is_optimizing_from_bytecode());
DCHECK_IMPLIES(compilation_info->is_osr() && !ignition_osr,
!compilation_info->is_optimizing_from_bytecode());
// TODO(rmcilroy): Remove OptimizeFromBytecode flag.
compilation_info->MarkAsOptimizeFromBytecode();
// In case of concurrent recompilation, all handles below this point will be
// allocated in a deferred handle scope that is detached and handed off to
@@ -1613,7 +1510,8 @@ CompilationJob* Compiler::PrepareUnoptimizedCompilationJob(
ParseInfo* parse_info, Isolate* isolate) {
VMState<COMPILER> state(isolate);
std::unique_ptr<CompilationJob> job(
GetUnoptimizedCompilationJob(parse_info, parse_info->literal(), isolate));
interpreter::Interpreter::NewCompilationJob(
parse_info, parse_info->literal(), isolate));
if (job->PrepareJob() != CompilationJob::SUCCEEDED) {
return nullptr;
}
......
@@ -276,6 +276,8 @@ AstGraphBuilder::AstGraphBuilder(Zone* local_zone, CompilationInfo* info,
exit_controls_(local_zone),
loop_assignment_analysis_(loop),
state_values_cache_(jsgraph) {
// TODO(6409, rmcilroy): Remove ast graph builder in followup CL.
UNREACHABLE();
InitializeAstVisitor(info->isolate());
}
......
@@ -311,7 +311,6 @@ DEFINE_BOOL(unbox_double_arrays, true, "automatically unbox arrays of doubles")
DEFINE_BOOL(string_slices, true, "use string slices")
// Flags for Ignition.
DEFINE_BOOL(ignition_osr, true, "enable support for OSR from ignition code")
DEFINE_BOOL(ignition_elide_noneffectful_bytecodes, true,
"elide bytecodes which won't have any external effect")
DEFINE_BOOL(ignition_reo, true, "use ignition register equivalence optimizer")
......
@@ -73,6 +73,8 @@ FullCodeGenerator::FullCodeGenerator(MacroAssembler* masm,
source_position_table_builder_(info->zone(),
info->SourcePositionRecordingMode()),
ic_total_count_(0) {
// TODO(6409, rmcilroy): Remove fullcodegen in followup CL.
UNREACHABLE();
DCHECK(!info->IsStub());
Initialize(stack_limit);
}
@@ -89,7 +91,8 @@ bool FullCodeGenerator::MakeCode(ParseInfo* parse_info, CompilationInfo* info,
uintptr_t stack_limit) {
Isolate* isolate = info->isolate();
DCHECK(!info->literal()->must_use_ignition());
// TODO(6409, rmcilroy): Remove fullcodegen in followup CL.
UNREACHABLE();
DCHECK(!FLAG_minimal);
RuntimeCallTimerScope runtimeTimer(isolate,
&RuntimeCallStats::CompileFullCode);
......
@@ -14148,27 +14148,6 @@ void JSFunction::ClearTypeFeedbackInfo() {
}
}
BailoutId Code::TranslatePcOffsetToBytecodeOffset(uint32_t pc_offset) {
DisallowHeapAllocation no_gc;
DCHECK(kind() == FUNCTION);
BackEdgeTable back_edges(this, &no_gc);
for (uint32_t i = 0; i < back_edges.length(); i++) {
if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
}
return BailoutId::None();
}
uint32_t Code::TranslateBytecodeOffsetToPcOffset(BailoutId bytecode_offset) {
DisallowHeapAllocation no_gc;
DCHECK(kind() == FUNCTION);
BackEdgeTable back_edges(this, &no_gc);
for (uint32_t i = 0; i < back_edges.length(); i++) {
if (back_edges.ast_id(i) == bytecode_offset) return back_edges.pc_offset(i);
}
UNREACHABLE(); // We expect to find the back edge.
return 0;
}
void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge);
}
......
@@ -4022,9 +4022,6 @@ class Code: public HeapObject {
void ClearInlineCaches();
BailoutId TranslatePcOffsetToBytecodeOffset(uint32_t pc_offset);
uint32_t TranslateBytecodeOffsetToPcOffset(BailoutId bytecode_offset);
#define DECL_CODE_AGE_ENUM(X) k##X##CodeAge,
enum Age {
kToBeExecutedOnceCodeAge = -3,
......
This diff is collapsed.
@@ -27,15 +27,12 @@ class RuntimeProfiler {
int nesting_levels = 1);
private:
void MaybeOptimizeFullCodegen(JSFunction* function, JavaScriptFrame* frame,
int frame_count);
void MaybeBaselineIgnition(JSFunction* function, JavaScriptFrame* frame);
void MaybeOptimizeIgnition(JSFunction* function, JavaScriptFrame* frame);
// Potentially attempts OSR from ignition and returns whether no other
void MaybeOptimize(JSFunction* function, JavaScriptFrame* frame);
// Potentially attempts OSR and returns whether no other
// optimization attempts should be made.
bool MaybeOSRIgnition(JSFunction* function, JavaScriptFrame* frame);
OptimizationReason ShouldOptimizeIgnition(JSFunction* function,
JavaScriptFrame* frame);
bool MaybeOSR(JSFunction* function, JavaScriptFrame* frame);
OptimizationReason ShouldOptimize(JSFunction* function,
JavaScriptFrame* frame);
void Optimize(JSFunction* function, OptimizationReason reason);
void Baseline(JSFunction* function, OptimizationReason reason);
......
@@ -244,30 +244,6 @@ static bool IsSuitableForOnStackReplacement(Isolate* isolate,
namespace {
BailoutId DetermineEntryAndDisarmOSRForBaseline(JavaScriptFrame* frame) {
Handle<Code> caller_code(frame->function()->shared()->code());
// Passing the PC in the JavaScript frame from the caller directly is
// not GC safe, so we walk the stack to get it.
if (!caller_code->contains(frame->pc())) {
// Code on the stack may not be the code object referenced by the shared
// function info. It may have been replaced to include deoptimization data.
caller_code = Handle<Code>(frame->LookupCode());
}
DCHECK_EQ(frame->LookupCode(), *caller_code);
DCHECK_EQ(Code::FUNCTION, caller_code->kind());
DCHECK(caller_code->contains(frame->pc()));
// Revert the patched back edge table, regardless of whether OSR succeeds.
BackEdgeTable::Revert(frame->isolate(), *caller_code);
// Return a BailoutId representing an AST id of the {IterationStatement}.
uint32_t pc_offset =
static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());
return caller_code->TranslatePcOffsetToBytecodeOffset(pc_offset);
}
BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
@@ -280,7 +256,6 @@ BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
DCHECK(frame->LookupCode()->is_interpreter_trampoline_builtin());
DCHECK(frame->function()->shared()->HasBytecodeArray());
DCHECK(frame->is_interpreted());
DCHECK(FLAG_ignition_osr);
// Reset the OSR loop nesting depth to disarm back edges.
bytecode->set_osr_loop_nesting_level(0);
@@ -306,12 +281,11 @@ RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
JavaScriptFrameIterator it(isolate);
JavaScriptFrame* frame = it.frame();
DCHECK_EQ(frame->function(), *function);
DCHECK(frame->is_interpreted());
// Determine the entry point for which this OSR request has been fired and
// also disarm all back edges in the calling code to stop new requests.
BailoutId ast_id = frame->is_interpreted()
? DetermineEntryAndDisarmOSRForInterpreter(frame)
: DetermineEntryAndDisarmOSRForBaseline(frame);
BailoutId ast_id = DetermineEntryAndDisarmOSRForInterpreter(frame);
DCHECK(!ast_id.IsNone());
MaybeHandle<Code> maybe_result;
......
@@ -233,7 +233,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
}
// If the function is already optimized, just return.
if (function->IsOptimized()) {
if (function->IsOptimized() || function->shared()->HasAsmWasmData()) {
return isolate->heap()->undefined_value();
}
......
@@ -10,11 +10,9 @@
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/execution.h"
#include "src/full-codegen/full-codegen.h"
#include "src/handles.h"
#include "src/objects-inl.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/parsing.h"
#include "test/cctest/cctest.h"
namespace v8 {
@@ -149,13 +147,9 @@ Handle<JSFunction> FunctionTester::Compile(Handle<JSFunction> function) {
}
CHECK(Compiler::Compile(function, Compiler::CLEAR_EXCEPTION));
if (info.shared_info()->HasBytecodeArray()) {
info.MarkAsDeoptimizationEnabled();
info.MarkAsOptimizeFromBytecode();
} else {
CHECK(Compiler::ParseAndAnalyze(&parse_info, shared, info.isolate()));
parse_info.ast_value_factory()->Internalize(info.isolate());
}
CHECK(info.shared_info()->HasBytecodeArray());
info.MarkAsDeoptimizationEnabled();
info.MarkAsOptimizeFromBytecode();
JSFunction::EnsureLiterals(function);
Handle<Code> code = Pipeline::GenerateCodeForTesting(&info);
@@ -174,9 +168,6 @@ Handle<JSFunction> FunctionTester::CompileGraph(Graph* graph) {
CompilationInfo info(parse_info.zone(), function->GetIsolate(),
parse_info.script(), shared, function);
CHECK(
parsing::ParseFunction(&parse_info, info.shared_info(), info.isolate()));
Handle<Code> code = Pipeline::GenerateCodeForTesting(&info, graph);
CHECK(!code.is_null());
function->ReplaceCode(*code);
......
@@ -3,11 +3,13 @@
# found in the LICENSE file.
[
[ALWAYS, {
# Issue 6166.
'debugger/asm-js-breakpoint-during-exec': [FAIL],
}], # ALWAYS
##############################################################################
['variant != default', {
# Issue 6166.
'debugger/asm-js-breakpoint-during-exec': [PASS, FAIL],
# Issue 6167.
'debugger/eval-scopes': [PASS, FAIL],
'debugger/scope-skip-variables-with-empty-name': [PASS, FAIL],
......