Commit 5f072169 authored by Jakob Gruber, committed by Commit Bot

[nci] Refactor optimized compilation info flags

... for more consistent naming and less boilerplate.

Getters now use the `lower_case_flag()` style. Setters now use the
`set_lower_case_flag()` style.

Bug: v8:8888
Change-Id: I5af35b13a013bf303c4ca8d86f926754af28bfce
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2237139
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68293}
parent 0817d7ee
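In practice the rename looks like this (an illustrative sketch, not part of the original commit; `info` stands for any OptimizedCompilationInfo pointer, and the accessor names are taken from the diff below):

  // Old style: is_*/has_* getters and MarkAs* setters.
  if (info->is_source_positions_enabled()) { /* ... */ }
  info->MarkAsSourcePositionsEnabled();

  // New style: bare flag-name getters and set_* setters.
  if (info->source_positions()) { /* ... */ }
  info->set_source_positions();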
@@ -751,7 +751,7 @@ void InsertCodeIntoOptimizedCodeCache(
  // Function context specialization folds-in the function context,
  // so no sharing can occur.
-  if (compilation_info->is_function_context_specializing()) {
+  if (compilation_info->function_context_specializing()) {
    // Native context specialized code is not shared, so make sure the optimized
    // code cache is clear.
    ClearOptimizedCodeCache(compilation_info);
...
@@ -32,7 +32,7 @@ OptimizedCompilationInfo::OptimizedCompilationInfo(
  // is active, to be able to get more precise source positions at the price of
  // more memory consumption.
  if (isolate->NeedsDetailedOptimizedCodeLineInfo()) {
-    MarkAsSourcePositionsEnabled();
+    set_source_positions();
  }
  SetTracingFlags(shared->PassesFilter(FLAG_trace_turbo_filter));
@@ -53,59 +53,82 @@ OptimizedCompilationInfo::OptimizedCompilationInfo(Code::Kind code_kind,
  ConfigureFlags();
}
+#ifdef DEBUG
+bool OptimizedCompilationInfo::FlagSetIsValid(Flag flag) const {
+  switch (flag) {
+    case kPoisonRegisterArguments:
+      return untrusted_code_mitigations();
+    default:
+      return true;
+  }
+  UNREACHABLE();
+}
+bool OptimizedCompilationInfo::FlagGetIsValid(Flag flag) const {
+  switch (flag) {
+    case kPoisonRegisterArguments:
+      if (!GetFlag(kPoisonRegisterArguments)) return true;
+      return untrusted_code_mitigations() && called_with_code_start_register();
+    default:
+      return true;
+  }
+  UNREACHABLE();
+}
+#endif  // DEBUG
void OptimizedCompilationInfo::ConfigureFlags() {
-  if (FLAG_untrusted_code_mitigations) SetFlag(kUntrustedCodeMitigations);
+  if (FLAG_untrusted_code_mitigations) set_untrusted_code_mitigations();
  switch (code_kind_) {
    case Code::OPTIMIZED_FUNCTION:
-      SetFlag(kCalledWithCodeStartRegister);
-      SetFlag(kSwitchJumpTableEnabled);
+      set_called_with_code_start_register();
+      set_switch_jump_table();
      if (FLAG_function_context_specialization) {
-        MarkAsFunctionContextSpecializing();
+        set_function_context_specializing();
      }
      if (FLAG_turbo_splitting) {
-        MarkAsSplittingEnabled();
+        set_splitting();
      }
      if (FLAG_untrusted_code_mitigations) {
-        MarkAsPoisoningRegisterArguments();
+        set_poison_register_arguments();
      }
      if (FLAG_analyze_environment_liveness) {
        // TODO(yangguo): Disable this in case of debugging for crbug.com/826613
-        MarkAsAnalyzeEnvironmentLiveness();
+        set_analyze_environment_liveness();
      }
      break;
    case Code::BYTECODE_HANDLER:
-      SetFlag(kCalledWithCodeStartRegister);
+      set_called_with_code_start_register();
      if (FLAG_turbo_splitting) {
-        MarkAsSplittingEnabled();
+        set_splitting();
      }
      break;
    case Code::BUILTIN:
    case Code::STUB:
      if (FLAG_turbo_splitting) {
-        MarkAsSplittingEnabled();
+        set_splitting();
      }
#if ENABLE_GDB_JIT_INTERFACE && DEBUG
-      MarkAsSourcePositionsEnabled();
+      set_source_positions();
#endif // ENABLE_GDB_JIT_INTERFACE && DEBUG
      break;
    case Code::WASM_FUNCTION:
    case Code::WASM_TO_CAPI_FUNCTION:
-      SetFlag(kSwitchJumpTableEnabled);
+      set_switch_jump_table();
      break;
    default:
      break;
  }
  if (FLAG_turbo_control_flow_aware_allocation) {
-    MarkAsTurboControlFlowAwareAllocation();
+    set_turbo_control_flow_aware_allocation();
  } else {
-    MarkAsTurboPreprocessRanges();
+    set_turbo_preprocess_ranges();
  }
}
OptimizedCompilationInfo::~OptimizedCompilationInfo() {
-  if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
+  if (disable_future_optimization() && has_shared_info()) {
    shared_info()->DisableOptimization(bailout_reason());
  }
}
@@ -134,12 +157,12 @@ void OptimizedCompilationInfo::AbortOptimization(BailoutReason reason) {
  if (bailout_reason_ == BailoutReason::kNoReason) {
    bailout_reason_ = reason;
  }
-  SetFlag(kDisableFutureOptimization);
+  set_disable_future_optimization();
}
void OptimizedCompilationInfo::RetryOptimization(BailoutReason reason) {
  DCHECK_NE(reason, BailoutReason::kNoReason);
-  if (GetFlag(kDisableFutureOptimization)) return;
+  if (disable_future_optimization()) return;
  bailout_reason_ = reason;
}
@@ -225,11 +248,11 @@ int OptimizedCompilationInfo::AddInlinedFunction(
void OptimizedCompilationInfo::SetTracingFlags(bool passes_filter) {
  if (!passes_filter) return;
-  if (FLAG_trace_turbo) SetFlag(kTraceTurboJson);
-  if (FLAG_trace_turbo_graph) SetFlag(kTraceTurboGraph);
-  if (FLAG_trace_turbo_scheduled) SetFlag(kTraceTurboScheduled);
-  if (FLAG_trace_turbo_alloc) SetFlag(kTraceTurboAllocation);
-  if (FLAG_trace_heap_broker) SetFlag(kTraceHeapBroker);
+  if (FLAG_trace_turbo) set_trace_turbo_json();
+  if (FLAG_trace_turbo_graph) set_trace_turbo_graph();
+  if (FLAG_trace_turbo_scheduled) set_trace_turbo_scheduled();
+  if (FLAG_trace_turbo_alloc) set_trace_turbo_allocation();
+  if (FLAG_trace_heap_broker) set_trace_heap_broker();
}
OptimizedCompilationInfo::InlinedFunctionHolder::InlinedFunctionHolder(
...
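The FlagSetIsValid/FlagGetIsValid helpers above centralize the DCHECKs that previously lived in MarkAsPoisoningRegisterArguments() and is_poisoning_register_arguments(). A minimal sketch of the ordering they enforce in DEBUG builds (illustrative only; `info` is a hypothetical, already-constructed OptimizedCompilationInfo):

  // kPoisonRegisterArguments may only be set once untrusted-code
  // mitigations are on, and may only read back as true when the code is
  // also called with the code start register.
  info.set_untrusted_code_mitigations();       // precondition for the setter
  info.set_called_with_code_start_register();  // precondition for the getter
  info.set_poison_register_arguments();        // DCHECK(FlagSetIsValid(...)) holds
  DCHECK(info.poison_register_arguments());    // DCHECK(FlagGetIsValid(...)) holds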
@@ -44,31 +44,58 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
+#define FLAGS(V) \
+  V(FunctionContextSpecializing, function_context_specializing, 0) \
+  V(Inlining, inlining, 1) \
+  V(DisableFutureOptimization, disable_future_optimization, 2) \
+  V(Splitting, splitting, 3) \
+  V(SourcePositions, source_positions, 4) \
+  V(BailoutOnUninitialized, bailout_on_uninitialized, 5) \
+  V(LoopPeeling, loop_peeling, 6) \
+  V(UntrustedCodeMitigations, untrusted_code_mitigations, 7) \
+  V(SwitchJumpTable, switch_jump_table, 8) \
+  V(CalledWithCodeStartRegister, called_with_code_start_register, 9) \
+  V(PoisonRegisterArguments, poison_register_arguments, 10) \
+  V(AllocationFolding, allocation_folding, 11) \
+  V(AnalyzeEnvironmentLiveness, analyze_environment_liveness, 12) \
+  V(TraceTurboJson, trace_turbo_json, 13) \
+  V(TraceTurboGraph, trace_turbo_graph, 14) \
+  V(TraceTurboScheduled, trace_turbo_scheduled, 15) \
+  V(TraceTurboAllocation, trace_turbo_allocation, 16) \
+  V(TraceHeapBroker, trace_heap_broker, 17) \
+  V(WasmRuntimeExceptionSupport, wasm_runtime_exception_support, 18) \
+  V(TurboControlFlowAwareAllocation, turbo_control_flow_aware_allocation, 19) \
+  V(TurboPreprocessRanges, turbo_preprocess_ranges, 20) \
+  V(ConcurrentInlining, concurrent_inlining, 21)
  enum Flag {
-    kFunctionContextSpecializing = 1 << 0,
-    kInliningEnabled = 1 << 1,
-    kDisableFutureOptimization = 1 << 2,
-    kSplittingEnabled = 1 << 3,
-    kSourcePositionsEnabled = 1 << 4,
-    kBailoutOnUninitialized = 1 << 5,
-    kLoopPeelingEnabled = 1 << 6,
-    kUntrustedCodeMitigations = 1 << 7,
-    kSwitchJumpTableEnabled = 1 << 8,
-    kCalledWithCodeStartRegister = 1 << 9,
-    kPoisonRegisterArguments = 1 << 10,
-    kAllocationFoldingEnabled = 1 << 11,
-    kAnalyzeEnvironmentLiveness = 1 << 12,
-    kTraceTurboJson = 1 << 13,
-    kTraceTurboGraph = 1 << 14,
-    kTraceTurboScheduled = 1 << 15,
-    kTraceTurboAllocation = 1 << 16,
-    kTraceHeapBroker = 1 << 17,
-    kWasmRuntimeExceptionSupport = 1 << 18,
-    kTurboControlFlowAwareAllocation = 1 << 19,
-    kTurboPreprocessRanges = 1 << 20,
-    kConcurrentInlining = 1 << 21,
+#define DEF_ENUM(Camel, Lower, Bit) k##Camel = 1 << Bit,
+    FLAGS(DEF_ENUM)
+#undef DEF_ENUM
  };
+#define DEF_GETTER(Camel, Lower, Bit) \
+  bool Lower() const { \
+    DCHECK(FlagGetIsValid(k##Camel)); \
+    return GetFlag(k##Camel); \
+  }
+  FLAGS(DEF_GETTER)
+#undef DEF_GETTER
+#define DEF_SETTER(Camel, Lower, Bit) \
+  void set_##Lower() { \
+    DCHECK(FlagSetIsValid(k##Camel)); \
+    SetFlag(k##Camel); \
+  }
+  FLAGS(DEF_SETTER)
+#undef DEF_SETTER
+#ifdef DEBUG
+  bool FlagGetIsValid(Flag flag) const;
+  bool FlagSetIsValid(Flag flag) const;
+#endif  // DEBUG
  // Construct a compilation info for optimized compilation.
  OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
                           Handle<SharedFunctionInfo> shared,
@@ -93,38 +120,6 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
  BailoutId osr_offset() const { return osr_offset_; }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
-  // Flags used by optimized compilation.
-  void MarkAsConcurrentInlining() { SetFlag(kConcurrentInlining); }
-  bool is_concurrent_inlining() const { return GetFlag(kConcurrentInlining); }
-  void MarkAsTurboControlFlowAwareAllocation() {
-    SetFlag(kTurboControlFlowAwareAllocation);
-  }
-  bool is_turbo_control_flow_aware_allocation() const {
-    return GetFlag(kTurboControlFlowAwareAllocation);
-  }
-  void MarkAsTurboPreprocessRanges() { SetFlag(kTurboPreprocessRanges); }
-  bool is_turbo_preprocess_ranges() const {
-    return GetFlag(kTurboPreprocessRanges);
-  }
-  void MarkAsFunctionContextSpecializing() {
-    SetFlag(kFunctionContextSpecializing);
-  }
-  bool is_function_context_specializing() const {
-    return GetFlag(kFunctionContextSpecializing);
-  }
-  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
-  bool is_source_positions_enabled() const {
-    return GetFlag(kSourcePositionsEnabled);
-  }
-  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
-  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
  void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
    poisoning_level_ = poisoning_level;
  }
@@ -132,75 +127,6 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
    return poisoning_level_;
  }
-  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
-  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
-  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
-  bool is_bailout_on_uninitialized() const {
-    return GetFlag(kBailoutOnUninitialized);
-  }
-  void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }
-  bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }
-  bool has_untrusted_code_mitigations() const {
-    return GetFlag(kUntrustedCodeMitigations);
-  }
-  bool switch_jump_table_enabled() const {
-    return GetFlag(kSwitchJumpTableEnabled);
-  }
-  bool called_with_code_start_register() const {
-    bool enabled = GetFlag(kCalledWithCodeStartRegister);
-    return enabled;
-  }
-  void MarkAsPoisoningRegisterArguments() {
-    DCHECK(has_untrusted_code_mitigations());
-    SetFlag(kPoisonRegisterArguments);
-  }
-  bool is_poisoning_register_arguments() const {
-    bool enabled = GetFlag(kPoisonRegisterArguments);
-    DCHECK_IMPLIES(enabled, has_untrusted_code_mitigations());
-    DCHECK_IMPLIES(enabled, called_with_code_start_register());
-    return enabled;
-  }
-  void MarkAsAllocationFoldingEnabled() { SetFlag(kAllocationFoldingEnabled); }
-  bool is_allocation_folding_enabled() const {
-    return GetFlag(kAllocationFoldingEnabled);
-  }
-  void MarkAsAnalyzeEnvironmentLiveness() {
-    SetFlag(kAnalyzeEnvironmentLiveness);
-  }
-  bool is_analyze_environment_liveness() const {
-    return GetFlag(kAnalyzeEnvironmentLiveness);
-  }
-  void SetWasmRuntimeExceptionSupport() {
-    SetFlag(kWasmRuntimeExceptionSupport);
-  }
-  bool wasm_runtime_exception_support() {
-    return GetFlag(kWasmRuntimeExceptionSupport);
-  }
-  bool trace_turbo_json_enabled() const { return GetFlag(kTraceTurboJson); }
-  bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }
-  bool trace_turbo_allocation_enabled() const {
-    return GetFlag(kTraceTurboAllocation);
-  }
-  bool trace_turbo_scheduled_enabled() const {
-    return GetFlag(kTraceTurboScheduled);
-  }
-  bool trace_heap_broker_enabled() const { return GetFlag(kTraceHeapBroker); }
  // Code getters and setters.
  void SetCode(Handle<Code> code) { code_ = code; }
@@ -240,10 +166,6 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
  BailoutReason bailout_reason() const { return bailout_reason_; }
-  bool is_disable_future_optimization() const {
-    return GetFlag(kDisableFutureOptimization);
-  }
  int optimization_id() const {
    DCHECK(IsOptimizing());
    return optimization_id_;
...
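For a single entry of the FLAGS list, the three X-macro instantiations above expand roughly as follows (an illustrative expansion for V(SourcePositions, source_positions, 4); it is not literal code from the diff):

  // FLAGS(DEF_ENUM) contributes the enumerator:
  kSourcePositions = 1 << 4,

  // FLAGS(DEF_GETTER) contributes the getter:
  bool source_positions() const {
    DCHECK(FlagGetIsValid(kSourcePositions));
    return GetFlag(kSourcePositions);
  }

  // FLAGS(DEF_SETTER) contributes the setter:
  void set_source_positions() {
    DCHECK(FlagSetIsValid(kSourcePositions));
    SetFlag(kSourcePositions);
  }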
@@ -162,7 +162,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
  DeoptimizeReason deoptimization_reason = exit->reason();
  Address deopt_entry =
      Deoptimizer::GetDeoptimizationEntry(tasm()->isolate(), deopt_kind);
-  if (info()->is_source_positions_enabled()) {
+  if (info()->source_positions()) {
    tasm()->RecordDeoptReason(deoptimization_reason, exit->pos(),
                              deoptimization_id);
  }
@@ -192,7 +192,7 @@ void CodeGenerator::AssembleCode() {
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(tasm(), StackFrame::MANUAL);
-  if (info->is_source_positions_enabled()) {
+  if (info->source_positions()) {
    AssembleSourcePosition(start_source_position());
  }
  offsets_info_.code_start_register_check = tasm()->pc_offset();
@@ -243,7 +243,7 @@ void CodeGenerator::AssembleCode() {
  unwinding_info_writer_.SetNumberOfInstructionBlocks(
      instructions()->InstructionBlockCount());
-  if (info->trace_turbo_json_enabled()) {
+  if (info->trace_turbo_json()) {
    block_starts_.assign(instructions()->instruction_blocks().size(), -1);
    instr_starts_.assign(instructions()->instructions().size(), {});
  }
@@ -254,7 +254,7 @@ void CodeGenerator::AssembleCode() {
    if (block->ShouldAlign() && !tasm()->jump_optimization_info()) {
      tasm()->CodeTargetAlign();
    }
-    if (info->trace_turbo_json_enabled()) {
+    if (info->trace_turbo_json()) {
      block_starts_[block->rpo_number().ToInt()] = tasm()->pc_offset();
    }
    // Bind a label for a block.
@@ -723,7 +723,7 @@ RpoNumber CodeGenerator::ComputeBranchInfo(BranchInfo* branch,
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    int instruction_index, const InstructionBlock* block) {
  Instruction* instr = instructions()->InstructionAt(instruction_index);
-  if (info()->trace_turbo_json_enabled()) {
+  if (info()->trace_turbo_json()) {
    instr_starts_[instruction_index].gap_pc_offset = tasm()->pc_offset();
  }
  int first_unused_stack_slot;
@@ -743,14 +743,14 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
-  if (info()->trace_turbo_json_enabled()) {
+  if (info()->trace_turbo_json()) {
    instr_starts_[instruction_index].arch_instr_pc_offset = tasm()->pc_offset();
  }
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;
-  if (info()->trace_turbo_json_enabled()) {
+  if (info()->trace_turbo_json()) {
    instr_starts_[instruction_index].condition_pc_offset = tasm()->pc_offset();
  }
@@ -834,7 +834,7 @@ void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
  buffer << "-- ";
  // Turbolizer only needs the source position, as it can reconstruct
  // the inlining stack from other information.
-  if (info->trace_turbo_json_enabled() || !tasm()->isolate() ||
+  if (info->trace_turbo_json() || !tasm()->isolate() ||
      tasm()->isolate()->concurrent_recompilation_enabled()) {
    buffer << source_position;
  } else {
@@ -1331,7 +1331,7 @@ void CodeGenerator::InitializeSpeculationPoison() {
  if (info()->called_with_code_start_register()) {
    tasm()->RecordComment("-- Prologue: generate speculation poison --");
    GenerateSpeculationPoisonFromCodeStartRegister();
-    if (info()->is_poisoning_register_arguments()) {
+    if (info()->poison_register_arguments()) {
      AssembleRegisterArgumentPoisoning();
    }
  } else {
...
@@ -429,8 +429,7 @@ Reduction JSInliner::ReduceJSCall(Node* node) {
  // always hold true.
  CHECK(shared_info->is_compiled());
-  if (!broker()->is_concurrent_inlining() &&
-      info_->is_source_positions_enabled()) {
+  if (!broker()->is_concurrent_inlining() && info_->source_positions()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(),
                                                       shared_info->object());
  }
@@ -462,10 +461,10 @@ Reduction JSInliner::ReduceJSCall(Node* node) {
  Graph::SubgraphScope scope(graph());
  BytecodeGraphBuilderFlags flags(
      BytecodeGraphBuilderFlag::kSkipFirstStackCheck);
-  if (info_->is_analyze_environment_liveness()) {
+  if (info_->analyze_environment_liveness()) {
    flags |= BytecodeGraphBuilderFlag::kAnalyzeEnvironmentLiveness;
  }
-  if (info_->is_bailout_on_uninitialized()) {
+  if (info_->bailout_on_uninitialized()) {
    flags |= BytecodeGraphBuilderFlag::kBailoutOnUninitialized;
  }
  {
...
@@ -151,7 +151,7 @@ class PipelineData {
        codegen_zone_scope_(zone_stats_, kCodegenZoneName),
        codegen_zone_(codegen_zone_scope_.zone()),
        broker_(new JSHeapBroker(isolate_, info_->zone(),
-                                 info_->trace_heap_broker_enabled(),
+                                 info_->trace_heap_broker(),
                                 is_concurrent_inlining)),
        register_allocation_zone_scope_(zone_stats_,
                                        kRegisterAllocationZoneName),
@@ -160,9 +160,9 @@ class PipelineData {
    PhaseScope scope(pipeline_statistics, "V8.TFInitPipelineData");
    graph_ = new (graph_zone_) Graph(graph_zone_);
    source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
-    node_origins_ = info->trace_turbo_json_enabled()
-                        ? new (graph_zone_) NodeOriginTable(graph_)
-                        : nullptr;
+    node_origins_ = info->trace_turbo_json() ? new (graph_zone_)
+                                                   NodeOriginTable(graph_)
+                                             : nullptr;
    simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
@@ -365,7 +365,7 @@ class PipelineData {
  }
  void ChooseSpecializationContext() {
-    if (info()->is_function_context_specializing()) {
+    if (info()->function_context_specializing()) {
      DCHECK(info()->has_context());
      specialization_context_ =
          Just(OuterContext(handle(info()->context(), isolate()), 0));
@@ -787,7 +787,7 @@ void PrintCode(Isolate* isolate, Handle<Code> code,
void TraceScheduleAndVerify(OptimizedCompilationInfo* info, PipelineData* data,
                            Schedule* schedule, const char* phase_name) {
-  if (info->trace_turbo_json_enabled()) {
+  if (info->trace_turbo_json()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
@@ -800,7 +800,7 @@ void TraceScheduleAndVerify(OptimizedCompilationInfo* info, PipelineData* data,
    }
    json_of << "\"},\n";
  }
-  if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
+  if (info->trace_turbo_graph() || FLAG_trace_turbo_scheduler) {
    AllowHandleDereference allow_deref;
    CodeTracer::StreamScope tracing_scope(data->GetCodeTracer());
    tracing_scope.stream()
@@ -858,13 +858,13 @@ class NodeOriginsWrapper final : public Reducer {
void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
                Reducer* reducer) {
-  if (data->info()->is_source_positions_enabled()) {
+  if (data->info()->source_positions()) {
    void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
    SourcePositionWrapper* const wrapper =
        new (buffer) SourcePositionWrapper(reducer, data->source_positions());
    reducer = wrapper;
  }
-  if (data->info()->trace_turbo_json_enabled()) {
+  if (data->info()->trace_turbo_json()) {
    void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
    NodeOriginsWrapper* const wrapper =
        new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
@@ -912,7 +912,7 @@ PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
    pipeline_statistics->BeginPhaseKind("V8.TFInitializing");
  }
-  if (info->trace_turbo_json_enabled()) {
+  if (info->trace_turbo_json()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\" : ";
    JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
@@ -938,7 +938,7 @@ PipelineStatistics* CreatePipelineStatistics(
    pipeline_statistics->BeginPhaseKind("V8.WasmInitializing");
  }
-  if (info->trace_turbo_json_enabled()) {
+  if (info->trace_turbo_json()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    std::unique_ptr<char[]> function_name = info->GetDebugName();
    json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
@@ -1052,13 +1052,13 @@ PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
  }
  if (!FLAG_always_opt) {
-    compilation_info()->MarkAsBailoutOnUninitialized();
+    compilation_info()->set_bailout_on_uninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
-    compilation_info()->MarkAsLoopPeelingEnabled();
+    compilation_info()->set_loop_peeling();
  }
  if (FLAG_turbo_inlining) {
-    compilation_info()->MarkAsInliningEnabled();
+    compilation_info()->set_inlining();
  }
  // This is the bottleneck for computing and setting poisoning level in the
@@ -1073,7 +1073,7 @@ PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);
  if (FLAG_turbo_allocation_folding) {
-    compilation_info()->MarkAsAllocationFoldingEnabled();
+    compilation_info()->set_allocation_folding();
  }
  // Determine whether to specialize the code for the function's context.
@@ -1084,11 +1084,11 @@ PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
  if (compilation_info()->closure()->raw_feedback_cell().map() ==
          ReadOnlyRoots(isolate).one_closure_cell_map() &&
      !compilation_info()->is_osr()) {
-    compilation_info()->MarkAsFunctionContextSpecializing();
+    compilation_info()->set_function_context_specializing();
    data_.ChooseSpecializationContext();
  }
-  if (compilation_info()->is_source_positions_enabled()) {
+  if (compilation_info()->source_positions()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(
        isolate, compilation_info()->shared_info());
  }
@@ -1262,20 +1262,20 @@ CompilationJob::Status WasmHeapStubCompilationJob::ExecuteJobImpl(
        &info_, wasm_engine_->GetOrCreateTurboStatistics(), &zone_stats_));
    pipeline_statistics->BeginPhaseKind("V8.WasmStubCodegen");
  }
-  if (info_.trace_turbo_json_enabled() || info_.trace_turbo_graph_enabled()) {
+  if (info_.trace_turbo_json() || info_.trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data_.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
        << "Begin compiling method " << info_.GetDebugName().get()
        << " using TurboFan" << std::endl;
  }
-  if (info_.trace_turbo_graph_enabled()) {  // Simple textual RPO.
+  if (info_.trace_turbo_graph()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(info_.code_kind())
                   << " graph -- " << std::endl
                   << AsRPO(*data_.graph());
  }
-  if (info_.trace_turbo_json_enabled()) {
+  if (info_.trace_turbo_json()) {
    TurboJsonFile json_of(&info_, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info_.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
@@ -1334,10 +1334,10 @@ struct GraphBuilderPhase {
  void Run(PipelineData* data, Zone* temp_zone) {
    BytecodeGraphBuilderFlags flags;
-    if (data->info()->is_analyze_environment_liveness()) {
+    if (data->info()->analyze_environment_liveness()) {
      flags |= BytecodeGraphBuilderFlag::kAnalyzeEnvironmentLiveness;
    }
-    if (data->info()->is_bailout_on_uninitialized()) {
+    if (data->info()->bailout_on_uninitialized()) {
      flags |= BytecodeGraphBuilderFlag::kBailoutOnUninitialized;
    }
@@ -1365,7 +1365,7 @@ struct InliningPhase {
                                              data->broker(), data->common(),
                                              data->machine(), temp_zone);
    JSCallReducer::Flags call_reducer_flags = JSCallReducer::kNoFlags;
-    if (data->info()->is_bailout_on_uninitialized()) {
+    if (data->info()->bailout_on_uninitialized()) {
      call_reducer_flags |= JSCallReducer::kBailoutOnUninitialized;
    }
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
@@ -1374,12 +1374,12 @@ struct InliningPhase {
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        data->specialization_context(),
-        data->info()->is_function_context_specializing()
+        data->info()->function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
-    if (data->info()->is_bailout_on_uninitialized()) {
+    if (data->info()->bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    // Passing the OptimizedCompilationInfo's shared zone here as
@@ -1401,7 +1401,7 @@ struct InliningPhase {
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
-    if (data->info()->is_inlining_enabled()) {
+    if (data->info()->inlining()) {
      AddReducer(data, &graph_reducer, &inlining);
    }
    graph_reducer.ReduceGraph();
@@ -1490,17 +1490,17 @@ struct SerializationPhase {
  void Run(PipelineData* data, Zone* temp_zone) {
    SerializerForBackgroundCompilationFlags flags;
-    if (data->info()->is_bailout_on_uninitialized()) {
+    if (data->info()->bailout_on_uninitialized()) {
      flags |= SerializerForBackgroundCompilationFlag::kBailoutOnUninitialized;
    }
-    if (data->info()->is_source_positions_enabled()) {
+    if (data->info()->source_positions()) {
      flags |= SerializerForBackgroundCompilationFlag::kCollectSourcePositions;
    }
-    if (data->info()->is_analyze_environment_liveness()) {
+    if (data->info()->analyze_environment_liveness()) {
      flags |=
          SerializerForBackgroundCompilationFlag::kAnalyzeEnvironmentLiveness;
    }
-    if (data->info()->is_inlining_enabled()) {
+    if (data->info()->inlining()) {
      flags |= SerializerForBackgroundCompilationFlag::kEnableTurboInlining;
    }
    RunSerializerForBackgroundCompilation(
@@ -1798,7 +1798,7 @@ struct MemoryOptimizationPhase {
    // Optimize allocations and load/store operations.
    MemoryOptimizer optimizer(
        data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
-        data->info()->is_allocation_folding_enabled()
+        data->info()->allocation_folding()
            ? MemoryLowering::AllocationFolding::kDoAllocationFolding
            : MemoryLowering::AllocationFolding::kDontAllocationFolding,
        data->debug_name(), &data->info()->tick_counter());
@@ -1990,8 +1990,8 @@ struct ComputeSchedulePhase {
  void Run(PipelineData* data, Zone* temp_zone) {
    Schedule* schedule = Scheduler::ComputeSchedule(
        temp_zone, data->graph(),
-        data->info()->is_splitting_enabled() ? Scheduler::kSplitNodes
-                                             : Scheduler::kNoFlags,
+        data->info()->splitting() ? Scheduler::kSplitNodes
+                                  : Scheduler::kNoFlags,
        &data->info()->tick_counter());
    data->set_schedule(schedule);
  }
@@ -2036,13 +2036,13 @@ struct InstructionSelectionPhase {
    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(), data->frame(),
-        data->info()->switch_jump_table_enabled()
+        data->info()->switch_jump_table()
            ? InstructionSelector::kEnableSwitchJumpTable
            : InstructionSelector::kDisableSwitchJumpTable,
        &data->info()->tick_counter(),
        data->address_of_max_unoptimized_frame_height(),
        data->address_of_max_pushed_argument_count(),
-        data->info()->is_source_positions_enabled()
+        data->info()->source_positions()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions,
        InstructionSelector::SupportedFeatures(),
@@ -2053,13 +2053,13 @@ struct InstructionSelectionPhase {
            ? InstructionSelector::kEnableRootsRelativeAddressing
            : InstructionSelector::kDisableRootsRelativeAddressing,
        data->info()->GetPoisoningMitigationLevel(),
-        data->info()->trace_turbo_json_enabled()
+        data->info()->trace_turbo_json()
            ? InstructionSelector::kEnableTraceTurboJson
            : InstructionSelector::kDisableTraceTurboJson);
    if (!selector.SelectInstructions()) {
      data->set_compilation_failed();
    }
-    if (data->info()->trace_turbo_json_enabled()) {
+    if (data->info()->trace_turbo_json()) {
      TurboJsonFile json_of(data->info(), std::ios_base::app);
      json_of << "{\"name\":\"" << phase_name()
              << "\",\"type\":\"instructions\""
@@ -2276,7 +2276,7 @@ struct PrintGraphPhase {
    OptimizedCompilationInfo* info = data->info();
    Graph* graph = data->graph();
-    if (info->trace_turbo_json_enabled()) {  // Print JSON.
+    if (info->trace_turbo_json()) {  // Print JSON.
      AllowHandleDereference allow_deref;
      TurboJsonFile json_of(info, std::ios_base::app);
@@ -2285,7 +2285,7 @@ struct PrintGraphPhase {
              << "},\n";
    }
-    if (info->trace_turbo_scheduled_enabled()) {
+    if (info->trace_turbo_scheduled()) {
      AccountingAllocator allocator;
      Schedule* schedule = data->schedule();
      if (schedule == nullptr) {
@@ -2299,7 +2299,7 @@ struct PrintGraphPhase {
      tracing_scope.stream()
          << "-- Graph after " << phase << " -- " << std::endl
          << AsScheduledGraph(schedule);
-    } else if (info->trace_turbo_graph_enabled()) {  // Simple textual RPO.
+    } else if (info->trace_turbo_graph()) {  // Simple textual RPO.
      AllowHandleDereference allow_deref;
      CodeTracer::StreamScope tracing_scope(data->GetCodeTracer());
      tracing_scope.stream()
@@ -2338,8 +2338,7 @@ struct VerifyGraphPhase {
#undef DECL_PIPELINE_PHASE_CONSTANTS_HELPER
void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
-  if (info()->trace_turbo_json_enabled() ||
-      info()->trace_turbo_graph_enabled()) {
+  if (info()->trace_turbo_json() || info()->trace_turbo_graph()) {
    Run<PrintGraphPhase>(phase);
  }
  if (FLAG_turbo_verify) {
@@ -2352,21 +2351,20 @@ void PipelineImpl::Serialize() {
  data->BeginPhaseKind("V8.TFBrokerInitAndSerialization");
-  if (info()->trace_turbo_json_enabled() ||
-      info()->trace_turbo_graph_enabled()) {
+  if (info()->trace_turbo_json() || info()->trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data->GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
        << "Begin compiling method " << info()->GetDebugName().get()
        << " using TurboFan" << std::endl;
  }
-  if (info()->trace_turbo_json_enabled()) {
+  if (info()->trace_turbo_json()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }
  data->source_positions()->AddDecorator();
-  if (data->info()->trace_turbo_json_enabled()) {
+  if (data->info()->trace_turbo_json()) {
    data->node_origins()->AddDecorator();
  }
@@ -2435,7 +2433,7 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  Run<TypedLoweringPhase>();
  RunPrintAndVerify(TypedLoweringPhase::phase_name());
-  if (data->info()->is_loop_peeling_enabled()) {
+  if (data->info()->loop_peeling()) {
    Run<LoopPeelingPhase>();
    RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
  } else {
@@ -2524,7 +2522,7 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  RunPrintAndVerify(DecompressionOptimizationPhase::phase_name(), true);
  data->source_positions()->RemoveDecorator();
-  if (data->info()->trace_turbo_json_enabled()) {
+  if (data->info()->trace_turbo_json()) {
    data->node_origins()->RemoveDecorator();
  }
@@ -2591,7 +2589,7 @@ bool PipelineImpl::OptimizeGraphForMidTier(Linkage* linkage) {
  RunPrintAndVerify(ScheduledMachineLoweringPhase::phase_name(), true);
  data->source_positions()->RemoveDecorator();
-  if (data->info()->trace_turbo_json_enabled()) {
+  if (data->info()->trace_turbo_json()) {
    data->node_origins()->RemoveDecorator();
  }
@@ -2633,12 +2631,12 @@ MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
  PipelineImpl pipeline(&data);
-  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
+  if (info.trace_turbo_json() || info.trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
        << "Begin compiling " << debug_name << " using TurboFan" << std::endl;
-    if (info.trace_turbo_json_enabled()) {
+    if (info.trace_turbo_json()) {
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\" : ";
      JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
@@ -2735,7 +2733,7 @@ wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
  PipelineImpl pipeline(&data);
-  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
+  if (info.trace_turbo_json() || info.trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
@@ -2743,13 +2741,13 @@ wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
        << " using TurboFan" << std::endl;
  }
-  if (info.trace_turbo_graph_enabled()) {  // Simple textual RPO.
+  if (info.trace_turbo_graph()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }
-  if (info.trace_turbo_json_enabled()) {
+  if (info.trace_turbo_json()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
@@ -2777,7 +2775,7 @@ wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
  DCHECK(result.succeeded());
-  if (info.trace_turbo_json_enabled()) {
+  if (info.trace_turbo_json()) {
    TurboJsonFile json_of(&info, std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&code_generator->block_starts()}
@@ -2796,7 +2794,7 @@ wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
    json_of << "\n}";
  }
-  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
+  if (info.trace_turbo_json() || info.trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
@@ -2856,7 +2854,7 @@ MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
  PipelineImpl pipeline(&data);
-  if (info->trace_turbo_json_enabled()) {
+  if (info->trace_turbo_json()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
@@ -2909,8 +2907,7 @@ void Pipeline::GenerateCodeForWasmFunction(
  PipelineImpl pipeline(&data);
-  if (data.info()->trace_turbo_json_enabled() ||
-      data.info()->trace_turbo_graph_enabled()) {
+  if (data.info()->trace_turbo_json() || data.info()->trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
@@ -2923,7 +2920,7 @@ void Pipeline::GenerateCodeForWasmFunction(
  data.BeginPhaseKind("V8.WasmOptimization");
  const bool is_asm_js = is_asmjs_module(module);
  if (FLAG_turbo_splitting && !is_asm_js) {
-    data.info()->MarkAsSplittingEnabled();
+    data.info()->set_splitting();
  }
  if (FLAG_wasm_opt || is_asm_js) {
    PipelineRunScope scope(&data, "V8.WasmFullOptimization",
@@ -2981,7 +2978,7 @@ void Pipeline::GenerateCodeForWasmFunction(
      code_generator->GetProtectedInstructionsData();
  result->result_tier = wasm::ExecutionTier::kTurbofan;
-  if (data.info()->trace_turbo_json_enabled()) {
+  if (data.info()->trace_turbo_json()) {
    TurboJsonFile json_of(data.info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&code_generator->block_starts()}
@@ -3000,8 +2997,7 @@ void Pipeline::GenerateCodeForWasmFunction(
    json_of << "\n}";
  }
-  if (data.info()->trace_turbo_json_enabled() ||
-      data.info()->trace_turbo_graph_enabled()) {
+  if (data.info()->trace_turbo_json() || data.info()->trace_turbo_graph()) {
    CodeTracer::StreamScope tracing_scope(data.GetCodeTracer());
    tracing_scope.stream()
        << "---------------------------------------------------\n"
@@ -3097,14 +3093,14 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) {
    return false;
  }
-  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
+  if (info()->trace_turbo_json() && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }
-  if (info()->trace_turbo_json_enabled()) {
+  if (info()->trace_turbo_json()) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    if (data_->source_positions() != nullptr) {
...@@ -3239,7 +3235,7 @@ void PipelineImpl::AssembleCode(Linkage* linkage, ...@@ -3239,7 +3235,7 @@ void PipelineImpl::AssembleCode(Linkage* linkage,
data->InitializeCodeGenerator(linkage, std::move(buffer)); data->InitializeCodeGenerator(linkage, std::move(buffer));
Run<AssembleCodePhase>(); Run<AssembleCodePhase>();
if (data->info()->trace_turbo_json_enabled()) { if (data->info()->trace_turbo_json()) {
TurboJsonFile json_of(data->info(), std::ios_base::app); TurboJsonFile json_of(data->info(), std::ios_base::app);
json_of << "{\"name\":\"code generation\"" json_of << "{\"name\":\"code generation\""
<< ", \"type\":\"instructions\"" << ", \"type\":\"instructions\""
...@@ -3269,7 +3265,7 @@ MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) { ...@@ -3269,7 +3265,7 @@ MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) {
info()->SetCode(code); info()->SetCode(code);
PrintCode(isolate(), code, info()); PrintCode(isolate(), code, info());
if (info()->trace_turbo_json_enabled()) { if (info()->trace_turbo_json()) {
TurboJsonFile json_of(info(), std::ios_base::app); TurboJsonFile json_of(info(), std::ios_base::app);
json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\"" json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
...@@ -3289,8 +3285,7 @@ MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) { ...@@ -3289,8 +3285,7 @@ MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) {
JsonPrintAllSourceWithPositions(json_of, data->info(), isolate()); JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
json_of << "\n}"; json_of << "\n}";
} }
if (info()->trace_turbo_json_enabled() || if (info()->trace_turbo_json() || info()->trace_turbo_graph()) {
info()->trace_turbo_graph_enabled()) {
CodeTracer::StreamScope tracing_scope(data->GetCodeTracer()); CodeTracer::StreamScope tracing_scope(data->GetCodeTracer());
tracing_scope.stream() tracing_scope.stream()
<< "---------------------------------------------------\n" << "---------------------------------------------------\n"
...@@ -3329,7 +3324,7 @@ namespace { ...@@ -3329,7 +3324,7 @@ namespace {
void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data, void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
const char* phase_name) { const char* phase_name) {
if (info->trace_turbo_json_enabled()) { if (info->trace_turbo_json()) {
AllowHandleDereference allow_deref; AllowHandleDereference allow_deref;
TurboJsonFile json_of(info, std::ios_base::app); TurboJsonFile json_of(info, std::ios_base::app);
json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\"" json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\""
...@@ -3339,7 +3334,7 @@ void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data, ...@@ -3339,7 +3334,7 @@ void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
*(data->sequence())} *(data->sequence())}
<< "}},\n"; << "}},\n";
} }
if (info->trace_turbo_graph_enabled()) { if (info->trace_turbo_graph()) {
AllowHandleDereference allow_deref; AllowHandleDereference allow_deref;
CodeTracer::StreamScope tracing_scope(data->GetCodeTracer()); CodeTracer::StreamScope tracing_scope(data->GetCodeTracer());
tracing_scope.stream() << "----- Instruction sequence " << phase_name tracing_scope.stream() << "----- Instruction sequence " << phase_name
...@@ -3371,13 +3366,13 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config, ...@@ -3371,13 +3366,13 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
#endif #endif
RegisterAllocationFlags flags; RegisterAllocationFlags flags;
if (data->info()->is_turbo_control_flow_aware_allocation()) { if (data->info()->turbo_control_flow_aware_allocation()) {
flags |= RegisterAllocationFlag::kTurboControlFlowAwareAllocation; flags |= RegisterAllocationFlag::kTurboControlFlowAwareAllocation;
} }
if (data->info()->is_turbo_preprocess_ranges()) { if (data->info()->turbo_preprocess_ranges()) {
flags |= RegisterAllocationFlag::kTurboPreprocessRanges; flags |= RegisterAllocationFlag::kTurboPreprocessRanges;
} }
if (data->info()->trace_turbo_allocation_enabled()) { if (data->info()->trace_turbo_allocation()) {
flags |= RegisterAllocationFlag::kTraceAllocation; flags |= RegisterAllocationFlag::kTraceAllocation;
} }
data->InitializeRegisterAllocationData(config, call_descriptor, flags); data->InitializeRegisterAllocationData(config, call_descriptor, flags);
...@@ -3395,16 +3390,15 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config, ...@@ -3395,16 +3390,15 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
->RangesDefinedInDeferredStayInDeferred()); ->RangesDefinedInDeferredStayInDeferred());
} }
if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) { if (info()->trace_turbo_json() && !data->MayHaveUnverifiableGraph()) {
TurboCfgFile tcf(isolate()); TurboCfgFile tcf(isolate());
tcf << AsC1VRegisterAllocationData("PreAllocation", tcf << AsC1VRegisterAllocationData("PreAllocation",
data->register_allocation_data()); data->register_allocation_data());
} }
if (info()->is_turbo_preprocess_ranges()) { if (info()->turbo_preprocess_ranges()) {
Run<SplinterLiveRangesPhase>(); Run<SplinterLiveRangesPhase>();
if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) { if (info()->trace_turbo_json() && !data->MayHaveUnverifiableGraph()) {
TurboCfgFile tcf(isolate()); TurboCfgFile tcf(isolate());
tcf << AsC1VRegisterAllocationData("PostSplinter", tcf << AsC1VRegisterAllocationData("PostSplinter",
data->register_allocation_data()); data->register_allocation_data());
...@@ -3417,7 +3411,7 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config, ...@@ -3417,7 +3411,7 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
Run<AllocateFPRegistersPhase<LinearScanAllocator>>(); Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
} }
if (info()->is_turbo_preprocess_ranges()) { if (info()->turbo_preprocess_ranges()) {
Run<MergeSplintersPhase>(); Run<MergeSplintersPhase>();
} }
...@@ -3449,7 +3443,7 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config, ...@@ -3449,7 +3443,7 @@ void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
verifier->VerifyGapMoves(); verifier->VerifyGapMoves();
} }
if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) { if (info()->trace_turbo_json() && !data->MayHaveUnverifiableGraph()) {
TurboCfgFile tcf(isolate()); TurboCfgFile tcf(isolate());
tcf << AsC1VRegisterAllocationData("CodeGen", tcf << AsC1VRegisterAllocationData("CodeGen",
data->register_allocation_data()); data->register_allocation_data());
......
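The hunks above switch call sites from the old is_foo() / foo_enabled() getters to the unified lower_case_flag() / set_lower_case_flag() style. As a minimal sketch of that accessor pattern, assuming a plain bitfield rather than V8's actual flag machinery (the class and flag names below are illustrative only):

#include <cstdint>

// Illustrative only: mirrors the getter/setter naming convention, not the
// real OptimizedCompilationInfo implementation.
class FlagsSketch {
 public:
  enum Flag : uint32_t {
    kTraceTurboJson = 1u << 0,
    kTraceTurboGraph = 1u << 1,
  };

  bool trace_turbo_json() const { return GetFlag(kTraceTurboJson); }
  void set_trace_turbo_json() { SetFlag(kTraceTurboJson); }

  bool trace_turbo_graph() const { return GetFlag(kTraceTurboGraph); }
  void set_trace_turbo_graph() { SetFlag(kTraceTurboGraph); }

 private:
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
  void SetFlag(Flag flag) { flags_ |= flag; }

  uint32_t flags_ = 0;
};

With this shape, a rename such as trace_turbo_json_enabled() to trace_turbo_json() is purely mechanical at every call site, which is what the pipeline hunks above show.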
...@@ -6894,18 +6894,17 @@ wasm::WasmCompilationResult ExecuteTurbofanWasmCompilation( ...@@ -6894,18 +6894,17 @@ wasm::WasmCompilationResult ExecuteTurbofanWasmCompilation(
OptimizedCompilationInfo info(GetDebugName(&zone, func_index), &zone, OptimizedCompilationInfo info(GetDebugName(&zone, func_index), &zone,
Code::WASM_FUNCTION); Code::WASM_FUNCTION);
if (env->runtime_exception_support) { if (env->runtime_exception_support) {
info.SetWasmRuntimeExceptionSupport(); info.set_wasm_runtime_exception_support();
} }
if (info.trace_turbo_json_enabled()) { if (info.trace_turbo_json()) {
TurboCfgFile tcf; TurboCfgFile tcf;
tcf << AsC1VCompilation(&info); tcf << AsC1VCompilation(&info);
} }
NodeOriginTable* node_origins = info.trace_turbo_json_enabled() ? new (&zone) NodeOriginTable(mcgraph->graph()) : nullptr; NodeOriginTable* node_origins = info.trace_turbo_json() ? new (&zone) NodeOriginTable(mcgraph->graph()) : nullptr;
SourcePositionTable* source_positions = SourcePositionTable* source_positions =
new (mcgraph->zone()) SourcePositionTable(mcgraph->graph()); new (mcgraph->zone()) SourcePositionTable(mcgraph->graph());
if (!BuildGraphForWasmFunction(wasm_engine->allocator(), env, func_body, if (!BuildGraphForWasmFunction(wasm_engine->allocator(), env, func_body,
......
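The wasm hunk above only allocates the NodeOriginTable when JSON tracing is enabled. A hedged sketch of that guard pattern, with stand-in types instead of V8's zone-allocated NodeOriginTable:

#include <memory>

// Stand-ins; the real code zone-allocates a NodeOriginTable over the graph.
struct Graph {};
struct OriginTable {
  explicit OriginTable(Graph* graph) : graph(graph) {}
  Graph* graph;
};

std::unique_ptr<OriginTable> MaybeMakeOriginTable(bool trace_turbo_json,
                                                  Graph* graph) {
  // Only pay for the tracing-only side table when it will actually be used.
  return trace_turbo_json ? std::make_unique<OriginTable>(graph) : nullptr;
}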
...@@ -257,8 +257,8 @@ i::Handle<i::JSFunction> Optimize( ...@@ -257,8 +257,8 @@ i::Handle<i::JSFunction> Optimize(
i::OptimizedCompilationInfo info(zone, isolate, shared, function); i::OptimizedCompilationInfo info(zone, isolate, shared, function);
if (flags & i::OptimizedCompilationInfo::kInliningEnabled) { if (flags & i::OptimizedCompilationInfo::kInlining) {
info.MarkAsInliningEnabled(); info.set_inlining();
} }
CHECK(info.shared_info()->HasBytecodeArray()); CHECK(info.shared_info()->HasBytecodeArray());
......
...@@ -25,7 +25,7 @@ FunctionTester::FunctionTester(const char* source, uint32_t flags) ...@@ -25,7 +25,7 @@ FunctionTester::FunctionTester(const char* source, uint32_t flags)
function((FLAG_allow_natives_syntax = true, NewFunction(source))), function((FLAG_allow_natives_syntax = true, NewFunction(source))),
flags_(flags) { flags_(flags) {
Compile(function); Compile(function);
const uint32_t supported_flags = OptimizedCompilationInfo::kInliningEnabled; const uint32_t supported_flags = OptimizedCompilationInfo::kInlining;
CHECK_EQ(0u, flags_ & ~supported_flags); CHECK_EQ(0u, flags_ & ~supported_flags);
} }
......
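FunctionTester rejects any flag bit outside its supported set by masking with the complement, as the CHECK_EQ above does. A small sketch of that check, using a placeholder bit value rather than the real kInlining constant:

#include <cassert>
#include <cstdint>

constexpr uint32_t kInlining = 1u << 0;  // placeholder bit position

void CheckOnlySupportedFlags(uint32_t flags) {
  constexpr uint32_t kSupportedFlags = kInlining;
  // Any bit outside the supported set survives the mask and trips the check.
  assert((flags & ~kSupportedFlags) == 0u);
}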
...@@ -37,12 +37,12 @@ SerializerTester::SerializerTester(const char* source) ...@@ -37,12 +37,12 @@ SerializerTester::SerializerTester(const char* source)
function_string += " })();"; function_string += " })();";
Handle<JSFunction> function = Handle<JSFunction>::cast(v8::Utils::OpenHandle( Handle<JSFunction> function = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CompileRun(function_string.c_str())))); *v8::Local<v8::Function>::Cast(CompileRun(function_string.c_str()))));
uint32_t flags = i::OptimizedCompilationInfo::kInliningEnabled | uint32_t flags = i::OptimizedCompilationInfo::kInlining |
i::OptimizedCompilationInfo::kFunctionContextSpecializing | i::OptimizedCompilationInfo::kFunctionContextSpecializing |
i::OptimizedCompilationInfo::kLoopPeelingEnabled | i::OptimizedCompilationInfo::kLoopPeeling |
i::OptimizedCompilationInfo::kBailoutOnUninitialized | i::OptimizedCompilationInfo::kBailoutOnUninitialized |
i::OptimizedCompilationInfo::kAllocationFoldingEnabled | i::OptimizedCompilationInfo::kAllocationFolding |
i::OptimizedCompilationInfo::kSplittingEnabled | i::OptimizedCompilationInfo::kSplitting |
i::OptimizedCompilationInfo::kAnalyzeEnvironmentLiveness; i::OptimizedCompilationInfo::kAnalyzeEnvironmentLiveness;
Optimize(function, main_zone(), main_isolate(), flags, &broker_); Optimize(function, main_zone(), main_isolate(), flags, &broker_);
function_ = JSFunctionRef(broker(), function); function_ = JSFunctionRef(broker(), function);
......
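The testers above compose the renamed enum constants with bitwise OR and later translate them into the new set_* calls. A hedged sketch of that translation, with placeholder bit values and a stand-in Info type:

#include <cstdint>

enum : uint32_t {  // placeholder bit positions, not the real enum values
  kInlining = 1u << 0,
  kLoopPeeling = 1u << 1,
};

struct Info {
  void set_inlining() { inlining = true; }
  void set_loop_peeling() { loop_peeling = true; }
  bool inlining = false;
  bool loop_peeling = false;
};

void ApplyFlags(Info* info, uint32_t flags) {
  if (flags & kInlining) info->set_inlining();
  if (flags & kLoopPeeling) info->set_loop_peeling();
}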
...@@ -10,7 +10,7 @@ namespace v8 { ...@@ -10,7 +10,7 @@ namespace v8 {
namespace internal { namespace internal {
namespace compiler { namespace compiler {
uint32_t flags = OptimizedCompilationInfo::kInliningEnabled; uint32_t flags = OptimizedCompilationInfo::kInlining;
TEST(Call) { TEST(Call) {
FunctionTester T("(function(a,b) { return %_Call(b, a, 1, 2, 3); })", flags); FunctionTester T("(function(a,b) { return %_Call(b, a, 1, 2, 3); })", flags);
......