Commit 52cef4ed authored by jgruber, committed by Commit Bot

Refactor OptimizedCompilationInfo construction

This mostly pushes code around (from the two specialized public ctors
to the ConfigureFlags method), but does include one behavioral change
in that all builtins/stubs/handlers now disable switch jump tables.

Bug: v8:6666
Change-Id: I801d5bdc7a9c4bcc3bc5eb467a7c049404ffaff0
Reviewed-on: https://chromium-review.googlesource.com/1201785
Reviewed-by: Stephan Herhut <herhut@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55652}
parent 37cb48d9
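
As an aside, here is a minimal standalone sketch of the construction pattern this
commit moves to: the specialized public constructors delegate to a single private
constructor, which funnels all flag setup through one ConfigureFlags() that
switches on the code kind. The class, enum, and flag names below are simplified
stand-ins for illustration, not V8's actual API.

#include <cstdint>
#include <iostream>

// Simplified stand-in for V8's Code::Kind.
enum class CodeKind { kOptimizedFunction, kBytecodeHandler, kBuiltin, kStub };

class CompilationInfo {
 public:
  static constexpr uint32_t kCalledWithCodeStartRegister = 1 << 0;
  static constexpr uint32_t kSwitchJumpTableEnabled = 1 << 1;

  // Specialized public constructors only record their extra state and
  // delegate; all flag configuration happens in one place.
  explicit CompilationInfo(CodeKind kind) : CompilationInfo(kind, -1) {}
  CompilationInfo(CodeKind kind, int optimization_id)
      : kind_(kind), optimization_id_(optimization_id) {
    ConfigureFlags();
  }

  bool HasFlag(uint32_t flag) const { return (flags_ & flag) != 0; }

 private:
  // All kind-dependent flags are derived here, mirroring the commit's
  // ConfigureFlags(); note builtins/stubs do not enable jump tables.
  void ConfigureFlags() {
    switch (kind_) {
      case CodeKind::kOptimizedFunction:
        flags_ |= kCalledWithCodeStartRegister | kSwitchJumpTableEnabled;
        break;
      case CodeKind::kBytecodeHandler:
        flags_ |= kCalledWithCodeStartRegister;
        break;
      case CodeKind::kBuiltin:
      case CodeKind::kStub:
        break;
    }
  }

  CodeKind kind_;
  int optimization_id_;
  uint32_t flags_ = 0;
};

int main() {
  CompilationInfo builtin(CodeKind::kBuiltin);
  // Prints 0: jump tables stay disabled for builtins.
  std::cout << builtin.HasFlag(CompilationInfo::kSwitchJumpTableEnabled)
            << "\n";
}

Centralizing the flag logic this way means a new code kind needs only one new
switch case, rather than edits scattered across every constructor.
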
@@ -17,22 +17,11 @@ namespace internal {
 OptimizedCompilationInfo::OptimizedCompilationInfo(
     Zone* zone, Isolate* isolate, Handle<SharedFunctionInfo> shared,
     Handle<JSFunction> closure)
-    : OptimizedCompilationInfo({}, AbstractCode::OPTIMIZED_FUNCTION, zone) {
+    : OptimizedCompilationInfo(Code::OPTIMIZED_FUNCTION, zone) {
   shared_info_ = shared;
   closure_ = closure;
   optimization_id_ = isolate->NextOptimizationId();
-  SetFlag(kCalledWithCodeStartRegister);
-  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
-  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
-  SetFlag(kSwitchJumpTableEnabled);
-  if (FLAG_untrusted_code_mitigations) MarkAsPoisoningRegisterArguments();
-  // TODO(yangguo): Disable this in case of debugging for crbug.com/826613
-  if (FLAG_analyze_environment_liveness) {
-    MarkAsAnalyzeEnvironmentLiveness();
-  }
   // Collect source positions for optimized code when profiling or if debugger
   // is active, to be able to get more precise source positions at the price of
   // more memory consumption.
@@ -45,39 +34,54 @@ OptimizedCompilationInfo::OptimizedCompilationInfo(
 OptimizedCompilationInfo::OptimizedCompilationInfo(
     Vector<const char> debug_name, Zone* zone, Code::Kind code_kind)
-    : OptimizedCompilationInfo(
-          debug_name, static_cast<AbstractCode::Kind>(code_kind), zone) {
-  if (code_kind == Code::BYTECODE_HANDLER) {
-    SetFlag(OptimizedCompilationInfo::kCalledWithCodeStartRegister);
-  }
-#if ENABLE_GDB_JIT_INTERFACE
-#if DEBUG
-  if (code_kind == Code::BUILTIN || code_kind == Code::STUB) {
-    MarkAsSourcePositionsEnabled();
-  }
-#endif
-#endif
+    : OptimizedCompilationInfo(code_kind, zone) {
+  debug_name_ = debug_name;
   SetTracingFlags(
       PassesFilter(debug_name, CStrVector(FLAG_trace_turbo_filter)));
-  // Embedded builtins don't support embedded absolute code addresses, so we
-  // cannot use jump tables.
-  if (code_kind != Code::BUILTIN && code_kind != Code::BYTECODE_HANDLER) {
-    SetFlag(kSwitchJumpTableEnabled);
-  }
 }
 
-OptimizedCompilationInfo::OptimizedCompilationInfo(
-    Vector<const char> debug_name, AbstractCode::Kind code_kind, Zone* zone)
-    : flags_(FLAG_untrusted_code_mitigations ? kUntrustedCodeMitigations : 0),
-      code_kind_(code_kind),
-      stub_key_(0),
-      builtin_index_(Builtins::kNoBuiltinId),
-      osr_offset_(BailoutId::None()),
-      zone_(zone),
-      deferred_handles_(nullptr),
-      bailout_reason_(BailoutReason::kNoReason),
-      optimization_id_(-1),
-      debug_name_(debug_name) {}
+OptimizedCompilationInfo::OptimizedCompilationInfo(Code::Kind code_kind,
+                                                   Zone* zone)
+    : code_kind_(code_kind), zone_(zone) {
+  ConfigureFlags();
+}
+
+void OptimizedCompilationInfo::ConfigureFlags() {
+  if (FLAG_untrusted_code_mitigations) SetFlag(kUntrustedCodeMitigations);
+
+  switch (code_kind_) {
+    case Code::OPTIMIZED_FUNCTION:
+      SetFlag(kCalledWithCodeStartRegister);
+      SetFlag(kSwitchJumpTableEnabled);
+      if (FLAG_function_context_specialization) {
+        MarkAsFunctionContextSpecializing();
+      }
+      if (FLAG_turbo_splitting) {
+        MarkAsSplittingEnabled();
+      }
+      if (FLAG_untrusted_code_mitigations) {
+        MarkAsPoisoningRegisterArguments();
+      }
+      if (FLAG_analyze_environment_liveness) {
+        // TODO(yangguo): Disable this in case of debugging for crbug.com/826613
+        MarkAsAnalyzeEnvironmentLiveness();
+      }
+      break;
+    case Code::BYTECODE_HANDLER:
+      SetFlag(kCalledWithCodeStartRegister);
+      break;
+    case Code::BUILTIN:
+    case Code::STUB:
+#if ENABLE_GDB_JIT_INTERFACE && DEBUG
+      MarkAsSourcePositionsEnabled();
+#endif  // ENABLE_GDB_JIT_INTERFACE && DEBUG
+      break;
+    default:
+      SetFlag(kSwitchJumpTableEnabled);
+      break;
+  }
+}
 
 OptimizedCompilationInfo::~OptimizedCompilationInfo() {
   if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
......
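
The comment removed in the hunk above gives the reason for the behavioral
change: embedded builtins cannot contain embedded absolute code addresses, and
a switch jump table is exactly a table of such addresses. As a loose
illustration only (plain C++ with hypothetical names, not V8 code), dispatching
through a table of function pointers bakes absolute addresses into the program,
which only works when that code never needs to be relocated:

#include <cstdio>

void HandleA() { std::puts("A"); }
void HandleB() { std::puts("B"); }
void HandleC() { std::puts("C"); }

int main() {
  // The table holds absolute addresses of the handlers. Code that must stay
  // relocatable cannot bake these in and falls back to compare-and-branch,
  // which is why the commit leaves kSwitchJumpTableEnabled off for builtins.
  void (*const jump_table[])() = {&HandleA, &HandleB, &HandleC};
  int index = 1;
  jump_table[index]();  // O(1) dispatch; prints "B"
}
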
@@ -79,11 +79,7 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   wasm::WasmCode* wasm_code() const {
     return const_cast<wasm::WasmCode*>(code_.as_wasm_code());
   }
-  AbstractCode::Kind abstract_code_kind() const { return code_kind_; }
-  Code::Kind code_kind() const {
-    DCHECK(code_kind_ < static_cast<AbstractCode::Kind>(Code::NUMBER_OF_KINDS));
-    return static_cast<Code::Kind>(code_kind_);
-  }
+  Code::Kind code_kind() const { return code_kind_; }
   uint32_t stub_key() const { return stub_key_; }
   void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
   int32_t builtin_index() const { return builtin_index_; }
@@ -200,15 +196,11 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   JSGlobalObject* global_object() const;
 
   // Accessors for the different compilation modes.
-  bool IsOptimizing() const {
-    return abstract_code_kind() == AbstractCode::OPTIMIZED_FUNCTION;
-  }
-  bool IsWasm() const {
-    return abstract_code_kind() == AbstractCode::WASM_FUNCTION;
-  }
+  bool IsOptimizing() const { return code_kind() == Code::OPTIMIZED_FUNCTION; }
+  bool IsWasm() const { return code_kind() == Code::WASM_FUNCTION; }
   bool IsStub() const {
-    return abstract_code_kind() != AbstractCode::OPTIMIZED_FUNCTION &&
-           abstract_code_kind() != AbstractCode::WASM_FUNCTION;
+    return code_kind() != Code::OPTIMIZED_FUNCTION &&
+           code_kind() != Code::WASM_FUNCTION;
   }
   void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
     DCHECK(IsOptimizing());
@@ -281,8 +273,8 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   }
 
  private:
-  OptimizedCompilationInfo(Vector<const char> debug_name,
-                           AbstractCode::Kind code_kind, Zone* zone);
+  OptimizedCompilationInfo(Code::Kind code_kind, Zone* zone);
+  void ConfigureFlags();
 
   void SetFlag(Flag flag) { flags_ |= flag; }
   bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
@@ -290,13 +282,13 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   void SetTracingFlags(bool passes_filter);
 
   // Compilation flags.
-  unsigned flags_;
+  unsigned flags_ = 0;
   PoisoningMitigationLevel poisoning_level_ =
       PoisoningMitigationLevel::kDontPoison;
 
-  AbstractCode::Kind code_kind_;
-  uint32_t stub_key_;
-  int32_t builtin_index_;
+  Code::Kind code_kind_;
+  uint32_t stub_key_ = 0;
+  int32_t builtin_index_ = -1;
 
   Handle<SharedFunctionInfo> shared_info_;
@@ -306,7 +298,7 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   CodeReference code_;
 
   // Entry point when compiling for OSR, {BailoutId::None} otherwise.
-  BailoutId osr_offset_;
+  BailoutId osr_offset_ = BailoutId::None();
 
   // The zone from which the compilation pipeline working on this
   // OptimizedCompilationInfo allocates.
@@ -314,11 +306,11 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   std::shared_ptr<DeferredHandles> deferred_handles_;
 
-  BailoutReason bailout_reason_;
+  BailoutReason bailout_reason_ = BailoutReason::kNoReason;
 
   InlinedFunctionList inlined_functions_;
 
-  int optimization_id_;
+  int optimization_id_ = -1;
 
   // The current OSR frame for specialization or {nullptr}.
   JavaScriptFrame* osr_frame_ = nullptr;
......