Commit 069c2ac2 authored by Leszek Swirski, committed by Commit Bot

[sfi] Move bailout reason into compiler hints

By representing "optimization disabled" with a kNoReason bailout reason,
we have enough spare bits to merge the bailout reason field into
compiler hints. This decreases SFI size by one word.

Change-Id: I0169c91dfbfa443128b060a83e483717ed31a166
Reviewed-on: https://chromium-review.googlesource.com/595980
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47080}
parent 7ac416ca
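
For illustration, a minimal self-contained C++ sketch of the packing scheme this commit describes follows. It is not V8's actual BitField implementation or bit layout; the enum values, the shift, and the helper names below are illustrative assumptions. The key point is that kNoReason doubles as "optimization is not disabled", so the separate bailout_reason word (and the dedicated OptimizationDisabledBit) can be dropped.

// Illustrative sketch only; not V8's real BitField or bit layout.
#include <cassert>
#include <cstdint>
#include <iostream>

enum BailoutReason : uint32_t {
  kNoReason = 0,
  kOptimizationDisabledForTest = 1,
  kFunctionTooBig = 2,
  kLastErrorMessage = kFunctionTooBig,  // illustrative; V8 has many more
};

// Simplified stand-in for V8's BitField<T, shift, size> template.
template <typename T, int kShift, int kSize>
struct BitFieldSketch {
  static constexpr uint32_t kMax = (1u << kSize) - 1;
  static constexpr uint32_t kMask = kMax << kShift;
  static uint32_t update(uint32_t word, T value) {
    return (word & ~kMask) | (static_cast<uint32_t>(value) << kShift);
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

// Assume the reason occupies the top 7 bits of the 32-bit hints word
// (the real position depends on the other COMPILER_HINTS_BIT_FIELDS).
using DisabledOptimizationReasonBits = BitFieldSketch<BailoutReason, 25, 7>;
static_assert(kLastErrorMessage <= DisabledOptimizationReasonBits::kMax,
              "bailout reasons must fit in the 7-bit field");

struct SharedFunctionInfoSketch {
  uint32_t compiler_hints = 0;  // all hints default to 0, i.e. kNoReason

  BailoutReason disable_optimization_reason() const {
    return DisabledOptimizationReasonBits::decode(compiler_hints);
  }
  // "optimization disabled" is derived rather than stored in its own bit.
  bool optimization_disabled() const {
    return disable_optimization_reason() != kNoReason;
  }
  void DisableOptimization(BailoutReason reason) {
    assert(reason != kNoReason);  // mirrors the DCHECK in the real method
    compiler_hints =
        DisabledOptimizationReasonBits::update(compiler_hints, reason);
  }
};

int main() {
  SharedFunctionInfoSketch sfi;
  assert(!sfi.optimization_disabled());
  sfi.DisableOptimization(kOptimizationDisabledForTest);
  assert(sfi.optimization_disabled());
  std::cout << "disable_optimization_reason = "
            << sfi.disable_optimization_reason() << "\n";
  return 0;
}
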
@@ -2559,7 +2559,6 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
share->set_function_token_position(0);
// All compiler hints default to false or 0.
share->set_compiler_hints(0);
share->set_bailout_reason(0);
share->set_kind(kind);
share->set_preparsed_scope_data(*null_value());
@@ -13761,16 +13761,10 @@ std::ostream& operator<<(std::ostream& os, const SourceCodeOf& v) {
void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby losing the
// non-optimizable information for the code. When the code is
// regenerated and set on the shared function info it is marked as
// non-optimizable if optimization is disabled for the shared
// function info.
DCHECK(reason != kNoReason);
set_optimization_disabled(true);
set_disable_optimization_reason(reason);
set_compiler_hints(
DisabledOptimizationReasonBits::update(compiler_hints(), reason));
// Code should be the lazy compilation stub or else unoptimized.
DCHECK(abstract_code()->kind() == AbstractCode::FUNCTION ||
abstract_code()->kind() == AbstractCode::INTERPRETED_FUNCTION ||
@@ -58,8 +58,6 @@ INT_ACCESSORS(SharedFunctionInfo, start_position_and_type,
INT_ACCESSORS(SharedFunctionInfo, function_token_position,
kFunctionTokenPositionOffset)
INT_ACCESSORS(SharedFunctionInfo, compiler_hints, kCompilerHintsOffset)
INT_ACCESSORS(SharedFunctionInfo, bailout_reason,
kCountersAndBailoutReasonOffset)
bool SharedFunctionInfo::has_shared_name() const {
return raw_name() != kNoSharedNameSentinel;
@@ -102,8 +100,14 @@ BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline,
SharedFunctionInfo::ForceInlineBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints, is_asm_wasm_broken,
SharedFunctionInfo::IsAsmWasmBrokenBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints, optimization_disabled,
SharedFunctionInfo::OptimizationDisabledBit)
bool SharedFunctionInfo::optimization_disabled() const {
return disable_optimization_reason() != BailoutReason::kNoReason;
}
BailoutReason SharedFunctionInfo::disable_optimization_reason() const {
return DisabledOptimizationReasonBits::decode(compiler_hints());
}
LanguageMode SharedFunctionInfo::language_mode() {
STATIC_ASSERT(LANGUAGE_END == 2);
@@ -355,10 +359,6 @@ void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
set_function_identifier(inferred_name);
}
BIT_FIELD_ACCESSORS(SharedFunctionInfo, bailout_reason,
disable_optimization_reason,
SharedFunctionInfo::DisabledOptimizationReasonBits)
bool SharedFunctionInfo::IsUserJavaScript() {
Object* script_obj = script();
if (script_obj->IsUndefined(GetIsolate())) return false;
@@ -281,12 +281,6 @@ class SharedFunctionInfo : public HeapObject {
// Indicates if this function can be lazy compiled.
DECL_BOOLEAN_ACCESSORS(allows_lazy_compilation)
// Indicates whether optimizations have been disabled for this
// shared function info. If a function is repeatedly optimized or if
// we cannot optimize the function we disable optimization to avoid
// spending time attempting to optimize it again.
DECL_BOOLEAN_ACCESSORS(optimization_disabled)
// Indicates the language mode.
inline LanguageMode language_mode();
inline void set_language_mode(LanguageMode language_mode);
@@ -324,6 +318,14 @@
// Recalculates the |map_index| value after modifications of this shared info.
inline void UpdateFunctionMapIndex();
// Indicates whether optimizations have been disabled for this shared function
// info. If we cannot optimize the function we disable optimization to avoid
// spending time attempting to optimize it again.
inline bool optimization_disabled() const;
// The reason why optimization was disabled.
inline BailoutReason disable_optimization_reason() const;
// Disable (further) attempted optimization of all functions sharing this
// shared function info.
void DisableOptimization(BailoutReason reason);
@@ -333,12 +335,6 @@
Handle<Object> GetSourceCode();
Handle<Object> GetSourceCodeHarmony();
// Stores bailout_reason as a bit-field.
DECL_INT_ACCESSORS(bailout_reason)
inline BailoutReason disable_optimization_reason() const;
inline void set_disable_optimization_reason(BailoutReason reason);
// Tells whether this function should be subject to debugging.
inline bool IsSubjectToDebugging();
@@ -414,33 +410,32 @@ class SharedFunctionInfo : public HeapObject {
#endif
// Layout description.
#define SHARED_FUNCTION_INFO_FIELDS(V) \
/* Pointer fields. */ \
V(kCodeOffset, kPointerSize) \
V(kNameOffset, kPointerSize) \
V(kScopeInfoOffset, kPointerSize) \
V(kOuterScopeInfoOffset, kPointerSize) \
V(kConstructStubOffset, kPointerSize) \
V(kInstanceClassNameOffset, kPointerSize) \
V(kFunctionDataOffset, kPointerSize) \
V(kScriptOffset, kPointerSize) \
V(kDebugInfoOffset, kPointerSize) \
V(kFunctionIdentifierOffset, kPointerSize) \
V(kFeedbackMetadataOffset, kPointerSize) \
V(kPreParsedScopeDataOffset, kPointerSize) \
V(kEndOfPointerFieldsOffset, 0) \
/* Raw data fields. */ \
V(kFunctionLiteralIdOffset, kInt32Size) \
V(kUniqueIdOffset, kUniqueIdFieldSize) \
V(kLengthOffset, kInt32Size) \
V(kFormalParameterCountOffset, kInt32Size) \
V(kExpectedNofPropertiesOffset, kInt32Size) \
V(kStartPositionAndTypeOffset, kInt32Size) \
V(kEndPositionOffset, kInt32Size) \
V(kFunctionTokenPositionOffset, kInt32Size) \
V(kCompilerHintsOffset, kInt32Size) \
V(kCountersAndBailoutReasonOffset, kInt32Size) \
/* Total size. */ \
#define SHARED_FUNCTION_INFO_FIELDS(V) \
/* Pointer fields. */ \
V(kCodeOffset, kPointerSize) \
V(kNameOffset, kPointerSize) \
V(kScopeInfoOffset, kPointerSize) \
V(kOuterScopeInfoOffset, kPointerSize) \
V(kConstructStubOffset, kPointerSize) \
V(kInstanceClassNameOffset, kPointerSize) \
V(kFunctionDataOffset, kPointerSize) \
V(kScriptOffset, kPointerSize) \
V(kDebugInfoOffset, kPointerSize) \
V(kFunctionIdentifierOffset, kPointerSize) \
V(kFeedbackMetadataOffset, kPointerSize) \
V(kPreParsedScopeDataOffset, kPointerSize) \
V(kEndOfPointerFieldsOffset, 0) \
/* Raw data fields. */ \
V(kFunctionLiteralIdOffset, kInt32Size) \
V(kUniqueIdOffset, kUniqueIdFieldSize) \
V(kLengthOffset, kInt32Size) \
V(kFormalParameterCountOffset, kInt32Size) \
V(kExpectedNofPropertiesOffset, kInt32Size) \
V(kStartPositionAndTypeOffset, kInt32Size) \
V(kEndPositionOffset, kInt32Size) \
V(kFunctionTokenPositionOffset, kInt32Size) \
V(kCompilerHintsOffset, kInt32Size) \
/* Total size. */ \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
@@ -469,7 +464,6 @@ class SharedFunctionInfo : public HeapObject {
V(FunctionKindBits, FunctionKind, 10, _) \
V(HasDuplicateParametersBit, bool, 1, _) \
V(AllowLazyCompilationBit, bool, 1, _) \
V(OptimizationDisabledBit, bool, 1, _) \
V(UsesArgumentsBit, bool, 1, _) \
V(NeedsHomeObjectBit, bool, 1, _) \
V(ForceInlineBit, bool, 1, _) \
@@ -477,13 +471,14 @@ class SharedFunctionInfo : public HeapObject {
V(IsDeclarationBit, bool, 1, _) \
V(IsAsmWasmBrokenBit, bool, 1, _) \
V(FunctionMapIndexBits, int, 5, _) \
/* Bits 26-31 are unused. */
// TODO(leszeks): Move DisabledOptimizationReason into here once there is
// space.
V(DisabledOptimizationReasonBits, BailoutReason, 7, _)
DEFINE_BIT_FIELDS(COMPILER_HINTS_BIT_FIELDS)
#undef COMPILER_HINTS_BIT_FIELDS
// Bailout reasons must fit in the DisabledOptimizationReason bitfield.
STATIC_ASSERT(kLastErrorMessage <= DisabledOptimizationReasonBits::kMax);
// Masks for checking if certain FunctionKind bits are set without fully
// decoding of the FunctionKind bit field.
static const int kClassConstructorMask = FunctionKind::kClassConstructor
@@ -505,16 +500,6 @@ class SharedFunctionInfo : public HeapObject {
DEFINE_BIT_FIELDS(DEBUGGER_HINTS_BIT_FIELDS)
#undef DEBUGGER_HINTS_BIT_FIELDS
// Bit fields in |bailout_reason|.
#define BAILOUT_REASON_BIT_FIELDS(V, _) \
V(DisabledOptimizationReasonBits, BailoutReason, 7, _)
DEFINE_BIT_FIELDS(BAILOUT_REASON_BIT_FIELDS)
#undef BAILOUT_REASON_BIT_FIELDS
// Bailout reasons must fit in the DisabledOptimizationReason bitfield.
STATIC_ASSERT(kLastErrorMessage <= DisabledOptimizationReasonBits::kMax);
private:
// [raw_name]: Function name string or kNoSharedNameSentinel.
DECL_ACCESSORS(raw_name, Object)
@@ -151,7 +151,6 @@ RUNTIME_FUNCTION(Runtime_SetCode) {
target_shared->set_end_position(source_shared->end_position());
bool was_native = target_shared->native();
target_shared->set_compiler_hints(source_shared->compiler_hints());
target_shared->set_bailout_reason(source_shared->bailout_reason());
target_shared->set_native(was_native);
target_shared->set_function_literal_id(source_shared->function_literal_id());
@@ -316,9 +316,7 @@ RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(JSFunction, function, 0);
function->shared()->set_disable_optimization_reason(
kOptimizationDisabledForTest);
function->shared()->set_optimization_disabled(true);
function->shared()->DisableOptimization(kOptimizationDisabledForTest);
return isolate->heap()->undefined_value();
}