Commit 7a82be3d authored by adamk's avatar adamk Committed by Commit bot

Remove unnecessary duplication of FunctionKind enums in CompilerHints

The duplicated enum values are only used by the FastNewClosureStub,
so inline them there, with the help of one new constant (kFunctionKindShift)
in SharedFunctionInfo.

Review-Url: https://codereview.chromium.org/2390043003
Cr-Commit-Position: refs/heads/master@{#40005}
parent 138127a6
......@@ -2458,11 +2458,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
Label class_constructor;
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
__ TestAndBranchIfAnySet(
w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
(1 << SharedFunctionInfo::kIsSubclassConstructor) |
(1 << SharedFunctionInfo::kIsBaseConstructor),
&class_constructor);
__ TestAndBranchIfAnySet(w3, FunctionKind::kClassConstructor
<< SharedFunctionInfo::kFunctionKindShift,
&class_constructor);
// Enter the context of the function; ToObject has to run in the function
// context, and we also need to take the global proxy from the function
......
......@@ -5011,33 +5011,38 @@ compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
load_map(assembler);
Variable map_index(assembler, MachineType::PointerRepresentation());
STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
Node* is_not_normal = assembler->Word32And(
compiler_hints,
assembler->Int32Constant(SharedFunctionInfo::kFunctionKindMaskBits));
assembler->Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
assembler->GotoUnless(is_not_normal, &if_normal);
Node* is_generator = assembler->Word32And(
compiler_hints,
assembler->Int32Constant(1 << SharedFunctionInfo::kIsGeneratorBit));
assembler->Int32Constant(FunctionKind::kGeneratorFunction
<< SharedFunctionInfo::kFunctionKindShift));
assembler->GotoIf(is_generator, &if_generator);
Node* is_async = assembler->Word32And(
compiler_hints,
assembler->Int32Constant(1 << SharedFunctionInfo::kIsAsyncFunctionBit));
assembler->Int32Constant(FunctionKind::kAsyncFunction
<< SharedFunctionInfo::kFunctionKindShift));
assembler->GotoIf(is_async, &if_async);
Node* is_class_constructor = assembler->Word32And(
compiler_hints,
assembler->Int32Constant(SharedFunctionInfo::kClassConstructorBits));
assembler->Int32Constant(FunctionKind::kClassConstructor
<< SharedFunctionInfo::kFunctionKindShift));
assembler->GotoIf(is_class_constructor, &if_class_constructor);
if (FLAG_debug_code) {
// Function must be a function without a prototype.
assembler->Assert(assembler->Word32And(
compiler_hints, assembler->Int32Constant(
SharedFunctionInfo::kAccessorFunctionBits |
(1 << SharedFunctionInfo::kIsArrowBit) |
(1 << SharedFunctionInfo::kIsConciseMethodBit))));
compiler_hints,
assembler->Int32Constant((FunctionKind::kAccessorFunction |
FunctionKind::kArrowFunction |
FunctionKind::kConciseMethod)
<< SharedFunctionInfo::kFunctionKindShift)));
}
assembler->Goto(&if_function_without_prototype);
......
......@@ -7707,18 +7707,9 @@ class SharedFunctionInfo: public HeapObject {
kDontFlush,
// byte 2
kFunctionKind,
kIsArrow = kFunctionKind,
kIsGenerator,
kIsConciseMethod,
kIsDefaultConstructor,
kIsSubclassConstructor,
kIsBaseConstructor,
kIsGetterFunction,
kIsSetterFunction,
// rest of byte 2 and first two bits of byte 3 are used by FunctionKind
// byte 3
kIsAsyncFunction,
kIsModule,
kDeserialized,
kDeserialized = kFunctionKind + 10,
kIsDeclaration,
kIsAsmWasmBroken,
kRequiresClassFieldInit,
......@@ -7727,23 +7718,8 @@ class SharedFunctionInfo: public HeapObject {
};
// kFunctionKind has to be byte-aligned
STATIC_ASSERT((kFunctionKind % kBitsPerByte) == 0);
// Make sure that FunctionKind and byte 2 are in sync:
#define ASSERT_FUNCTION_KIND_ORDER(functionKind, compilerFunctionKind) \
STATIC_ASSERT(FunctionKind::functionKind == \
1 << (compilerFunctionKind - kFunctionKind))
ASSERT_FUNCTION_KIND_ORDER(kArrowFunction, kIsArrow);
ASSERT_FUNCTION_KIND_ORDER(kGeneratorFunction, kIsGenerator);
ASSERT_FUNCTION_KIND_ORDER(kConciseMethod, kIsConciseMethod);
ASSERT_FUNCTION_KIND_ORDER(kDefaultConstructor, kIsDefaultConstructor);
ASSERT_FUNCTION_KIND_ORDER(kSubclassConstructor, kIsSubclassConstructor);
ASSERT_FUNCTION_KIND_ORDER(kBaseConstructor, kIsBaseConstructor);
ASSERT_FUNCTION_KIND_ORDER(kGetterFunction, kIsGetterFunction);
ASSERT_FUNCTION_KIND_ORDER(kSetterFunction, kIsSetterFunction);
ASSERT_FUNCTION_KIND_ORDER(kAsyncFunction, kIsAsyncFunction);
ASSERT_FUNCTION_KIND_ORDER(kModule, kIsModule);
#undef ASSERT_FUNCTION_KIND_ORDER
class FunctionKindBits : public BitField<FunctionKind, kIsArrow, 10> {};
class FunctionKindBits : public BitField<FunctionKind, kFunctionKind, 10> {};
class DeoptCountBits : public BitField<int, 0, 4> {};
class OptReenableTriesBits : public BitField<int, 4, 18> {};
......@@ -7775,21 +7751,10 @@ class SharedFunctionInfo: public HeapObject {
static const int kHasDuplicateParametersBit =
kHasDuplicateParameters + kCompilerHintsSmiTagSize;
static const int kIsArrowBit = kIsArrow + kCompilerHintsSmiTagSize;
static const int kIsGeneratorBit = kIsGenerator + kCompilerHintsSmiTagSize;
static const int kIsConciseMethodBit =
kIsConciseMethod + kCompilerHintsSmiTagSize;
static const int kIsAsyncFunctionBit =
kIsAsyncFunction + kCompilerHintsSmiTagSize;
static const int kAccessorFunctionBits =
FunctionKind::kAccessorFunction
<< (kFunctionKind + kCompilerHintsSmiTagSize);
static const int kClassConstructorBits =
FunctionKind::kClassConstructor
<< (kFunctionKind + kCompilerHintsSmiTagSize);
static const int kFunctionKindMaskBits = FunctionKindBits::kMask
<< kCompilerHintsSmiTagSize;
static const int kFunctionKindShift =
kFunctionKind + kCompilerHintsSmiTagSize;
static const int kAllFunctionKindBitsMask = FunctionKindBits::kMask
<< kCompilerHintsSmiTagSize;
// Constants for optimizing codegen for strict mode function and
// native tests.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment