Commit f0a430e5 authored by rmcilroy, committed by Commit bot

[Code Stubs] Convert FastNewClosureStub to a TurboFanCodeStub.

Converts FastNewClosureStub from a Hydrogen to a TurboFan code stub.
The plan is to start using this in the Interpreter CreateClosure
bytecode handler (in a follow-up CL).

BUG=v8:4280

Review-Url: https://codereview.chromium.org/2100883003
Cr-Commit-Position: refs/heads/master@{#37429}
parent 971731f3
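
The net API change for callers: the stub no longer carries LanguageMode/FunctionKind in its minor key, so every call site collapses to the same stub instance. A minimal before/after sketch of a call site, mirroring the CodeFactory and JSGenericLowering hunks below:

  // Before: one stub specialization per (language_mode, kind) pair.
  Callable callable = CodeFactory::FastNewClosure(
      isolate(), shared_info->language_mode(), shared_info->kind());

  // After: a single stub; the function map is chosen at runtime from the
  // SharedFunctionInfo's compiler hints.
  Callable callable = CodeFactory::FastNewClosure(isolate());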
@@ -460,10 +460,8 @@ Callable CodeFactory::FastNewContext(Isolate* isolate, int slot_count) {
 
 // static
-Callable CodeFactory::FastNewClosure(Isolate* isolate,
-                                     LanguageMode language_mode,
-                                     FunctionKind kind) {
-  FastNewClosureStub stub(isolate, language_mode, kind);
+Callable CodeFactory::FastNewClosure(Isolate* isolate) {
+  FastNewClosureStub stub(isolate);
   return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
 }
......
@@ -120,8 +120,7 @@ class CodeFactory final {
   static Callable FastCloneShallowObject(Isolate* isolate, int length);
   static Callable FastNewContext(Isolate* isolate, int slot_count);
-  static Callable FastNewClosure(Isolate* isolate, LanguageMode language_mode,
-                                 FunctionKind kind);
+  static Callable FastNewClosure(Isolate* isolate);
   static Callable FastNewObject(Isolate* isolate);
   static Callable FastNewRestParameter(Isolate* isolate,
                                        bool skip_stub_frame = false);
......
@@ -1924,66 +1924,6 @@ HValue* CodeStubGraphBuilder<ToObjectStub>::BuildCodeStub() {
 Handle<Code> ToObjectStub::GenerateCode() { return DoGenerateCode(this); }
 
-template<>
-HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
-  Counters* counters = isolate()->counters();
-  Factory* factory = isolate()->factory();
-  HInstruction* empty_fixed_array =
-      Add<HConstant>(factory->empty_fixed_array());
-  HInstruction* empty_literals_array =
-      Add<HConstant>(factory->empty_literals_array());
-  HValue* shared_info = GetParameter(0);
-
-  AddIncrementCounter(counters->fast_new_closure_total());
-
-  // Create a new closure from the given function info in new space
-  HValue* size = Add<HConstant>(JSFunction::kSize);
-  HInstruction* js_function =
-      Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE,
-                     graph()->GetConstant0());
-
-  int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
-                                            casted_stub()->kind());
-
-  // Compute the function map in the current native context and set that
-  // as the map of the allocated object.
-  HInstruction* native_context = BuildGetNativeContext();
-  HInstruction* map_slot_value = Add<HLoadNamedField>(
-      native_context, nullptr, HObjectAccess::ForContextSlot(map_index));
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
-
-  // Initialize the rest of the function.
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
-                        empty_fixed_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
-                        empty_fixed_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
-                        empty_literals_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
-                        graph()->GetConstantHole());
-  Add<HStoreNamedField>(
-      js_function, HObjectAccess::ForSharedFunctionInfoPointer(), shared_info);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
-                        context());
-
-  Handle<Code> lazy_builtin(
-      isolate()->builtins()->builtin(Builtins::kCompileLazy));
-  HConstant* lazy = Add<HConstant>(lazy_builtin);
-  Add<HStoreCodeEntry>(js_function, lazy);
-  Add<HStoreNamedField>(js_function,
-                        HObjectAccess::ForNextFunctionLinkPointer(),
-                        graph()->GetConstantUndefined());
-
-  return js_function;
-}
-
-Handle<Code> FastNewClosureStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
 template<>
 HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
   int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;
......
@@ -4189,14 +4189,10 @@ ElementsTransitionAndStoreStub::GetCallInterfaceDescriptor() const {
   return VectorStoreTransitionDescriptor(isolate());
 }
 
-void FastNewClosureStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {}
-
 void FastNewContextStub::InitializeDescriptor(CodeStubDescriptor* d) {}
 
 void TypeofStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {}
 
 void NumberToStringStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
   descriptor->Initialize(
       Runtime::FunctionForId(Runtime::kNumberToString)->entry);
@@ -4425,6 +4421,154 @@ compiler::Node* HasPropertyStub::Generate(CodeStubAssembler* assembler,
   return result.value();
 }
 
+// static
+compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
+                                             compiler::Node* shared_info,
+                                             compiler::Node* context) {
+  typedef compiler::Node Node;
+  typedef compiler::CodeAssembler::Label Label;
+  typedef compiler::CodeAssembler::Variable Variable;
+
+  Isolate* isolate = assembler->isolate();
+  Factory* factory = assembler->isolate()->factory();
+  assembler->IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);
+
+  // Create a new closure from the given function info in new space
+  Node* result = assembler->Allocate(JSFunction::kSize);
+
+  // Calculate the index of the map we should install on the function based on
+  // the FunctionKind and LanguageMode of the function.
+  // Note: Must be kept in sync with Context::FunctionMapIndex
+  Node* compiler_hints = assembler->LoadObjectField(
+      shared_info, SharedFunctionInfo::kCompilerHintsOffset,
+      MachineType::Uint32());
+  Node* is_strict = assembler->Word32And(
+      compiler_hints,
+      assembler->Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));
+
+  Label if_normal(assembler), if_generator(assembler), if_async(assembler),
+      if_class_constructor(assembler), if_function_without_prototype(assembler),
+      load_map(assembler);
+  Variable map_index(assembler, MachineRepresentation::kTagged);
+
+  Node* is_not_normal = assembler->Word32And(
+      compiler_hints,
+      assembler->Int32Constant(SharedFunctionInfo::kFunctionKindMaskBits));
+  assembler->GotoUnless(is_not_normal, &if_normal);
+
+  Node* is_generator = assembler->Word32And(
+      compiler_hints,
+      assembler->Int32Constant(1 << SharedFunctionInfo::kIsGeneratorBit));
+  assembler->GotoIf(is_generator, &if_generator);
+
+  Node* is_async = assembler->Word32And(
+      compiler_hints,
+      assembler->Int32Constant(1 << SharedFunctionInfo::kIsAsyncFunctionBit));
+  assembler->GotoIf(is_async, &if_async);
+
+  Node* is_class_constructor = assembler->Word32And(
+      compiler_hints,
+      assembler->Int32Constant(SharedFunctionInfo::kClassConstructorBits));
+  assembler->GotoIf(is_class_constructor, &if_class_constructor);
+
+  if (FLAG_debug_code) {
+    // Function must be a function without a prototype.
+    assembler->Assert(assembler->Word32And(
+        compiler_hints, assembler->Int32Constant(
+                            SharedFunctionInfo::kAccessorFunctionBits |
+                            (1 << SharedFunctionInfo::kIsArrowBit) |
+                            (1 << SharedFunctionInfo::kIsConciseMethodBit))));
+  }
+  assembler->Goto(&if_function_without_prototype);
+
+  assembler->Bind(&if_normal);
+  {
+    map_index.Bind(assembler->Select(
+        is_strict, assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX),
+        assembler->Int32Constant(Context::SLOPPY_FUNCTION_MAP_INDEX)));
+    assembler->Goto(&load_map);
+  }
+
+  assembler->Bind(&if_generator);
+  {
+    map_index.Bind(assembler->Select(
+        is_strict,
+        assembler->Int32Constant(Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX),
+        assembler->Int32Constant(
+            Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX)));
+    assembler->Goto(&load_map);
+  }
+
+  assembler->Bind(&if_async);
+  {
+    map_index.Bind(assembler->Select(
+        is_strict,
+        assembler->Int32Constant(Context::STRICT_ASYNC_FUNCTION_MAP_INDEX),
+        assembler->Int32Constant(Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX)));
+    assembler->Goto(&load_map);
+  }
+
+  assembler->Bind(&if_class_constructor);
+  {
+    map_index.Bind(
+        assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX));
+    assembler->Goto(&load_map);
+  }
+
+  assembler->Bind(&if_function_without_prototype);
+  {
+    map_index.Bind(assembler->Int32Constant(
+        Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
+    assembler->Goto(&load_map);
+  }
+
+  assembler->Bind(&load_map);
+
+  // Get the function map in the current native context and set that
+  // as the map of the allocated object.
+  Node* native_context = assembler->LoadNativeContext(context);
+  Node* map_slot_value =
+      assembler->LoadFixedArrayElement(native_context, map_index.value());
+  assembler->StoreMapNoWriteBarrier(result, map_slot_value);
+
+  // Initialize the rest of the function.
+  Node* empty_fixed_array =
+      assembler->HeapConstant(factory->empty_fixed_array());
+  Node* empty_literals_array =
+      assembler->HeapConstant(factory->empty_literals_array());
+  assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
+                                            empty_fixed_array);
+  assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
+                                            empty_fixed_array);
+  assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kLiteralsOffset,
+                                            empty_literals_array);
+  assembler->StoreObjectFieldNoWriteBarrier(
+      result, JSFunction::kPrototypeOrInitialMapOffset,
+      assembler->TheHoleConstant());
+  assembler->StoreObjectFieldNoWriteBarrier(
+      result, JSFunction::kSharedFunctionInfoOffset, shared_info);
+  assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset,
+                                            context);
+  Handle<Code> lazy_builtin_handle(
+      assembler->isolate()->builtins()->builtin(Builtins::kCompileLazy));
+  Node* lazy_builtin = assembler->HeapConstant(lazy_builtin_handle);
+  Node* lazy_builtin_entry = assembler->IntPtrAdd(
+      lazy_builtin,
+      assembler->IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
+  assembler->StoreObjectFieldNoWriteBarrier(
+      result, JSFunction::kCodeEntryOffset, lazy_builtin_entry);
+  assembler->StoreObjectFieldNoWriteBarrier(result,
+                                            JSFunction::kNextFunctionLinkOffset,
+                                            assembler->UndefinedConstant());
+
+  return result;
+}
+
+void FastNewClosureStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  assembler->Return(
+      Generate(assembler, assembler->Parameter(0), assembler->Parameter(1)));
+}
+
 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
   CreateAllocationSiteStub stub(isolate);
   stub.GetCode();
......
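
For review, the CSA dispatch above is easier to check against a scalar equivalent. A hedged C++ sketch follows; MapIndexFor is a hypothetical helper, not code in this CL, and the constants are exactly the ones the stub tests:

  // Hypothetical scalar mirror of the map-index selection in
  // FastNewClosureStub::Generate; must agree with Context::FunctionMapIndex.
  int MapIndexFor(uint32_t compiler_hints) {
    bool is_strict =
        (compiler_hints & (1 << SharedFunctionInfo::kStrictModeBit)) != 0;
    if ((compiler_hints & SharedFunctionInfo::kFunctionKindMaskBits) == 0) {
      return is_strict ? Context::STRICT_FUNCTION_MAP_INDEX
                       : Context::SLOPPY_FUNCTION_MAP_INDEX;
    }
    if (compiler_hints & (1 << SharedFunctionInfo::kIsGeneratorBit)) {
      return is_strict ? Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX
                       : Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX;
    }
    if (compiler_hints & (1 << SharedFunctionInfo::kIsAsyncFunctionBit)) {
      return is_strict ? Context::STRICT_ASYNC_FUNCTION_MAP_INDEX
                       : Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX;
    }
    if (compiler_hints & SharedFunctionInfo::kClassConstructorBits) {
      return Context::STRICT_FUNCTION_MAP_INDEX;
    }
    // Remaining kinds (arrow, accessor, concise method) have no prototype.
    return Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX;
  }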
@@ -63,7 +63,6 @@ namespace internal {
   V(FastCloneRegExp)                          \
   V(FastCloneShallowArray)                    \
   V(FastFunctionBind)                         \
-  V(FastNewClosure)                           \
   V(FastNewContext)                           \
   V(FastNewObject)                            \
   V(FastNewRestParameter)                     \
@@ -118,6 +117,7 @@ namespace internal {
   V(InternalArraySingleArgumentConstructor)   \
   V(Dec)                                      \
   V(FastCloneShallowObject)                   \
+  V(FastNewClosure)                           \
   V(InstanceOf)                               \
   V(LessThan)                                 \
   V(LessThanOrEqual)                          \
@@ -1034,32 +1034,16 @@ class TypeofStub final : public HydrogenCodeStub {
   DEFINE_HYDROGEN_CODE_STUB(Typeof, HydrogenCodeStub);
 };
 
-class FastNewClosureStub : public HydrogenCodeStub {
+class FastNewClosureStub : public TurboFanCodeStub {
  public:
-  FastNewClosureStub(Isolate* isolate, LanguageMode language_mode,
-                     FunctionKind kind)
-      : HydrogenCodeStub(isolate) {
-    DCHECK(IsValidFunctionKind(kind));
-    set_sub_minor_key(LanguageModeBits::encode(language_mode) |
-                      FunctionKindBits::encode(kind));
-  }
-
-  LanguageMode language_mode() const {
-    return LanguageModeBits::decode(sub_minor_key());
-  }
-
-  FunctionKind kind() const {
-    return FunctionKindBits::decode(sub_minor_key());
-  }
+  explicit FastNewClosureStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
 
  private:
-  STATIC_ASSERT(LANGUAGE_END == 3);
-  class LanguageModeBits : public BitField<LanguageMode, 0, 2> {};
-  class FunctionKindBits : public BitField<FunctionKind, 2, 9> {};
+  static compiler::Node* Generate(CodeStubAssembler* assembler,
+                                  compiler::Node* shared_info,
+                                  compiler::Node* context);
 
   DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewClosure);
-  DEFINE_HYDROGEN_CODE_STUB(FastNewClosure, HydrogenCodeStub);
+  DEFINE_TURBOFAN_CODE_STUB(FastNewClosure, TurboFanCodeStub);
 };
......
@@ -694,6 +694,27 @@ void CodeAssembler::Switch(Node* index, Label* default_label,
                              labels, case_count);
 }
 
+Node* CodeAssembler::Select(Node* condition, Node* true_value,
+                            Node* false_value, MachineRepresentation rep) {
+  Variable value(this, rep);
+  Label vtrue(this), vfalse(this), end(this);
+  Branch(condition, &vtrue, &vfalse);
+
+  Bind(&vtrue);
+  {
+    value.Bind(true_value);
+    Goto(&end);
+  }
+  Bind(&vfalse);
+  {
+    value.Bind(false_value);
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return value.value();
+}
+
 // RawMachineAssembler delegate helpers:
 Isolate* CodeAssembler::isolate() const { return raw_assembler_->isolate(); }
......
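
The new Select helper is the CSA analogue of a ternary: it branches, binds one of two values into a fresh Variable, and rejoins at a merge label. A minimal usage sketch inside some CodeStubAssembler-derived generator (the names condition and assembler are assumed surrounding code, not part of this CL):

  // index = condition ? 1 : 0, expressed as a two-way Variable merge.
  Node* index = assembler->Select(condition,
                                  assembler->Int32Constant(1),
                                  assembler->Int32Constant(0));

FastNewClosureStub::Generate above uses exactly this shape to pick between strict and sloppy map indices without spelling out the two-armed branch each time.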
@@ -239,6 +239,9 @@ class CodeAssembler {
   void Switch(Node* index, Label* default_label, int32_t* case_values,
               Label** case_labels, size_t case_count);
 
+  Node* Select(Node* condition, Node* true_value, Node* false_value,
+               MachineRepresentation rep = MachineRepresentation::kTagged);
+
   // Access to the frame pointer
   Node* LoadFramePointer();
   Node* LoadParentFramePointer();
......
@@ -465,8 +465,7 @@ void JSGenericLowering::LowerJSCreateClosure(Node* node) {
   // Use the FastNewClosureStub only for functions allocated in new space.
   if (p.pretenure() == NOT_TENURED) {
-    Callable callable = CodeFactory::FastNewClosure(
-        isolate(), shared_info->language_mode(), shared_info->kind());
+    Callable callable = CodeFactory::FastNewClosure(isolate());
     ReplaceWithStubCall(node, callable, flags);
   } else {
     ReplaceWithRuntimeCall(node, (p.pretenure() == TENURED)
......
@@ -112,7 +112,6 @@ void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
   current_block_ = nullptr;
 }
 
-
 void RawMachineAssembler::Return(Node* value) {
   Node* ret = MakeNode(common()->Return(), 1, &value);
   schedule()->AddReturn(CurrentBlock(), ret);
......
@@ -523,6 +523,7 @@ class Context: public FixedArray {
   }
 
   static int FunctionMapIndex(LanguageMode language_mode, FunctionKind kind) {
+    // Note: Must be kept in sync with FastNewClosureStub::Generate.
     if (IsGeneratorFunction(kind)) {
       return is_strict(language_mode) ? STRICT_GENERATOR_FUNCTION_MAP_INDEX
                                       : SLOPPY_GENERATOR_FUNCTION_MAP_INDEX;
......
@@ -5677,8 +5677,7 @@ void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
   HConstant* shared_info_value = Add<HConstant>(shared_info);
   HInstruction* instr;
   if (!expr->pretenure()) {
-    FastNewClosureStub stub(isolate(), shared_info->language_mode(),
-                            shared_info->kind());
+    FastNewClosureStub stub(isolate());
     FastNewClosureDescriptor descriptor(isolate());
     HValue* values[] = {context(), shared_info_value};
     HConstant* stub_value = Add<HConstant>(stub.GetCode());
......
@@ -1023,7 +1023,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   // doesn't just get a copy of the existing unoptimized code.
   if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
       scope()->is_function_scope()) {
-    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
+    FastNewClosureStub stub(isolate());
     __ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
     __ CallStub(&stub);
   } else {
......
@@ -7423,9 +7423,21 @@ class SharedFunctionInfo: public HeapObject {
   static const int kHasDuplicateParametersBit =
       kHasDuplicateParameters + kCompilerHintsSmiTagSize;
 
   static const int kIsArrowBit = kIsArrow + kCompilerHintsSmiTagSize;
+  static const int kIsGeneratorBit = kIsGenerator + kCompilerHintsSmiTagSize;
   static const int kIsConciseMethodBit =
       kIsConciseMethod + kCompilerHintsSmiTagSize;
+  static const int kIsAsyncFunctionBit =
+      kIsAsyncFunction + kCompilerHintsSmiTagSize;
+  static const int kAccessorFunctionBits =
+      FunctionKind::kAccessorFunction
+      << (kFunctionKind + kCompilerHintsSmiTagSize);
+  static const int kClassConstructorBits =
+      FunctionKind::kClassConstructor
+      << (kFunctionKind + kCompilerHintsSmiTagSize);
+  static const int kFunctionKindMaskBits = FunctionKindBits::kMask
+                                           << kCompilerHintsSmiTagSize;
 
   // Constants for optimizing codegen for strict mode function and
   // native tests.
......
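
These widened constants let the stub test function kind directly on the raw 32-bit compiler-hints word; the kCompilerHintsSmiTagSize offset presumably accounts for the Smi tag where the hints field is stored as a Smi. A sketch of the consuming pattern, as it appears in FastNewClosureStub::Generate above:

  Node* compiler_hints = assembler->LoadObjectField(
      shared_info, SharedFunctionInfo::kCompilerHintsOffset,
      MachineType::Uint32());
  // Non-zero iff the generator bit is set in the loaded word.
  Node* is_generator = assembler->Word32And(
      compiler_hints,
      assembler->Int32Constant(1 << SharedFunctionInfo::kIsGeneratorBit));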