Commit 05873add authored by mvstanton's avatar mvstanton Committed by Commit bot

[builtins] More stubs to the builtin-o-sphere.

The following ported to builtins:
FastCloneRegExp
FastCloneShallowArray
FastCloneShallowObject

BUG=
TBR=rmcilroy@chromium.org, rossberg@chromium.org

Review-Url: https://codereview.chromium.org/2605893002
Cr-Commit-Position: refs/heads/master@{#41989}
parent f2e8c978
...@@ -10,6 +10,7 @@ ...@@ -10,6 +10,7 @@
#include "src/ast/prettyprinter.h" #include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h" #include "src/ast/scopes.h"
#include "src/base/hashmap.h" #include "src/base/hashmap.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins.h" #include "src/builtins/builtins.h"
#include "src/code-stubs.h" #include "src/code-stubs.h"
#include "src/contexts.h" #include "src/contexts.h"
...@@ -577,12 +578,12 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) { ...@@ -577,12 +578,12 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
} }
bool ObjectLiteral::IsFastCloningSupported() const { bool ObjectLiteral::IsFastCloningSupported() const {
// FastCloneShallowObjectStub doesn't copy elements, and object literals don't // The FastCloneShallowObject builtin doesn't copy elements, and object
// support copy-on-write (COW) elements for now. // literals don't support copy-on-write (COW) elements for now.
// TODO(mvstanton): make object literals support COW elements. // TODO(mvstanton): make object literals support COW elements.
return fast_elements() && has_shallow_properties() && return fast_elements() && has_shallow_properties() &&
properties_count() <= properties_count() <= ConstructorBuiltinsAssembler::
FastCloneShallowObjectStub::kMaximumClonedProperties; kMaximumClonedShallowObjectProperties;
} }
void ArrayLiteral::BuildConstantElements(Isolate* isolate) { void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
...@@ -659,7 +660,7 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) { ...@@ -659,7 +660,7 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
bool ArrayLiteral::IsFastCloningSupported() const { bool ArrayLiteral::IsFastCloningSupported() const {
return depth() <= 1 && return depth() <= 1 &&
values()->length() <= values()->length() <=
FastCloneShallowArrayStub::kMaximumClonedElements; ConstructorBuiltinsAssembler::kMaximumClonedShallowArrayElements;
} }
void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate, void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
......
...@@ -1416,7 +1416,7 @@ class ObjectLiteral final : public MaterializedLiteral { ...@@ -1416,7 +1416,7 @@ class ObjectLiteral final : public MaterializedLiteral {
// marked expressions, no store code is emitted. // marked expressions, no store code is emitted.
void CalculateEmitStore(Zone* zone); void CalculateEmitStore(Zone* zone);
// Determines whether the {FastCloneShallowObjectStub} can be used. // Determines whether the {FastCloneShallowObject} builtin can be used.
bool IsFastCloningSupported() const; bool IsFastCloningSupported() const;
// Assemble bitfield of flags for the CreateObjectLiteral helper. // Assemble bitfield of flags for the CreateObjectLiteral helper.
...@@ -1562,7 +1562,7 @@ class ArrayLiteral final : public MaterializedLiteral { ...@@ -1562,7 +1562,7 @@ class ArrayLiteral final : public MaterializedLiteral {
// Populate the constant elements fixed array. // Populate the constant elements fixed array.
void BuildConstantElements(Isolate* isolate); void BuildConstantElements(Isolate* isolate);
// Determines whether the {FastCloneShallowArrayStub} can be used. // Determines whether the {FastCloneShallowArray} builtin can be used.
bool IsFastCloningSupported() const; bool IsFastCloningSupported() const;
// Assemble bitfield of flags for the CreateArrayLiteral helper. // Assemble bitfield of flags for the CreateArrayLiteral helper.
......
This diff is collapsed.
...@@ -9,6 +9,7 @@ namespace internal { ...@@ -9,6 +9,7 @@ namespace internal {
typedef compiler::Node Node; typedef compiler::Node Node;
typedef compiler::CodeAssemblerState CodeAssemblerState; typedef compiler::CodeAssemblerState CodeAssemblerState;
typedef compiler::CodeAssemblerLabel CodeAssemblerLabel;
class ConstructorBuiltinsAssembler : public CodeStubAssembler { class ConstructorBuiltinsAssembler : public CodeStubAssembler {
public: public:
...@@ -20,10 +21,37 @@ class ConstructorBuiltinsAssembler : public CodeStubAssembler { ...@@ -20,10 +21,37 @@ class ConstructorBuiltinsAssembler : public CodeStubAssembler {
ScopeType scope_type); ScopeType scope_type);
static int MaximumFunctionContextSlots(); static int MaximumFunctionContextSlots();
Node* EmitFastCloneRegExp(Node* closure, Node* literal_index, Node* pattern,
Node* flags, Node* context);
Node* EmitFastCloneShallowArray(Node* closure, Node* literal_index,
Node* context,
CodeAssemblerLabel* call_runtime,
AllocationSiteMode allocation_site_mode);
// Maximum number of elements in copied array (chosen so that even an array
// backed by a double backing store will fit into new-space).
static const int kMaximumClonedShallowArrayElements =
JSArray::kInitialMaxFastElementArray * kPointerSize / kDoubleSize;
void CreateFastCloneShallowArrayBuiltin(
AllocationSiteMode allocation_site_mode);
// Maximum number of properties in copied objects.
static const int kMaximumClonedShallowObjectProperties = 6;
static int FastCloneShallowObjectPropertiesCount(int literal_length);
Node* EmitFastCloneShallowObject(CodeAssemblerLabel* call_runtime,
Node* closure, Node* literals_index,
Node* properties_count);
void CreateFastCloneShallowObjectBuiltin(int properties_count);
private: private:
static const int kMaximumSlots = 0x8000; static const int kMaximumSlots = 0x8000;
static const int kSmallMaximumSlots = 10; static const int kSmallMaximumSlots = 10;
Node* NonEmptyShallowClone(Node* boilerplate, Node* boilerplate_map,
Node* boilerplate_elements, Node* allocation_site,
Node* capacity, ElementsKind kind);
// FastNewFunctionContext can only allocate closures which fit in the // FastNewFunctionContext can only allocate closures which fit in the
// new space. // new space.
STATIC_ASSERT(((kMaximumSlots + Context::MIN_CONTEXT_SLOTS) * kPointerSize + STATIC_ASSERT(((kMaximumSlots + Context::MIN_CONTEXT_SLOTS) * kPointerSize +
......
...@@ -94,6 +94,25 @@ namespace internal { ...@@ -94,6 +94,25 @@ namespace internal {
FastNewFunctionContext) \ FastNewFunctionContext) \
TFS(FastNewFunctionContextFunction, BUILTIN, kNoExtraICState, \ TFS(FastNewFunctionContextFunction, BUILTIN, kNoExtraICState, \
FastNewFunctionContext) \ FastNewFunctionContext) \
TFS(FastCloneRegExp, BUILTIN, kNoExtraICState, FastCloneRegExp) \
TFS(FastCloneShallowArrayTrack, BUILTIN, kNoExtraICState, \
FastCloneShallowArray) \
TFS(FastCloneShallowArrayDontTrack, BUILTIN, kNoExtraICState, \
FastCloneShallowArray) \
TFS(FastCloneShallowObject0, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject1, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject2, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject3, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject4, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject5, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject6, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
\ \
/* Apply and entries */ \ /* Apply and entries */ \
ASM(Apply) \ ASM(Apply) \
...@@ -784,6 +803,8 @@ class Builtins { ...@@ -784,6 +803,8 @@ class Builtins {
CallableType function_type = CallableType::kAny); CallableType function_type = CallableType::kAny);
Handle<Code> InterpreterPushArgsAndConstruct(CallableType function_type); Handle<Code> InterpreterPushArgsAndConstruct(CallableType function_type);
Handle<Code> NewFunctionContext(ScopeType scope_type); Handle<Code> NewFunctionContext(ScopeType scope_type);
Handle<Code> NewCloneShallowArray(AllocationSiteMode allocation_mode);
Handle<Code> NewCloneShallowObject(int length);
Code* builtin(Name name) { Code* builtin(Name name) {
// Code::cast cannot be used here since we access builtins // Code::cast cannot be used here since we access builtins
......
...@@ -338,24 +338,23 @@ Callable CodeFactory::ResumeGenerator(Isolate* isolate) { ...@@ -338,24 +338,23 @@ Callable CodeFactory::ResumeGenerator(Isolate* isolate) {
// static // static
Callable CodeFactory::FastCloneRegExp(Isolate* isolate) { Callable CodeFactory::FastCloneRegExp(Isolate* isolate) {
FastCloneRegExpStub stub(isolate); return Callable(isolate->builtins()->FastCloneRegExp(),
return make_callable(stub); FastCloneRegExpDescriptor(isolate));
} }
// static // static
Callable CodeFactory::FastCloneShallowArray(Isolate* isolate) { Callable CodeFactory::FastCloneShallowArray(
// TODO(mstarzinger): Thread through AllocationSiteMode at some point. Isolate* isolate, AllocationSiteMode allocation_mode) {
FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE); return Callable(isolate->builtins()->NewCloneShallowArray(allocation_mode),
return make_callable(stub); FastCloneShallowArrayDescriptor(isolate));
} }
// static // static
Callable CodeFactory::FastCloneShallowObject(Isolate* isolate, int length) { Callable CodeFactory::FastCloneShallowObject(Isolate* isolate, int length) {
FastCloneShallowObjectStub stub(isolate, length); return Callable(isolate->builtins()->NewCloneShallowObject(length),
return make_callable(stub); FastCloneShallowObjectDescriptor(isolate));
} }
// static // static
Callable CodeFactory::FastNewFunctionContext(Isolate* isolate, Callable CodeFactory::FastNewFunctionContext(Isolate* isolate,
ScopeType scope_type) { ScopeType scope_type) {
......
...@@ -131,7 +131,8 @@ class V8_EXPORT_PRIVATE CodeFactory final { ...@@ -131,7 +131,8 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable GetSuperConstructor(Isolate* isolate); static Callable GetSuperConstructor(Isolate* isolate);
static Callable FastCloneRegExp(Isolate* isolate); static Callable FastCloneRegExp(Isolate* isolate);
static Callable FastCloneShallowArray(Isolate* isolate); static Callable FastCloneShallowArray(Isolate* isolate,
AllocationSiteMode allocation_mode);
static Callable FastCloneShallowObject(Isolate* isolate, int length); static Callable FastCloneShallowObject(Isolate* isolate, int length);
static Callable FastNewFunctionContext(Isolate* isolate, static Callable FastNewFunctionContext(Isolate* isolate,
......
This diff is collapsed.
...@@ -96,9 +96,6 @@ class Node; ...@@ -96,9 +96,6 @@ class Node;
V(InternalArrayNoArgumentConstructor) \ V(InternalArrayNoArgumentConstructor) \
V(InternalArraySingleArgumentConstructor) \ V(InternalArraySingleArgumentConstructor) \
V(ElementsTransitionAndStore) \ V(ElementsTransitionAndStore) \
V(FastCloneRegExp) \
V(FastCloneShallowArray) \
V(FastCloneShallowObject) \
V(KeyedLoadSloppyArguments) \ V(KeyedLoadSloppyArguments) \
V(KeyedStoreSloppyArguments) \ V(KeyedStoreSloppyArguments) \
V(LoadScriptContextField) \ V(LoadScriptContextField) \
...@@ -830,81 +827,6 @@ class FastNewStrictArgumentsStub final : public PlatformCodeStub { ...@@ -830,81 +827,6 @@ class FastNewStrictArgumentsStub final : public PlatformCodeStub {
class SkipStubFrameBits : public BitField<bool, 0, 1> {}; class SkipStubFrameBits : public BitField<bool, 0, 1> {};
}; };
class FastCloneRegExpStub final : public TurboFanCodeStub {
public:
explicit FastCloneRegExpStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
static compiler::Node* Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* pattern,
compiler::Node* flags,
compiler::Node* context);
private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneRegExp);
DEFINE_TURBOFAN_CODE_STUB(FastCloneRegExp, TurboFanCodeStub);
};
class FastCloneShallowArrayStub : public TurboFanCodeStub {
public:
// Maximum number of elements in copied array (chosen so that even an array
// backed by a double backing store will fit into new-space).
static const int kMaximumClonedElements =
JSArray::kInitialMaxFastElementArray * kPointerSize / kDoubleSize;
FastCloneShallowArrayStub(Isolate* isolate,
AllocationSiteMode allocation_site_mode)
: TurboFanCodeStub(isolate) {
minor_key_ = AllocationSiteModeBits::encode(allocation_site_mode);
}
static compiler::Node* Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* context,
compiler::CodeAssemblerLabel* call_runtime,
AllocationSiteMode allocation_site_mode);
AllocationSiteMode allocation_site_mode() const {
return AllocationSiteModeBits::decode(minor_key_);
}
private:
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowArray);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowArray, TurboFanCodeStub);
};
class FastCloneShallowObjectStub : public TurboFanCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
FastCloneShallowObjectStub(Isolate* isolate, int length)
: TurboFanCodeStub(isolate) {
DCHECK_GE(length, 0);
DCHECK_LE(length, kMaximumClonedProperties);
minor_key_ = LengthBits::encode(LengthBits::encode(length));
}
static compiler::Node* GenerateFastPath(
CodeStubAssembler* assembler, compiler::CodeAssemblerLabel* call_runtime,
compiler::Node* closure, compiler::Node* literals_index,
compiler::Node* properties_count);
static int PropertiesCount(int literal_length);
int length() const { return LengthBits::decode(minor_key_); }
private:
class LengthBits : public BitField<int, 0, 4> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
};
class CreateAllocationSiteStub : public TurboFanCodeStub { class CreateAllocationSiteStub : public TurboFanCodeStub {
public: public:
explicit CreateAllocationSiteStub(Isolate* isolate) explicit CreateAllocationSiteStub(Isolate* isolate)
......
...@@ -392,11 +392,13 @@ void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) { ...@@ -392,11 +392,13 @@ void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
node->InsertInput(zone(), 1, jsgraph()->SmiConstant(p.index())); node->InsertInput(zone(), 1, jsgraph()->SmiConstant(p.index()));
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant())); node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
// Use the FastCloneShallowArrayStub only for shallow boilerplates without // Use the FastCloneShallowArray builtin only for shallow boilerplates without
// properties up to the number of elements that the stubs can handle. // properties up to the number of elements that the stubs can handle.
if ((p.flags() & ArrayLiteral::kShallowElements) != 0 && if ((p.flags() & ArrayLiteral::kShallowElements) != 0 &&
p.length() < FastCloneShallowArrayStub::kMaximumClonedElements) { p.length() <
Callable callable = CodeFactory::FastCloneShallowArray(isolate()); ConstructorBuiltinsAssembler::kMaximumClonedShallowArrayElements) {
Callable callable = CodeFactory::FastCloneShallowArray(
isolate(), DONT_TRACK_ALLOCATION_SITE);
ReplaceWithStubCall(node, callable, flags); ReplaceWithStubCall(node, callable, flags);
} else { } else {
node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags())); node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
...@@ -412,10 +414,11 @@ void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) { ...@@ -412,10 +414,11 @@ void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) {
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant())); node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags())); node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
// Use the FastCloneShallowObjectStub only for shallow boilerplates without // Use the FastCloneShallowObject builtin only for shallow boilerplates
// elements up to the number of properties that the stubs can handle. // without elements up to the number of properties that the stubs can handle.
if ((p.flags() & ObjectLiteral::kShallowProperties) != 0 && if ((p.flags() & ObjectLiteral::kShallowProperties) != 0 &&
p.length() <= FastCloneShallowObjectStub::kMaximumClonedProperties) { p.length() <=
ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties) {
Callable callable = Callable callable =
CodeFactory::FastCloneShallowObject(isolate(), p.length()); CodeFactory::FastCloneShallowObject(isolate(), p.length());
ReplaceWithStubCall(node, callable, flags); ReplaceWithStubCall(node, callable, flags);
......
...@@ -1218,8 +1218,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1218,8 +1218,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(r3, r2, r1, r0); __ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateObjectLiteral); __ CallRuntime(Runtime::kCreateObjectLiteral);
} else { } else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(r3, r2, r1, r0); __ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateArrayLiteral); __ CallRuntime(Runtime::kCreateArrayLiteral);
} else { } else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -1205,8 +1205,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1205,8 +1205,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(x3, x2, x1, x0); __ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kCreateObjectLiteral); __ CallRuntime(Runtime::kCreateObjectLiteral);
} else { } else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1341,8 +1342,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1341,8 +1342,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(x3, x2, x1, x0); __ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kCreateArrayLiteral); __ CallRuntime(Runtime::kCreateArrayLiteral);
} else { } else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -1149,8 +1149,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1149,8 +1149,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_properties)); __ mov(ecx, Immediate(constant_properties));
__ mov(edx, Immediate(Smi::FromInt(flags))); __ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1282,8 +1283,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1282,8 +1283,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index()))); __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements)); __ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -1217,8 +1217,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1217,8 +1217,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(a3, a2, a1, a0); __ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteral); __ CallRuntime(Runtime::kCreateObjectLiteral);
} else { } else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(a3, a2, a1, a0); __ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateArrayLiteral); __ CallRuntime(Runtime::kCreateArrayLiteral);
} else { } else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -1219,8 +1219,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1219,8 +1219,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(a3, a2, a1, a0); __ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteral); __ CallRuntime(Runtime::kCreateObjectLiteral);
} else { } else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1358,8 +1359,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1358,8 +1359,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(a3, a2, a1, a0); __ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateArrayLiteral); __ CallRuntime(Runtime::kCreateArrayLiteral);
} else { } else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -1178,8 +1178,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1178,8 +1178,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_properties); __ Move(rcx, constant_properties);
__ Move(rdx, Smi::FromInt(flags)); __ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); Callable callable = CodeFactory::FastCloneShallowObject(
__ CallStub(&stub); isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
...@@ -1310,8 +1311,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { ...@@ -1310,8 +1311,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index())); __ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements); __ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); Callable callable =
__ CallStub(&stub); CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext(); RestoreContext();
} }
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER); PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
......
...@@ -888,6 +888,14 @@ enum ScopeType : uint8_t { ...@@ -888,6 +888,14 @@ enum ScopeType : uint8_t {
WITH_SCOPE // The scope introduced by with. WITH_SCOPE // The scope introduced by with.
}; };
// AllocationSiteMode controls whether allocations are tracked by an allocation
// site.
enum AllocationSiteMode {
DONT_TRACK_ALLOCATION_SITE,
TRACK_ALLOCATION_SITE,
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
// The mips architecture prior to revision 5 has inverted encoding for sNaN. // The mips architecture prior to revision 5 has inverted encoding for sNaN.
// The x87 FPU convert the sNaN to qNaN automatically when loading sNaN from // The x87 FPU convert the sNaN to qNaN automatically when loading sNaN from
// memory. // memory.
......
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
#include "src/interpreter/bytecode-flags.h" #include "src/interpreter/bytecode-flags.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h" #include "src/code-stubs.h"
namespace v8 { namespace v8 {
...@@ -25,10 +26,11 @@ uint8_t CreateObjectLiteralFlags::Encode(bool fast_clone_supported, ...@@ -25,10 +26,11 @@ uint8_t CreateObjectLiteralFlags::Encode(bool fast_clone_supported,
uint8_t result = FlagsBits::encode(runtime_flags); uint8_t result = FlagsBits::encode(runtime_flags);
if (fast_clone_supported) { if (fast_clone_supported) {
STATIC_ASSERT( STATIC_ASSERT(
FastCloneShallowObjectStub::kMaximumClonedProperties <= ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties <=
1 << CreateObjectLiteralFlags::FastClonePropertiesCountBits::kShift); 1 << CreateObjectLiteralFlags::FastClonePropertiesCountBits::kShift);
DCHECK_LE(properties_count, DCHECK_LE(
FastCloneShallowObjectStub::kMaximumClonedProperties); properties_count,
ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties);
result |= CreateObjectLiteralFlags::FastClonePropertiesCountBits::encode( result |= CreateObjectLiteralFlags::FastClonePropertiesCountBits::encode(
properties_count); properties_count);
} }
......
...@@ -1603,7 +1603,8 @@ void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ...@@ -1603,7 +1603,8 @@ void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Deep-copy the literal boilerplate. // Deep-copy the literal boilerplate.
uint8_t flags = CreateObjectLiteralFlags::Encode( uint8_t flags = CreateObjectLiteralFlags::Encode(
expr->IsFastCloningSupported(), expr->IsFastCloningSupported(),
FastCloneShallowObjectStub::PropertiesCount(expr->properties_count()), ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
expr->properties_count()),
expr->ComputeFlags()); expr->ComputeFlags());
// If constant properties is an empty fixed array, use our cached // If constant properties is an empty fixed array, use our cached
// empty_fixed_array to ensure it's only added to the constant pool once. // empty_fixed_array to ensure it's only added to the constant pool once.
......
...@@ -2551,8 +2551,9 @@ void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { ...@@ -2551,8 +2551,9 @@ void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) {
Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2)); Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2));
Node* closure = __ LoadRegister(Register::function_closure()); Node* closure = __ LoadRegister(Register::function_closure());
Node* context = __ GetContext(); Node* context = __ GetContext();
Node* result = FastCloneRegExpStub::Generate( ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
assembler, closure, literal_index, pattern, flags, context); Node* result = constructor_assembler.EmitFastCloneRegExp(
closure, literal_index, pattern, flags, context);
__ SetAccumulator(result); __ SetAccumulator(result);
__ Dispatch(); __ Dispatch();
} }
...@@ -2576,9 +2577,9 @@ void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { ...@@ -2576,9 +2577,9 @@ void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
__ Bind(&fast_shallow_clone); __ Bind(&fast_shallow_clone);
{ {
DCHECK(FLAG_allocation_site_pretenuring); DCHECK(FLAG_allocation_site_pretenuring);
Node* result = FastCloneShallowArrayStub::Generate( ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
assembler, closure, literal_index, context, &call_runtime, Node* result = constructor_assembler.EmitFastCloneShallowArray(
TRACK_ALLOCATION_SITE); closure, literal_index, context, &call_runtime, TRACK_ALLOCATION_SITE);
__ SetAccumulator(result); __ SetAccumulator(result);
__ Dispatch(); __ Dispatch();
} }
...@@ -2619,8 +2620,9 @@ void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { ...@@ -2619,8 +2620,9 @@ void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
__ Bind(&if_fast_clone); __ Bind(&if_fast_clone);
{ {
// If we can do a fast clone do the fast-path in FastCloneShallowObjectStub. // If we can do a fast clone do the fast-path in FastCloneShallowObjectStub.
Node* result = FastCloneShallowObjectStub::GenerateFastPath( ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
assembler, &if_not_fast_clone, closure, literal_index, Node* result = constructor_assembler.EmitFastCloneShallowObject(
&if_not_fast_clone, closure, literal_index,
fast_clone_properties_count); fast_clone_properties_count);
__ StoreRegister(result, __ BytecodeOperandReg(3)); __ StoreRegister(result, __ BytecodeOperandReg(3));
__ Dispatch(); __ Dispatch();
......
...@@ -8987,14 +8987,6 @@ class TypeFeedbackInfo: public Struct { ...@@ -8987,14 +8987,6 @@ class TypeFeedbackInfo: public Struct {
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo); DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
}; };
enum AllocationSiteMode {
DONT_TRACK_ALLOCATION_SITE,
TRACK_ALLOCATION_SITE,
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
class AllocationSite: public Struct { class AllocationSite: public Struct {
public: public:
static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024; static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment