Commit a0198f2c authored by mstarzinger, committed by Commit bot

[fullcodegen] Factor out EmitNewClosure from architectures.

This makes the aforementioned emitting function independent of the
target architecture by leveraging existing abstractions.

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/1730243002

Cr-Commit-Position: refs/heads/master@{#34263}
parent 1aee7555
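The "existing abstractions" the commit message refers to are visible in the new shared hunk below: the stub's call interface descriptor, which names the register a FastNewClosureStub expects its SharedFunctionInfo argument in, and result_register(), which names the per-architecture return register. A condensed sketch of the shared implementation (the full version is the +30-line hunk further down; only the comments here are added commentary):

  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope() && info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    // The descriptor supplies the argument register, replacing the
    // hard-coded r2/x2/ebx/a2/r5/rbx moves in the per-architecture files.
    __ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  // result_register() replaces the hard-coded r0/x0/eax/v0/r3/rax.
  context()->Plug(result_register());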
@@ -1172,31 +1172,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ mov(r2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(r0);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1165,31 +1165,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new space for
// nested functions that don't need literals cloning. If we're running with
// the --always-opt or the --prepare-always-opt flag, we need to use the
// runtime function so that the new function we are creating here gets a
// chance to have its code optimized and doesn't just get a copy of the
// existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ Mov(x2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(x0);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1007,6 +1007,30 @@ void FullCodeGenerator::EmitUnwindAndReturn() {
EmitReturnSequence();
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(result_register());
}
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetExpressionPosition(prop);
Literal* key = prop->key()->AsLiteral();
@@ -1098,31 +1098,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ mov(ebx, Immediate(info));
__ CallStub(&stub);
} else {
__ push(Immediate(info));
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(eax);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1167,31 +1167,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ li(a2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(v0);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1169,31 +1169,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ li(a2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(v0);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1138,28 +1138,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
scope()->is_function_scope() && info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ mov(r5, Operand(info));
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(r3);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1124,31 +1124,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ Move(rbx, info);
__ CallStub(&stub);
} else {
__ Push(info);
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(rax);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));
@@ -1091,31 +1091,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
__ mov(ebx, Immediate(info));
__ CallStub(&stub);
} else {
__ push(Immediate(info));
__ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
: Runtime::kNewClosure);
}
context()->Plug(eax);
}
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
FeedbackVectorSlot slot) {
DCHECK(NeedsHomeObject(initializer));