Commit 4c14bbf9 authored by rmcilroy, committed by Commit bot

[fullcodegen] Remove deprecated support for lookup variables, eval and with.

This code is no longer used by full-codegen, since all functions that use with statements,
call eval, or have lookup variable access now go through Ignition first.

BUG=v8:5657

Review-Url: https://codereview.chromium.org/2514393002
Cr-Commit-Position: refs/heads/master@{#41276}
parent 5925074a
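
For context, here is a minimal standalone C++ sketch of the tier-dispatch rule this CL relies on (not V8 source; every name below is hypothetical): once AST numbering flags a function as calling eval, containing a with statement, or touching a lookup (dynamic) variable, that function is compiled by Ignition, so the full-codegen slow paths deleted below can never be reached.

#include <cassert>

// Hypothetical stand-ins for the real compiler pipeline types.
enum class Tier { kFullCodegen, kIgnition };

struct FunctionTraits {
  bool calls_eval = false;           // direct eval() in the body
  bool has_with_scope = false;       // contains a with statement
  bool has_lookup_variable = false;  // dynamic/lookup variable access
};

// Mirrors the effect of DisableFullCodegenAndCrankshaft: any of these
// features forces the function onto the Ignition pipeline.
bool RequiresIgnition(const FunctionTraits& f) {
  return f.calls_eval || f.has_with_scope || f.has_lookup_variable;
}

Tier ChooseTier(const FunctionTraits& f) {
  return RequiresIgnition(f) ? Tier::kIgnition : Tier::kFullCodegen;
}

int main() {
  FunctionTraits plain;      // ordinary function
  FunctionTraits uses_eval;  // function containing eval()
  uses_eval.calls_eval = true;
  assert(ChooseTier(plain) == Tier::kFullCodegen);
  assert(ChooseTier(uses_eval) == Tier::kIgnition);  // full-codegen never sees it
  return 0;
}

This is why the CL can replace full-codegen's eval branch with CHECK(!expr->is_possibly_eval()) further down.
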
@@ -532,6 +532,9 @@ void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
 void AstNumberingVisitor::VisitCall(Call* node) {
+  if (node->is_possibly_eval()) {
+    DisableFullCodegenAndCrankshaft(kFunctionCallsEval);
+  }
   IncrementNodeCount();
   ReserveFeedbackSlots(node);
   node->set_base_id(ReserveIdRange(Call::num_ids()));
@@ -587,7 +590,6 @@ void AstNumberingVisitor::VisitRewritableExpression(
 bool AstNumberingVisitor::Renumber(FunctionLiteral* node) {
   DeclarationScope* scope = node->scope();
   if (scope->new_target_var()) DisableFullCodegenAndCrankshaft(kSuperReference);
-  if (scope->calls_eval()) DisableFullCodegenAndCrankshaft(kFunctionCallsEval);
   if (scope->arguments() != NULL && !scope->arguments()->IsStackAllocated()) {
     DisableFullCodegenAndCrankshaft(kContextAllocatedArguments);
   }
......
@@ -257,11 +257,6 @@ class AstGraphBuilder : public AstVisitor<AstGraphBuilder> {
   // Named and keyed loads require a VectorSlotPair for successful lowering.
   VectorSlotPair CreateVectorSlotPair(FeedbackVectorSlot slot) const;
-  // Determine which contexts need to be checked for extension objects that
-  // might shadow the optimistic declaration of dynamic lookup variables.
-  uint32_t ComputeBitsetForDynamicGlobal(Variable* variable);
-  uint32_t ComputeBitsetForDynamicContext(Variable* variable);
   // Computes the frequency for JSCallFunction and JSCallConstruct nodes.
   float ComputeCallFrequency(FeedbackVectorSlot slot) const;
@@ -327,10 +322,6 @@ class AstGraphBuilder : public AstVisitor<AstGraphBuilder> {
   Node* BuildGlobalStore(Handle<Name> name, Node* value,
                          const VectorSlotPair& feedback);
-  // Builders for dynamic variable loads and stores.
-  Node* BuildDynamicLoad(Handle<Name> name, TypeofMode typeof_mode);
-  Node* BuildDynamicStore(Handle<Name> name, Node* value);
   // Builders for accessing the function context.
   Node* BuildLoadGlobalObject();
   Node* BuildLoadNativeContextField(int index);
@@ -382,14 +373,6 @@ class AstGraphBuilder : public AstVisitor<AstGraphBuilder> {
   // Optimization for variable load from global object.
   Node* TryLoadGlobalConstant(Handle<Name> name);
-  // Optimization for variable load of dynamic lookup slot that is most likely
-  // to resolve to a global slot or context slot (inferred from scope chain).
-  Node* TryLoadDynamicVariable(Variable* variable, Handle<String> name,
-                               BailoutId bailout_id,
-                               const VectorSlotPair& feedback,
-                               OutputFrameStateCombine combine,
-                               TypeofMode typeof_mode);
   // Optimizations for automatic type conversion.
   Node* TryFastToBoolean(Node* input);
   Node* TryFastToName(Node* input);
......
@@ -501,8 +501,7 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                                TypeofMode typeof_mode) {
   Variable* var = proxy->var();
-  DCHECK(var->IsUnallocated() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
+  DCHECK(var->IsUnallocated());
   __ Move(LoadDescriptor::NameRegister(), var->name());
   EmitLoadSlot(LoadGlobalDescriptor::SlotRegister(),
@@ -914,8 +913,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
   DCHECK(!context()->IsEffect());
   DCHECK(!context()->IsTest());
-  if (proxy != NULL &&
-      (proxy->var()->IsUnallocated() || proxy->var()->IsLookupSlot())) {
+  if (proxy != NULL && proxy->var()->IsUnallocated()) {
     EmitVariableLoad(proxy, INSIDE_TYPEOF);
     PrepareForBailout(proxy, BailoutState::TOS_REGISTER);
   } else {
@@ -1670,48 +1668,44 @@ void FullCodeGenerator::VisitCall(Call* expr) {
   Expression* callee = expr->expression();
   Call::CallType call_type = expr->GetCallType();
-  if (expr->is_possibly_eval()) {
-    EmitPossiblyEvalCall(expr);
-  } else {
-    switch (call_type) {
-      case Call::GLOBAL_CALL:
-        EmitCallWithLoadIC(expr);
-        break;
-      case Call::WITH_CALL:
-        // Call to a lookup slot looked up through a with scope.
-        PushCalleeAndWithBaseObject(expr);
-        EmitCall(expr);
-        break;
-      case Call::NAMED_PROPERTY_CALL: {
-        Property* property = callee->AsProperty();
-        VisitForStackValue(property->obj());
-        EmitCallWithLoadIC(expr);
-        break;
-      }
-      case Call::KEYED_PROPERTY_CALL: {
-        Property* property = callee->AsProperty();
-        VisitForStackValue(property->obj());
-        EmitKeyedCallWithLoadIC(expr, property->key());
-        break;
-      }
-      case Call::NAMED_SUPER_PROPERTY_CALL:
-        EmitSuperCallWithLoadIC(expr);
-        break;
-      case Call::KEYED_SUPER_PROPERTY_CALL:
-        EmitKeyedSuperCallWithLoadIC(expr);
-        break;
-      case Call::SUPER_CALL:
-        EmitSuperConstructorCall(expr);
-        break;
-      case Call::OTHER_CALL:
-        // Call to an arbitrary expression not handled specially above.
-        VisitForStackValue(callee);
-        OperandStackDepthIncrement(1);
-        __ PushRoot(Heap::kUndefinedValueRootIndex);
-        // Emit function call.
-        EmitCall(expr);
-        break;
-    }
-  }
+  // Eval is unsupported.
+  CHECK(!expr->is_possibly_eval());
+  switch (call_type) {
+    case Call::GLOBAL_CALL:
+      EmitCallWithLoadIC(expr);
+      break;
+    case Call::NAMED_PROPERTY_CALL: {
+      Property* property = callee->AsProperty();
+      VisitForStackValue(property->obj());
+      EmitCallWithLoadIC(expr);
+      break;
+    }
+    case Call::KEYED_PROPERTY_CALL: {
+      Property* property = callee->AsProperty();
+      VisitForStackValue(property->obj());
+      EmitKeyedCallWithLoadIC(expr, property->key());
+      break;
+    }
+    case Call::NAMED_SUPER_PROPERTY_CALL:
+      EmitSuperCallWithLoadIC(expr);
+      break;
+    case Call::KEYED_SUPER_PROPERTY_CALL:
+      EmitKeyedSuperCallWithLoadIC(expr);
+      break;
+    case Call::SUPER_CALL:
+      EmitSuperConstructorCall(expr);
+      break;
+    case Call::OTHER_CALL:
+      // Call to an arbitrary expression not handled specially above.
+      VisitForStackValue(callee);
+      OperandStackDepthIncrement(1);
+      __ PushRoot(Heap::kUndefinedValueRootIndex);
+      // Emit function call.
+      EmitCall(expr);
+      break;
+    case Call::WITH_CALL:
+      UNREACHABLE();
+  }
 #ifdef DEBUG
......
@@ -467,7 +467,6 @@ class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
   void EmitSuperCallWithLoadIC(Call* expr);
   void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
   void EmitKeyedSuperCallWithLoadIC(Call* expr);
-  void EmitPossiblyEvalCall(Call* expr);
 #define FOR_EACH_FULL_CODE_INTRINSIC(F) \
   F(IsSmi)                              \
@@ -506,20 +505,12 @@ class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
   void RestoreContext();
   // Platform-specific code for loading variables.
-  void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
-                                     TypeofMode typeof_mode, Label* slow);
-  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
-  void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode,
-                                 Label* slow, Label* done);
   void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
   void EmitVariableLoad(VariableProxy* proxy,
                         TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);
   void EmitAccessor(ObjectLiteralProperty* property);
-  // Expects the arguments and the function already pushed.
-  void EmitResolvePossiblyDirectEval(Call* expr);
   // Platform-specific support for allocating a new closure based on
   // the given function info.
   void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
@@ -698,8 +689,6 @@ class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
   // and PushCatchContext.
   void PushFunctionArgumentForContextAllocation();
-  void PushCalleeAndWithBaseObject(Call* expr);
   // AST node visit functions.
 #define DECLARE_VISIT(type) void Visit##type(type* node);
   AST_NODE_LIST(DECLARE_VISIT)
......