MIPS: port Remove variable rewrites and the unnecessary Slot class.

Port r9162 (2215df8).

BUG=
TEST=

Review URL: http://codereview.chromium.org/7847015
Patch from Paul Lind <plind44@gmail.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9187 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
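For context: the change being ported here replaces the Slot class, which every Variable used to be rewritten to, with a location tag and index carried by the Variable itself. A minimal standalone sketch of that shape (simplified names, not V8's actual classes):

    #include <cassert>

    // Hypothetical simplification of the refactor: instead of rewriting each
    // Variable to a separate Slot object, the Variable carries its own
    // allocation location and index.
    class Variable {
     public:
      enum Location { UNALLOCATED, PARAMETER, LOCAL, CONTEXT, LOOKUP };

      Variable(Location location, int index)
          : location_(location), index_(index) {}

      Location location() const { return location_; }
      int index() const { return index_; }

      // Predicates mirroring the ones used throughout the new code below.
      bool IsUnallocated() const { return location_ == UNALLOCATED; }
      bool IsParameter() const { return location_ == PARAMETER; }
      bool IsStackLocal() const { return location_ == LOCAL; }
      bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
      bool IsContextSlot() const { return location_ == CONTEXT; }
      bool IsLookupSlot() const { return location_ == LOOKUP; }

     private:
      Location location_;
      int index_;
    };

    int main() {
      Variable param(Variable::PARAMETER, 0);
      assert(param.IsStackAllocated() && !param.IsContextSlot());
      return 0;
    }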
@@ -200,14 +200,14 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
Slot* slot = scope()->parameter(i)->rewrite();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
Variable* var = scope()->parameter(i);
if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ lw(a0, MemOperand(fp, parameter_offset));
// Store it in the context.
__ li(a1, Operand(Context::SlotOffset(slot->index())));
__ li(a1, Operand(Context::SlotOffset(var->index())));
__ addu(a2, cp, a1);
__ sw(a0, MemOperand(a2, 0));
// Update the write barrier. This clobbers all involved
@@ -252,7 +252,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
ArgumentsAccessStub stub(type);
__ CallStub(&stub);
Move(arguments->rewrite(), v0, a1, a2);
SetVar(arguments, v0, a1, a2);
}
if (FLAG_trace) {
@@ -271,7 +271,8 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
EmitDeclaration(scope()->function(), Variable::CONST, NULL);
int ignored = 0;
EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
}
VisitDeclarations(scope()->declarations());
}
@@ -371,24 +372,27 @@ void FullCodeGenerator::EmitReturnSequence() {
}
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
codegen()->Move(result_register(), slot);
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
codegen()->GetVar(result_register(), var);
}
void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
codegen()->Move(result_register(), slot);
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
codegen()->GetVar(result_register(), var);
__ push(result_register());
}
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
codegen()->GetVar(result_register(), var);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
@@ -621,30 +625,56 @@ void FullCodeGenerator::Split(Condition cc,
}
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
return MemOperand(fp, SlotOffset(slot));
case Slot::CONTEXT: {
int context_chain_length =
scope()->ContextChainLength(slot->var()->scope());
__ LoadContext(scratch, context_chain_length);
return ContextOperand(scratch, slot->index());
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
ASSERT(var->IsStackAllocated());
// Offset is negative because higher indexes are at lower addresses.
int offset = -var->index() * kPointerSize;
// Adjust by a (parameter or local) base offset.
if (var->IsParameter()) {
offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
} else {
offset += JavaScriptFrameConstants::kLocal0Offset;
}
case Slot::LOOKUP:
case Slot::GLOBAL:
UNREACHABLE();
return MemOperand(fp, offset);
}
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
ASSERT(var->IsContextSlot() || var->IsStackAllocated());
if (var->IsContextSlot()) {
int context_chain_length = scope()->ContextChainLength(var->scope());
__ LoadContext(scratch, context_chain_length);
return ContextOperand(scratch, var->index());
} else {
return StackOperand(var);
}
UNREACHABLE();
return MemOperand(v0, 0);
}
void FullCodeGenerator::Move(Register destination, Slot* source) {
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
// Use destination as scratch.
MemOperand slot_operand = EmitSlotSearch(source, destination);
__ lw(destination, slot_operand);
MemOperand location = VarOperand(var, dest);
__ lw(dest, location);
}
void FullCodeGenerator::SetVar(Variable* var,
Register src,
Register scratch0,
Register scratch1) {
ASSERT(var->IsContextSlot() || var->IsStackAllocated());
ASSERT(!scratch0.is(src));
ASSERT(!scratch0.is(scratch1));
ASSERT(!scratch1.is(src));
MemOperand location = VarOperand(var, scratch0);
__ sw(src, location);
// Emit the write barrier code if the location is in the heap.
if (var->IsContextSlot()) {
__ RecordWrite(scratch0,
Operand(Context::SlotOffset(var->index())),
scratch1,
src);
}
}
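The new StackOperand/VarOperand helpers make the frame layout explicit: parameters sit above the frame pointer, locals below it, and context slots are reached through a scratch register holding the right context. A hedged sketch of the stack-offset arithmetic, with constants assumed for a 32-bit frame (the real values come from V8's frame-constants headers):

    #include <cstdio>

    // Assumed constants for illustration only; V8 defines the real ones in
    // its frame-constants headers.
    const int kPointerSize = 4;
    const int kLocal0Offset = -2 * kPointerSize;  // First local below fp.

    // Mirrors FullCodeGenerator::StackOperand: index runs 0..n-1 for
    // parameters (above fp, last parameter closest to fp) or 0..m-1 for
    // locals (below fp).
    int StackOffset(bool is_parameter, int index, int num_parameters) {
      int offset = -index * kPointerSize;
      if (is_parameter) {
        // Receiver plus parameters sit above the frame pointer.
        offset += (num_parameters + 1) * kPointerSize;
      } else {
        offset += kLocal0Offset;
      }
      return offset;
    }

    int main() {
      // With 2 parameters: parameter 0 is at fp+12, parameter 1 at fp+8,
      // local 0 at fp-8 (given the assumed constants above).
      printf("%d %d %d\n", StackOffset(true, 0, 2), StackOffset(true, 1, 2),
             StackOffset(false, 0, 2));
      return 0;
    }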
@@ -674,48 +704,33 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
}
void FullCodeGenerator::Move(Slot* dst,
Register src,
Register scratch1,
Register scratch2) {
ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
ASSERT(!scratch1.is(src) && !scratch2.is(src));
MemOperand location = EmitSlotSearch(dst, scratch1);
__ sw(src, location);
// Emit the write barrier code if the location is in the heap.
if (dst->type() == Slot::CONTEXT) {
__ RecordWrite(scratch1,
Operand(Context::SlotOffset(dst->index())),
scratch2,
src);
}
}
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
FunctionLiteral* function) {
Comment cmnt(masm_, "[ Declaration");
FunctionLiteral* function,
int* global_count) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
Variable* variable = proxy->var();
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->rewrite();
ASSERT(slot != NULL);
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
switch (variable->location()) {
case Variable::UNALLOCATED:
++(*global_count);
break;
case Variable::PARAMETER:
case Variable::LOCAL:
if (function != NULL) {
Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
__ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
__ sw(result_register(), StackOperand(variable));
} else if (mode == Variable::CONST || mode == Variable::LET) {
Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ sw(t0, MemOperand(fp, SlotOffset(slot)));
__ sw(t0, StackOperand(variable));
}
break;
case Slot::CONTEXT:
// We bypass the general EmitSlotSearch because we know more about
// this specific context.
case Variable::CONTEXT:
// The variable in the decl always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
@@ -730,24 +745,27 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
a1, Operand(t0));
}
if (function != NULL) {
Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
__ sw(result_register(), ContextOperand(cp, slot->index()));
int offset = Context::SlotOffset(slot->index());
__ sw(result_register(), ContextOperand(cp, variable->index()));
int offset = Context::SlotOffset(variable->index());
// We know that we have written a function, which is not a smi.
__ mov(a1, cp);
__ RecordWrite(a1, Operand(offset), a2, result_register());
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
} else if (mode == Variable::CONST || mode == Variable::LET) {
Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ sw(at, ContextOperand(cp, slot->index()));
__ sw(at, ContextOperand(cp, variable->index()));
// No write barrier since the_hole_value is in old space.
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
}
break;
case Slot::LOOKUP: {
case Variable::LOOKUP: {
Comment cmnt(masm_, "[ Declaration");
__ li(a2, Operand(variable->name()));
// Declaration nodes are always introduced in one of two modes.
// Declaration nodes are always introduced in one of three modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST ||
mode == Variable::LET);
@@ -766,23 +784,17 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
__ Push(cp, a2, a1, a0);
} else {
ASSERT(Smi::FromInt(0) == 0);
// No initial value!
__ mov(a0, zero_reg); // Operand(Smi::FromInt(0)));
__ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
__ Push(cp, a2, a1, a0);
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
break;
}
case Slot::GLOBAL:
UNREACHABLE();
}
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
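Note the new shape of declaration handling: VisitDeclaration is now empty, EmitDeclaration takes a global_count out-parameter, and unallocated (global) variables are merely counted so they can be declared in one batched DeclareGlobals call. A toy sketch of that counting pass (names illustrative):

    #include <cstdio>
    #include <vector>

    enum Location { UNALLOCATED, PARAMETER, LOCAL, CONTEXT, LOOKUP };

    // Hypothetical stand-in for a declaration's variable location.
    struct Declaration { Location location; };

    // Mirrors the new shape of EmitDeclaration: allocated variables are
    // handled immediately, unallocated (global) ones are only counted so
    // the caller can declare them all in a single runtime call afterwards.
    void EmitDeclaration(const Declaration& decl, int* global_count) {
      switch (decl.location) {
        case UNALLOCATED:
          ++(*global_count);  // Deferred to a batched DeclareGlobals call.
          break;
        default:
          // Stack/context/lookup declarations emit their code right here.
          break;
      }
    }

    int main() {
      std::vector<Declaration> decls = {{UNALLOCATED}, {LOCAL}, {UNALLOCATED}};
      int globals = 0;
      for (const Declaration& d : decls) EmitDeclaration(d, &globals);
      printf("globals to declare in one batch: %d\n", globals);  // Prints 2.
      return 0;
    }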
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
@@ -1095,8 +1107,7 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
}
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
Slot* slot,
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
TypeofState typeof_state,
Label* slow) {
Register current = cp;
@@ -1142,7 +1153,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
}
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(slot->var()->name()));
__ li(a2, Operand(var->name()));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
@@ -1151,15 +1162,14 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
}
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
Slot* slot,
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
Label* slow) {
ASSERT(slot->type() == Slot::CONTEXT);
ASSERT(var->IsContextSlot());
Register context = cp;
Register next = a3;
Register temp = t0;
for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
@@ -1178,12 +1188,11 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
// This function is used only for loads, not stores, so it's safe to
// return a cp-based operand (the write barrier cannot be allowed to
// destroy the cp register).
return ContextOperand(context, slot->index());
return ContextOperand(context, var->index());
}
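ContextSlotOperandCheckExtensions walks the static scope chain, and for each intervening scope that calls eval it verifies at run time that no extension object was introduced. A toy sketch of that walk over a linked context chain (types simplified; the real code only checks scopes with heap slots):

    #include <cassert>

    // Toy context chain: each context may carry an extension object when
    // eval introduced bindings dynamically.
    struct Context {
      Context* previous;
      void* extension;  // Non-null means the fast path is unsafe.
    };

    // Walk 'depth' contexts outward; return nullptr to signal the slow
    // path (mirrors branching to the 'slow' label in the generated code).
    Context* WalkCheckingExtensions(Context* ctx, int depth) {
      for (int i = 0; i < depth; i++) {
        if (ctx->extension != nullptr) return nullptr;  // Bail to slow path.
        ctx = ctx->previous;
      }
      return ctx;  // Safe to load the slot from this context directly.
    }

    int main() {
      Context outer = {nullptr, nullptr};
      Context inner = {&outer, nullptr};
      assert(WalkCheckingExtensions(&inner, 1) == &outer);
      return 0;
    }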
void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
Slot* slot,
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
TypeofState typeof_state,
Label* slow,
Label* done) {
@@ -1192,46 +1201,19 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
if (var->mode() == Variable::DYNAMIC_GLOBAL) {
EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
__ Branch(done);
} else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
Slot* potential_slot = slot->var()->local_if_not_shadowed()->rewrite();
Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
if (potential_slot != NULL) {
// Generate fast case for locals that rewrite to slots.
__ lw(v0, ContextSlotOperandCheckExtensions(potential_slot, slow));
if (potential_slot->var()->mode() == Variable::CONST) {
} else if (var->mode() == Variable::DYNAMIC_LOCAL) {
Variable* local = var->local_if_not_shadowed();
__ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
if (local->mode() == Variable::CONST) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
__ movz(v0, a0, at); // Conditional move.
__ movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
}
__ Branch(done);
} else if (rewrite != NULL) {
// Generate fast case for calls of an argument function.
Property* property = rewrite->AsProperty();
if (property != NULL) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
Literal* key_literal = property->key()->AsLiteral();
if (obj_proxy != NULL &&
key_literal != NULL &&
obj_proxy->IsArguments() &&
key_literal->handle()->IsSmi()) {
// Load arguments object if there are no eval-introduced
// variables. Then load the argument from the arguments
// object using keyed load.
__ lw(a1,
ContextSlotOperandCheckExtensions(obj_proxy->var()->rewrite(),
slow));
__ li(a0, Operand(key_literal->handle()));
Handle<Code> ic =
isolate()->builtins()->KeyedLoadIC_Initialize();
__ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
__ Branch(done);
}
}
}
}
}
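The subu/movz pair above implements a branchless "unhole": the subtraction acts as a compare, and the conditional move substitutes undefined only when the loaded value was the hole. The same idea in portable C++, with sentinel bit patterns invented for illustration:

    #include <cassert>
    #include <cstdint>

    // Invented sentinel bit patterns standing in for V8's the-hole and
    // undefined oddballs.
    const uint32_t kTheHole = 0xDEADBEEF;
    const uint32_t kUndefined = 0xCAFEBABE;

    // Mirrors the subu/movz pair: compute (value - hole) and conditionally
    // replace the result when that difference is zero.
    uint32_t UnholeConst(uint32_t value) {
      uint32_t diff = value - kTheHole;       // subu at, v0, at
      return diff == 0 ? kUndefined : value;  // movz v0, a0, at
    }

    int main() {
      assert(UnholeConst(kTheHole) == kUndefined);
      assert(UnholeConst(42) == 42);
      return 0;
    }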
@@ -1241,12 +1223,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
SetSourcePosition(proxy->position());
Variable* var = proxy->var();
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
Slot* slot = var->rewrite();
ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
if (slot == NULL) {
// Three cases: global variables, lookup variables, and all other types of
// variables.
switch (var->location()) {
case Variable::UNALLOCATED: {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in a2 and the global
// object (receiver) in a0.
@@ -1255,52 +1235,50 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
__ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(v0);
break;
}
} else if (slot->type() == Slot::LOOKUP) {
Label done, slow;
case Variable::PARAMETER:
case Variable::LOCAL:
case Variable::CONTEXT: {
Comment cmnt(masm_, var->IsContextSlot()
? "Context variable"
: "Stack variable");
if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
context()->Plug(var);
} else {
// Let and const need a read barrier.
GetVar(v0, var);
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
if (var->mode() == Variable::LET) {
Label done;
__ Branch(&done, ne, at, Operand(zero_reg));
__ li(a0, Operand(var->name()));
__ push(a0);
__ CallRuntime(Runtime::kThrowReferenceError, 1);
__ bind(&done);
} else {
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
__ movz(v0, a0, at); // Conditional move: Undefined if TheHole.
}
context()->Plug(v0);
}
break;
}
case Variable::LOOKUP: {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
Comment cmnt(masm_, "Lookup slot");
Comment cmnt(masm_, "Lookup variable");
__ li(a1, Operand(var->name()));
__ Push(cp, a1); // Context and name.
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ bind(&done);
context()->Plug(v0);
} else {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
if (var->mode() == Variable::CONST) {
// Constants may be the hole value if they have not been initialized.
// Unhole them.
MemOperand slot_operand = EmitSlotSearch(slot, a0);
__ lw(v0, slot_operand);
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
__ movz(v0, a0, at); // Conditional move.
context()->Plug(v0);
} else if (var->mode() == Variable::LET) {
// Let bindings may be the hole value if they have not been initialized.
// Throw a type error in this case.
Label done;
MemOperand slot_operand = EmitSlotSearch(slot, a0);
__ lw(v0, slot_operand);
__ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
__ Branch(&done, ne, v0, Operand(a1));
__ li(v0, Operand(var->name()));
__ push(v0);
__ CallRuntime(Runtime::kThrowReferenceError, 1);
__ bind(&done);
context()->Plug(v0);
} else {
context()->Plug(slot);
}
}
}
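EmitVariableLoad now gives let and const a shared read barrier: load the slot, compare against the hole, then either throw a reference error (let) or substitute undefined (const). A standalone sketch of that decision (sentinels and the exception type are placeholders):

    #include <cstdint>
    #include <stdexcept>

    const uint32_t kTheHole = 0xDEADBEEF;    // Placeholder sentinel.
    const uint32_t kUndefined = 0xCAFEBABE;  // Placeholder sentinel.

    enum Mode { VAR, CONST, LET };

    // Mirrors the read-barrier logic in EmitVariableLoad for stack and
    // context variables.
    uint32_t LoadWithReadBarrier(uint32_t value, Mode mode) {
      if (mode != LET && mode != CONST) return value;  // No barrier needed.
      if (value != kTheHole) return value;             // Initialized binding.
      if (mode == LET) {
        // Use before initialization of a let binding is an error.
        throw std::runtime_error("ReferenceError: uninitialized let");
      }
      return kUndefined;  // An uninitialized const reads as undefined.
    }

    int main() {
      (void)LoadWithReadBarrier(1, VAR);
      (void)LoadWithReadBarrier(kTheHole, CONST);  // -> undefined, no throw.
      return 0;
    }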
@@ -1839,14 +1817,8 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
ASSERT(var != NULL);
ASSERT(var->is_global() || var->rewrite() != NULL);
if (var->is_global()) {
ASSERT(!var->is_this());
// Assignment to a global variable. Use inline caching for the
// assignment. Right-hand-side value is passed in a0, variable name in
// a2, and the global object in a1.
if (var->IsUnallocated()) {
// Global var, const, or let.
__ mov(a0, result_register());
__ li(a2, Operand(var->name()));
__ lw(a1, GlobalObjectOperand());
@@ -1856,62 +1828,41 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
// scope. However, unlike var initializers, const initializers are able
// to drill a hole to that function context, even from inside a 'with'
// context. We thus bypass the normal static scope lookup.
Slot* slot = var->rewrite();
// Const initializers need a write barrier.
ASSERT(!var->IsParameter()); // No const parameters.
if (var->IsStackLocal()) {
Label skip;
switch (slot->type()) {
case Slot::PARAMETER:
// No const parameters.
UNREACHABLE();
break;
case Slot::LOCAL:
// Detect const reinitialization by checking for the hole value.
__ lw(a1, MemOperand(fp, SlotOffset(slot)));
__ lw(a1, StackOperand(var));
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Branch(&skip, ne, a1, Operand(t0));
__ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
break;
case Slot::CONTEXT:
case Slot::LOOKUP:
__ push(result_register());
__ li(a0, Operand(slot->var()->name()));
__ sw(result_register(), StackOperand(var));
__ bind(&skip);
} else {
ASSERT(var->IsContextSlot() || var->IsLookupSlot());
// Like var declarations, const declarations are hoisted to function
// scope. However, unlike var initializers, const initializers are
// able to drill a hole to that function context, even from inside a
// 'with' context. We thus bypass the normal static scope lookup for
// var->IsContextSlot().
__ push(v0);
__ li(a0, Operand(var->name()));
__ Push(cp, a0); // Context and name.
__ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
break;
case Slot::GLOBAL:
UNREACHABLE();
}
__ bind(&skip);
} else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
Slot* slot = var->AsSlot();
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL: {
Label assign;
// Check for an initialized let binding.
__ lw(a1, MemOperand(fp, SlotOffset(slot)));
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Branch(&assign, ne, a1, Operand(t0));
// Non-initializing assignment to let variable needs a write barrier.
if (var->IsLookupSlot()) {
__ push(v0); // Value.
__ li(a1, Operand(var->name()));
__ push(a1);
__ CallRuntime(Runtime::kThrowReferenceError, 1);
// Perform the assignment.
__ bind(&assign);
__ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
break;
}
case Slot::CONTEXT: {
// Let variables may be the hole value if they have not been
// initialized. Throw a type error in this case.
__ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
Label assign;
MemOperand target = EmitSlotSearch(slot, a1);
// Check for an initialized let binding.
__ lw(a3, target);
MemOperand location = VarOperand(var, a1);
__ lw(a3, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Branch(&assign, ne, a3, Operand(t0));
__ li(a3, Operand(var->name()));
@@ -1919,58 +1870,41 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ CallRuntime(Runtime::kThrowReferenceError, 1);
// Perform the assignment.
__ bind(&assign);
__ sw(result_register(), target);
__ sw(result_register(), location);
if (var->IsContextSlot()) {
// RecordWrite may destroy all its register arguments.
__ mov(a3, result_register());
int offset = Context::SlotOffset(slot->index());
int offset = Context::SlotOffset(var->index());
__ RecordWrite(a1, Operand(offset), a2, a3);
break;
}
case Slot::LOOKUP:
// Call the runtime for the assignment.
__ push(v0); // Value.
__ li(a1, Operand(slot->var()->name()));
__ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
Slot* slot = var->rewrite();
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
// Assignment to var or initializing assignment to let.
if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, a1);
if (FLAG_debug_code && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
}
// Perform the assignment.
__ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
break;
case Slot::CONTEXT: {
MemOperand target = EmitSlotSearch(slot, a1);
// Perform the assignment and issue the write barrier.
__ sw(result_register(), target);
// RecordWrite may destroy all its register arguments.
__ mov(a3, result_register());
int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
__ RecordWrite(a1, Operand(offset), a2, a3);
break;
__ sw(v0, location);
if (var->IsContextSlot()) {
__ mov(a3, v0);
__ RecordWrite(a1, Operand(Context::SlotOffset(var->index())), a2, a3);
}
case Slot::LOOKUP:
// Call the runtime for the assignment.
} else {
ASSERT(var->IsLookupSlot());
__ push(v0); // Value.
__ li(a1, Operand(slot->var()->name()));
__ li(a1, Operand(var->name()));
__ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
case Slot::GLOBAL:
UNREACHABLE();
}
}
// Non-initializing assignments to consts are ignored.
}
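After this rewrite, EmitVariableAssignment boils down to one dispatch on the variable's location; only context slots, being heap-resident, need the RecordWrite barrier after the store. A compact sketch of that dispatch (strings stand in for the emitted code paths):

    #include <cstdio>

    enum Location { UNALLOCATED, PARAMETER, LOCAL, CONTEXT, LOOKUP };

    // Simplified dispatch mirroring EmitVariableAssignment's final shape:
    // globals go through the store IC, stack/context slots are stored
    // directly (context slots also get a write barrier), and lookup slots
    // fall back to the runtime.
    const char* AssignmentStrategy(Location location) {
      switch (location) {
        case UNALLOCATED:
          return "StoreIC on the global object";
        case PARAMETER:
        case LOCAL:
          return "direct sw into the stack frame";
        case CONTEXT:
          return "sw into the context + RecordWrite barrier";
        case LOOKUP:
          return "Runtime::kStoreContextSlot call";
      }
      return "";
    }

    int main() {
      printf("%s\n", AssignmentStrategy(CONTEXT));
      return 0;
    }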
@@ -2211,10 +2145,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
#endif
Comment cmnt(masm_, "[ Call");
Expression* fun = expr->expression();
Variable* var = fun->AsVariableProxy()->AsVariable();
Expression* callee = expr->expression();
VariableProxy* proxy = callee->AsVariableProxy();
Property* property = callee->AsProperty();
if (var != NULL && var->is_possibly_eval()) {
if (proxy != NULL && proxy->var()->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
@@ -2223,7 +2158,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
VisitForStackValue(fun);
VisitForStackValue(callee);
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ push(a2); // Reserved receiver slot.
@@ -2231,16 +2166,16 @@ void FullCodeGenerator::VisitCall(Call* expr) {
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
}
// If we know that eval can only be shadowed by eval-introduced
// variables we attempt to load the global eval function directly
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
if (var->rewrite() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
Variable* var = proxy->var();
if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
EmitLoadGlobalSlotCheckExtensions(var->rewrite(),
NOT_INSIDE_TYPEOF,
&slow);
EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
// Push the function and resolve eval.
__ push(v0);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
@@ -2248,14 +2183,12 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ bind(&slow);
}
// Push copy of the function (found below the arguments) and
// Push a copy of the function (found below the arguments) and
// resolve eval.
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ push(a1);
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
if (done.is_linked()) {
__ bind(&done);
}
// The runtime call returns a pair of values in v0 (function) and
// v1 (receiver). Touch up the stack with the right values.
@@ -2271,30 +2204,26 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, v0);
} else if (var != NULL && !var->is_this() && var->is_global()) {
} else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
__ lw(a0, GlobalObjectOperand());
__ push(a0);
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->rewrite() != NULL &&
var->rewrite()->type() == Slot::LOOKUP) {
EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLoadFromSlotFastCase(var->rewrite(),
NOT_INSIDE_TYPEOF,
&slow,
&done);
EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
}
__ bind(&slow);
// Call the runtime to find the function to call (returned in v0)
// and the object holding it (returned in v1).
__ push(context_register());
__ li(a2, Operand(var->name()));
__ li(a2, Operand(proxy->name()));
__ push(a2);
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ Push(v0, v1); // Function, receiver.
@@ -2319,26 +2248,21 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// by LoadContextSlot. That object could be the hole if the
// receiver is implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
Literal* key = prop->key()->AsLiteral();
if (key != NULL && key->handle()->IsSymbol()) {
// Call to a named property, use call IC.
} else if (property != NULL) {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
VisitForStackValue(property->obj());
}
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
if (property->key()->IsPropertyName()) {
EmitCallWithIC(expr,
property->key()->AsLiteral()->handle(),
RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
EmitKeyedCallWithIC(expr, prop->key());
EmitKeyedCallWithIC(expr, property->key());
}
} else {
// Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(fun);
VisitForStackValue(callee);
}
// Load global receiver object.
__ lw(a1, GlobalObjectOperand());
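VisitCall now classifies the callee up front as a variable proxy or a property access instead of digging through Slot types. A small sketch of the dispatch order as it reads after this patch (labels stand in for the emitted sequences):

    #include <cstdio>

    // Simplified callee classification mirroring the order in which
    // VisitCall tests its cases after this patch. Names illustrative only.
    enum CalleeKind { POSSIBLY_EVAL, GLOBAL_VAR, LOOKUP_VAR,
                      NAMED_PROPERTY, KEYED_PROPERTY, OTHER };

    const char* CallStrategy(CalleeKind kind) {
      switch (kind) {
        case POSSIBLY_EVAL:  return "ResolvePossiblyDirectEval, then call";
        case GLOBAL_VAR:     return "call IC with global object receiver";
        case LOOKUP_VAR:     return "Runtime::kLoadContextSlot, then stub";
        case NAMED_PROPERTY: return "named call IC";
        case KEYED_PROPERTY: return "keyed call IC";
        case OTHER:          return "evaluate callee, call with global receiver";
      }
      return "";
    }

    int main() {
      printf("%s\n", CallStrategy(GLOBAL_VAR));
      return 0;
    }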
@@ -3668,32 +3592,32 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
Property* prop = expr->expression()->AsProperty();
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
Property* property = expr->expression()->AsProperty();
VariableProxy* proxy = expr->expression()->AsVariableProxy();
if (prop != NULL) {
VisitForStackValue(prop->obj());
VisitForStackValue(prop->key());
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
__ li(a1, Operand(Smi::FromInt(strict_mode_flag())));
__ push(a1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
} else if (var != NULL) {
} else if (proxy != NULL) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
// but "delete this" is allowed.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
if (var->is_global()) {
if (var->IsUnallocated()) {
__ lw(a2, GlobalObjectOperand());
__ li(a1, Operand(var->name()));
__ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(a2, a1, a0);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
} else if (var->rewrite() != NULL &&
var->rewrite()->type() != Slot::LOOKUP) {
} else if (var->IsStackAllocated() || var->IsContextSlot()) {
// Result of deleting non-global, non-dynamic variables is false.
// The subexpression does not have side effects.
context()->Plug(false);
context()->Plug(var->is_this());
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
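One subtle change above: deleting a stack- or context-allocated variable now plugs var->is_this() rather than a hard false, so 'delete this' correctly yields true while other non-global, non-dynamic deletes stay false. A sketch of the result logic (the global case is simplified; its real result comes from the DELETE builtin at run time):

    #include <cassert>

    // Mirrors the delete-of-variable cases after this patch: deleting a
    // global goes through the DELETE builtin; deleting a stack- or
    // context-allocated variable is a no-op whose result is false, except
    // for 'delete this', which is true.
    bool DeleteVariableResult(bool is_unallocated, bool is_this) {
      if (is_unallocated) {
        // Would invoke Builtins::DELETE on the global object here; true is
        // only an illustrative stand-in for the builtin's runtime result.
        return true;
      }
      return is_this;  // 'delete this' -> true, other locals -> false.
    }

    int main() {
      assert(DeleteVariableResult(false, true));    // delete this
      assert(!DeleteVariableResult(false, false));  // delete local_var
      return 0;
    }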
@@ -3968,8 +3892,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
ASSERT(!context()->IsEffect());
ASSERT(!context()->IsTest());
VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(proxy->name()));
@@ -3979,15 +3905,12 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
__ Call(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL &&
proxy->var()->rewrite() != NULL &&
proxy->var()->rewrite()->type() == Slot::LOOKUP) {
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
Slot* slot = proxy->var()->rewrite();
EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
__ li(a0, Operand(proxy->name()));
......