Commit 01bcdbcd authored by sgjesse@chromium.org

MIPS simple function calls

This lands http://codereview.chromium.org/1018001.

Patch by Alexandre Rames <alexandre.rames@gmail.com> from Sigma Designs Inc.

Mads, you just have to look at the new flag --disable-native-files (two files not in the mips directory).
Review URL: http://codereview.chromium.org/1140004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4234 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0c6a3f48
@@ -1114,6 +1114,11 @@ bool Genesis::InstallNatives() {
global_context()->set_opaque_reference_function(*opaque_reference_fun);
}
if (FLAG_disable_native_files) {
PrintF("Warning: Running without installed natives!\n");
return true;
}
// Install natives.
for (int i = Natives::GetDebuggerCount();
i < Natives::GetBuiltinsCount();
......
@@ -124,6 +124,7 @@ DEFINE_string(expose_natives_as, NULL, "expose natives in global object")
DEFINE_string(expose_debug_as, NULL, "expose debug in global object")
DEFINE_bool(expose_gc, false, "expose gc extension")
DEFINE_int(stack_trace_limit, 10, "number of stack frames to capture")
DEFINE_bool(disable_native_files, false, "disable builtin natives files")
// builtins-ia32.cc
DEFINE_bool(inline_new, true, "use fast inline allocation")
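
As an aside on how the flag plumbing works: a DEFINE_bool line like the disable_native_files one above expands to a global FLAG_<name> variable that code such as Genesis::InstallNatives() tests directly, and v8's flag parser treats '-' and '_' in flag names as equivalent, which is why the commit message writes --disable-native-files. A minimal self-contained sketch of that mechanism (the macro and parser here are simplified stand-ins, not v8's actual flag-definitions machinery):

#include <cstdio>
#include <cstring>

// Simplified stand-in for v8's DEFINE_bool: declare a FLAG_<name> global.
#define DEFINE_BOOL(name, deflt, comment) bool FLAG_##name = deflt;

DEFINE_BOOL(disable_native_files, false, "disable builtin natives files")

// Hypothetical parser: '-' and '_' in flag names are interchangeable, so
// --disable-native-files and --disable_native_files both match.
static void SetFlagIfMatch(const char* arg) {
  char normalized[64];
  size_t n = strlen(arg);
  if (n >= sizeof(normalized)) return;
  for (size_t i = 0; i <= n; i++) {  // <= n also copies the terminating NUL
    normalized[i] = (i >= 2 && arg[i] == '-') ? '_' : arg[i];
  }
  if (strcmp(normalized, "--disable_native_files") == 0) {
    FLAG_disable_native_files = true;
  }
}

int main(int argc, char** argv) {
  for (int i = 1; i < argc; i++) SetFlagIfMatch(argv[i]);
  printf("disable_native_files = %d\n", FLAG_disable_native_files);
  return 0;
}
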
......
@@ -74,7 +74,99 @@ void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody
// Registers:
// a0: entry_address
// a1: function
// a2: receiver_pointer
// a3: argc
// s0: argv
//
// Stack:
// arguments slots
// handler frame
// entry frame
// callee saved registers + ra
// 4 args slots
// args
// Clear the context before we push it when entering the JS frame.
__ li(cp, Operand(0));
// Enter an internal frame.
__ EnterInternalFrame();
// Set up the context from the function argument.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Set up the roots register.
ExternalReference roots_address = ExternalReference::roots_address();
__ li(s6, Operand(roots_address));
// Push the function and the receiver onto the stack.
__ MultiPushReversed(a1.bit() | a2.bit());
// Copy arguments to the stack in a loop.
// a3: argc
// s0: argv, i.e. points to the first arg
Label loop, entry;
__ sll(t0, a3, kPointerSizeLog2);
__ add(t2, s0, t0);
__ b(&entry);
__ nop(); // Branch delay slot nop.
// t2 points past last arg.
__ bind(&loop);
__ lw(t0, MemOperand(s0)); // Read next parameter.
__ addiu(s0, s0, kPointerSize);
__ lw(t0, MemOperand(t0)); // Dereference handle.
__ Push(t0); // Push parameter.
__ bind(&entry);
__ Branch(ne, &loop, s0, Operand(t2));
// Registers:
// a0: entry_address
// a1: function
// a2: receiver_pointer
// a3: argc
// s0: argv
// s6: roots_address
//
// Stack:
// arguments
// receiver
// function
// arguments slots
// handler frame
// entry frame
// callee saved registers + ra
// 4 args slots
// args
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
__ mov(s1, t4);
__ mov(s2, t4);
__ mov(s3, t4);
__ mov(s4, t4);
__ mov(s5, t4);
// s6 holds the root address. Do not clobber.
// s7 is cp. Do not init.
// Invoke the code and pass argc as a0.
__ mov(a0, a3);
if (is_construct) {
UNIMPLEMENTED_MIPS();
__ break_(0x164);
} else {
ParameterCount actual(a0);
__ InvokeFunction(a1, actual, CALL_FUNCTION);
}
__ LeaveInternalFrame();
__ Jump(ra);
}
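
The copy loop above performs two loads per argument because each argv entry is a handle: the first lw fetches the handle out of the argv array, the second dereferences it to get the object pointer that is pushed. A minimal C++ sketch of the same semantics, with stand-in types (Object and the vector-as-stack are illustrative, not v8 types):

#include <vector>

typedef int Object;       // stand-in for a v8 heap object
typedef Object** Handle;  // a handle: pointer to a slot holding an Object*

// Same shape as the assembly loop: s0 walks argv, t2 points past the last
// arg, and each iteration does two loads (fetch the handle, dereference it)
// followed by a push.
void CopyArguments(Handle* argv, int argc, std::vector<Object*>* stack) {
  Handle* end = argv + argc;                 // t2 = s0 + (argc << 2)
  for (Handle* p = argv; p != end; ++p) {    // Branch(ne, &loop, s0, t2)
    stack->push_back(**p);                   // lw t0,(s0); lw t0,(t0); Push(t0)
  }
}

int main() {
  Object a = 1, b = 2;
  Object* slot_a = &a;
  Object* slot_b = &b;
  Handle args[] = { &slot_a, &slot_b };
  std::vector<Object*> stack;
  CopyArguments(args, 2, &stack);
  return stack.size() == 2 ? 0 : 1;
}
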
@@ -100,6 +192,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
__ break_(0x201);
}
......
@@ -36,7 +36,31 @@ namespace internal {
// Platform-specific inline functions.
void DeferredCode::Jump() { __ b(&entry_label_); }
void DeferredCode::Jump() {
__ b(&entry_label_);
__ nop();
}
void Reference::GetValueAndSpill() {
GetValue();
}
void CodeGenerator::VisitAndSpill(Statement* statement) {
Visit(statement);
}
void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
VisitStatements(statements);
}
void CodeGenerator::LoadAndSpill(Expression* expression) {
Load(expression);
}
#undef __
......
@@ -30,13 +30,14 @@
#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "compiler.h"
#include "virtual-frame-inl.h"
@@ -47,7 +48,7 @@ namespace internal {
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// Platform-specific DeferredCode functions.
@@ -61,13 +62,41 @@ void DeferredCode::RestoreRegisters() {
}
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// CodeGenState implementation.
CodeGenState::CodeGenState(CodeGenerator* owner)
: owner_(owner),
true_target_(NULL),
false_target_(NULL),
previous_(NULL) {
owner_->set_state(this);
}
CodeGenState::CodeGenState(CodeGenerator* owner,
JumpTarget* true_target,
JumpTarget* false_target)
: owner_(owner),
true_target_(true_target),
false_target_(false_target),
previous_(owner->state()) {
owner_->set_state(this);
}
CodeGenState::~CodeGenState() {
ASSERT(owner_->state() == this);
owner_->set_state(previous_);
}
// -----------------------------------------------------------------------------
// CodeGenerator implementation
CodeGenerator::CodeGenerator(MacroAssembler* masm)
: deferred_(8),
masm_(masm),
scope_(NULL),
frame_(NULL),
allocator_(NULL),
cc_reg_(cc_always),
@@ -77,18 +106,362 @@ CodeGenerator::CodeGenerator(MacroAssembler* masm)
// Calling conventions:
// s8_fp: caller's frame pointer
// fp: caller's frame pointer
// sp: stack pointer
// a1: called JS function
// cp: callee's context
void CodeGenerator::Generate(CompilationInfo* info, Mode mode) {
void CodeGenerator::Generate(CompilationInfo* info) {
// Record the position for debugging purposes.
CodeForFunctionPosition(info->function());
// Initialize state.
info_ = info;
ASSERT(allocator_ == NULL);
RegisterAllocator register_allocator(this);
allocator_ = &register_allocator;
ASSERT(frame_ == NULL);
frame_ = new VirtualFrame();
cc_reg_ = cc_always;
{
CodeGenState state(this);
// Registers:
// a1: called JS function
// ra: return address
// fp: caller's frame pointer
// sp: stack pointer
// cp: callee's context
//
// Stack:
// arguments
// receiver
frame_->Enter();
// Allocate space for locals and initialize them.
frame_->AllocateStackSlots();
// Initialize the function return target.
function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
function_return_is_shadowed_ = false;
VirtualFrame::SpilledScope spilled_scope;
if (scope()->num_heap_slots() > 0) {
UNIMPLEMENTED_MIPS();
}
{
Comment cmnt2(masm_, "[ copy context parameters into .context");
// Note that iteration order is relevant here! If we have the same
// parameter twice (e.g., function (x, y, x)), and that parameter
// needs to be copied into the context, it must be the last argument
// passed to the parameter that needs to be copied. This is a rare
// case so we don't check for it, instead we rely on the copying
// order: such a parameter is copied repeatedly into the same
// context location and thus the last value is what is seen inside
// the function.
for (int i = 0; i < scope()->num_parameters(); i++) {
UNIMPLEMENTED_MIPS();
}
}
// Store the arguments object. This must happen after context
// initialization because the arguments object may be stored in the
// context.
if (scope()->arguments() != NULL) {
UNIMPLEMENTED_MIPS();
}
// Generate code to 'execute' declarations and initialize functions
// (source elements). In case of an illegal redeclaration we need to
// handle that instead of processing the declarations.
if (scope()->HasIllegalRedeclaration()) {
Comment cmnt(masm_, "[ illegal redeclarations");
scope()->VisitIllegalRedeclaration(this);
} else {
Comment cmnt(masm_, "[ declarations");
ProcessDeclarations(scope()->declarations());
// Bail out if a stack-overflow exception occurred when processing
// declarations.
if (HasStackOverflow()) return;
}
if (FLAG_trace) {
UNIMPLEMENTED_MIPS();
}
// Compile the body of the function in a vanilla state. Don't
// bother compiling all the code if the scope has an illegal
// redeclaration.
if (!scope()->HasIllegalRedeclaration()) {
Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
bool is_builtin = Bootstrapper::IsActive();
bool should_trace =
is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
if (should_trace) {
UNIMPLEMENTED_MIPS();
}
#endif
VisitStatementsAndSpill(info->function()->body());
}
}
if (has_valid_frame() || function_return_.is_linked()) {
if (!function_return_.is_linked()) {
CodeForReturnPosition(info->function());
}
// Registers:
// v0: result
// sp: stack pointer
// fp: frame pointer
// cp: callee's context
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
function_return_.Bind();
if (FLAG_trace) {
UNIMPLEMENTED_MIPS();
}
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
masm_->bind(&check_exit_codesize);
masm_->mov(sp, fp);
masm_->lw(fp, MemOperand(sp, 0));
masm_->lw(ra, MemOperand(sp, 4));
masm_->addiu(sp, sp, 8);
// Here we use masm_-> instead of the __ macro to prevent the code coverage
// tool from instrumenting, as we rely on the code size here.
// TODO(MIPS): Should we be able to use more than 0x1ffe parameters?
masm_->addiu(sp, sp, (scope()->num_parameters() + 1) * kPointerSize);
masm_->Jump(ra);
// The Jump automatically generates a nop in the branch delay slot.
// Check that the size of the code used for returning matches what is
// expected by the debugger.
ASSERT_EQ(kJSReturnSequenceLength,
masm_->InstructionsGeneratedSince(&check_exit_codesize));
}
// Code generation state must be reset.
ASSERT(!has_cc());
ASSERT(state_ == NULL);
ASSERT(!function_return_is_shadowed_);
function_return_.Unuse();
DeleteFrame();
// Process any deferred code using the register allocator.
if (!HasStackOverflow()) {
ProcessDeferred();
}
allocator_ = NULL;
}
void CodeGenerator::LoadReference(Reference* ref) {
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ LoadReference");
Expression* e = ref->expression();
Property* property = e->AsProperty();
Variable* var = e->AsVariableProxy()->AsVariable();
if (property != NULL) {
UNIMPLEMENTED_MIPS();
} else if (var != NULL) {
// The expression is a variable proxy that does not rewrite to a
// property. Global variables are treated as named property references.
if (var->is_global()) {
LoadGlobal();
ref->set_type(Reference::NAMED);
} else {
ASSERT(var->slot() != NULL);
ref->set_type(Reference::SLOT);
}
} else {
UNIMPLEMENTED_MIPS();
}
}
void CodeGenerator::UnloadReference(Reference* ref) {
VirtualFrame::SpilledScope spilled_scope;
// Pop a reference from the stack while preserving TOS.
Comment cmnt(masm_, "[ UnloadReference");
int size = ref->size();
if (size > 0) {
frame_->EmitPop(a0);
frame_->Drop(size);
frame_->EmitPush(a0);
}
ref->set_unloaded();
}
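
UnloadReference keeps the value on top of the frame while removing the reference's slots beneath it, which is why the pop/drop/push dance above is needed. A toy model with a vector standing in for the virtual frame (illustrative only):

#include <cassert>
#include <vector>

// Toy model of UnloadReference: the value sits on top of 'size' reference
// slots; pop the value, drop the slots, push the value back.
void UnloadReference(std::vector<int>* frame, int size) {
  if (size > 0) {
    int tos = frame->back();              // frame_->EmitPop(a0)
    frame->pop_back();
    frame->resize(frame->size() - size);  // frame_->Drop(size)
    frame->push_back(tos);                // frame_->EmitPush(a0)
  }
}

int main() {
  std::vector<int> frame = {7, 8, 42};    // two reference slots, then the value
  UnloadReference(&frame, 2);
  assert(frame.size() == 1 && frame.back() == 42);
  return 0;
}
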
MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
// Currently, this assertion will fail if we try to assign to
// a constant variable that is constant because it is read-only
// (such as the variable referring to a named function expression).
// We need to implement assignments to read-only variables.
// Ideally, we should do this during AST generation (by converting
// such assignments into expression statements); however, in general
// we may not be able to make the decision until past AST generation,
// that is when the entire program is known.
ASSERT(slot != NULL);
int index = slot->index();
switch (slot->type()) {
case Slot::PARAMETER:
UNIMPLEMENTED_MIPS();
return MemOperand(no_reg, 0);
case Slot::LOCAL:
return frame_->LocalAt(index);
case Slot::CONTEXT: {
UNIMPLEMENTED_MIPS();
return MemOperand(no_reg, 0);
}
default:
UNREACHABLE();
return MemOperand(no_reg, 0);
}
}
// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
JumpTarget* true_target,
JumpTarget* false_target,
bool force_cc) {
ASSERT(!has_cc());
int original_height = frame_->height();
{ CodeGenState new_state(this, true_target, false_target);
Visit(x);
// If we hit a stack overflow, we may not have actually visited
// the expression. In that case, we ensure that we have a
// valid-looking frame state because we will continue to generate
// code as we unwind the C++ stack.
//
// It's possible to have both a stack overflow and a valid frame
// state (eg, a subexpression overflowed, visiting it returned
// with a dummied frame state, and visiting this expression
// returned with a normal-looking state).
if (HasStackOverflow() &&
has_valid_frame() &&
!has_cc() &&
frame_->height() == original_height) {
true_target->Jump();
}
}
if (force_cc && frame_ != NULL && !has_cc()) {
// Convert the TOS value to a boolean in the condition code register.
UNIMPLEMENTED_MIPS();
}
ASSERT(!force_cc || !has_valid_frame() || has_cc());
ASSERT(!has_valid_frame() ||
(has_cc() && frame_->height() == original_height) ||
(!has_cc() && frame_->height() == original_height + 1));
}
void CodeGenerator::Load(Expression* x) {
#ifdef DEBUG
int original_height = frame_->height();
#endif
JumpTarget true_target;
JumpTarget false_target;
LoadCondition(x, &true_target, &false_target, false);
if (has_cc()) {
UNIMPLEMENTED_MIPS();
}
if (true_target.is_linked() || false_target.is_linked()) {
UNIMPLEMENTED_MIPS();
}
ASSERT(has_valid_frame());
ASSERT(!has_cc());
ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
void CodeGenerator::LoadGlobal() {
VirtualFrame::SpilledScope spilled_scope;
__ lw(a0, GlobalObject());
frame_->EmitPush(a0);
}
void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
VirtualFrame::SpilledScope spilled_scope;
if (slot->type() == Slot::LOOKUP) {
UNIMPLEMENTED_MIPS();
} else {
__ lw(a0, SlotOperand(slot, a2));
frame_->EmitPush(a0);
if (slot->var()->mode() == Variable::CONST) {
UNIMPLEMENTED_MIPS();
}
}
}
void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
ASSERT(slot != NULL);
if (slot->type() == Slot::LOOKUP) {
UNIMPLEMENTED_MIPS();
} else {
ASSERT(!slot->var()->is_dynamic());
JumpTarget exit;
if (init_state == CONST_INIT) {
UNIMPLEMENTED_MIPS();
}
// We must execute the store. Storing a variable must keep the
// (new) value on the stack. This is necessary for compiling
// assignment expressions.
//
// Note: We will reach here even with slot->var()->mode() ==
// Variable::CONST because of const declarations which will
// initialize consts to 'the hole' value and by doing so, end up
// calling this code. a2 may be loaded with context; used below in
// RecordWrite.
frame_->EmitPop(a0);
__ sw(a0, SlotOperand(slot, a2));
frame_->EmitPush(a0);
if (slot->type() == Slot::CONTEXT) {
UNIMPLEMENTED_MIPS();
}
// If we definitely did not jump over the assignment, we do not need
// to bind the exit label. Doing so can defeat peephole
// optimization.
if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
exit.Bind();
}
}
}
void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
VirtualFrame::SpilledScope spilled_scope;
for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
VisitAndSpill(statements->at(i));
}
}
@@ -98,7 +471,14 @@ void CodeGenerator::VisitBlock(Block* node) {
void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
UNIMPLEMENTED_MIPS();
VirtualFrame::SpilledScope spilled_scope;
frame_->EmitPush(cp);
__ li(t0, Operand(pairs));
frame_->EmitPush(t0);
__ li(t0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
frame_->EmitPush(t0);
frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
// The result is discarded.
}
@@ -108,7 +488,17 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ ExpressionStatement");
CodeForStatementPosition(node);
Expression* expression = node->expression();
expression->MarkAsStatement();
LoadAndSpill(expression);
frame_->Drop();
ASSERT(frame_->height() == original_height);
}
@@ -133,7 +523,22 @@ void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
UNIMPLEMENTED_MIPS();
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ ReturnStatement");
CodeForStatementPosition(node);
LoadAndSpill(node->expression());
if (function_return_is_shadowed_) {
frame_->EmitPop(v0);
function_return_.Jump();
} else {
// Pop the result from the frame and prepare the frame for
// returning thus making it easier to merge.
frame_->EmitPop(v0);
frame_->PrepareForReturn();
function_return_.Jump();
}
}
@@ -192,8 +597,8 @@ void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
}
void CodeGenerator::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* node) {
void CodeGenerator::VisitSharedFunctionInfoLiteral(
SharedFunctionInfoLiteral* node) {
UNIMPLEMENTED_MIPS();
}
@@ -204,17 +609,45 @@ void CodeGenerator::VisitConditional(Conditional* node) {
void CodeGenerator::VisitSlot(Slot* node) {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ Slot");
LoadFromSlot(node, typeof_state());
ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ VariableProxy");
Variable* var = node->var();
Expression* expr = var->rewrite();
if (expr != NULL) {
Visit(expr);
} else {
ASSERT(var->is_global());
Reference ref(this, node);
ref.GetValueAndSpill();
}
ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitLiteral(Literal* node) {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ Literal");
__ li(t0, Operand(node->handle()));
frame_->EmitPush(t0);
ASSERT(frame_->height() == original_height + 1);
}
@@ -239,7 +672,47 @@ void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
void CodeGenerator::VisitAssignment(Assignment* node) {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ Assignment");
{ Reference target(this, node->target());
if (target.is_illegal()) {
// Fool the virtual frame into thinking that we left the assignment's
// value on the frame.
frame_->EmitPush(zero_reg);
ASSERT(frame_->height() == original_height + 1);
return;
}
if (node->op() == Token::ASSIGN ||
node->op() == Token::INIT_VAR ||
node->op() == Token::INIT_CONST) {
LoadAndSpill(node->value());
} else {
UNIMPLEMENTED_MIPS();
}
Variable* var = node->target()->AsVariableProxy()->AsVariable();
if (var != NULL &&
(var->mode() == Variable::CONST) &&
node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
// Assignment ignored - leave the value on the stack.
} else {
CodeForSourcePosition(node->position());
if (node->op() == Token::INIT_CONST) {
// Dynamic constant initializations must use the function context
// and initialize the actual constant declared. Dynamic variable
// initializations are simply assignments and use SetValue.
target.SetValue(CONST_INIT);
} else {
target.SetValue(NOT_CONST_INIT);
}
}
}
ASSERT(frame_->height() == original_height + 1);
}
@@ -254,7 +727,73 @@ void CodeGenerator::VisitProperty(Property* node) {
void CodeGenerator::VisitCall(Call* node) {
#ifdef DEBUG
int original_height = frame_->height();
#endif
VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ Call");
Expression* function = node->expression();
ZoneList<Expression*>* args = node->arguments();
// Standard function call.
// Check if the function is a variable or a property.
Variable* var = function->AsVariableProxy()->AsVariable();
Property* property = function->AsProperty();
// ------------------------------------------------------------------------
// Fast-case: Use inline caching.
// ---
// According to ECMA-262, section 11.2.3, page 44, the function to call
// must be resolved after the arguments have been evaluated. The IC code
// automatically handles this by loading the arguments before the function
// is resolved in cache misses (this also holds for megamorphic calls).
// ------------------------------------------------------------------------
if (var != NULL && var->is_possibly_eval()) {
UNIMPLEMENTED_MIPS();
} else if (var != NULL && !var->is_this() && var->is_global()) {
// ----------------------------------
// JavaScript example: 'foo(1, 2, 3)' // foo is global
// ----------------------------------
int arg_count = args->length();
// We need sp to be 8-byte aligned when calling the stub.
__ SetupAlignedCall(t0, arg_count);
// Pass the global object as the receiver and let the IC stub
// patch the stack to use the global proxy as 'this' in the
// invoked function.
LoadGlobal();
// Load the arguments.
for (int i = 0; i < arg_count; i++) {
LoadAndSpill(args->at(i));
}
// Setup the receiver register and call the IC initialization code.
__ li(a2, Operand(var->name()));
InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
CodeForSourcePosition(node->position());
frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
arg_count + 1);
__ ReturnFromAlignedCall();
__ lw(cp, frame_->Context());
// Remove the function from the stack.
frame_->EmitPush(v0);
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
UNIMPLEMENTED_MIPS();
} else if (property != NULL) {
UNIMPLEMENTED_MIPS();
} else {
UNIMPLEMENTED_MIPS();
}
ASSERT(frame_->height() == original_height + 1);
}
@@ -439,9 +978,104 @@ bool CodeGenerator::HasValidEntryRegisters() { return true; }
#undef __
#define __ ACCESS_MASM(masm)
// -----------------------------------------------------------------------------
// Reference support
Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
return Handle<Code>::null();
Reference::Reference(CodeGenerator* cgen,
Expression* expression,
bool persist_after_get)
: cgen_(cgen),
expression_(expression),
type_(ILLEGAL),
persist_after_get_(persist_after_get) {
cgen->LoadReference(this);
}
Reference::~Reference() {
ASSERT(is_unloaded() || is_illegal());
}
Handle<String> Reference::GetName() {
ASSERT(type_ == NAMED);
Property* property = expression_->AsProperty();
if (property == NULL) {
// Global variable reference treated as a named property reference.
VariableProxy* proxy = expression_->AsVariableProxy();
ASSERT(proxy->AsVariable() != NULL);
ASSERT(proxy->AsVariable()->is_global());
return proxy->name();
} else {
Literal* raw_name = property->key()->AsLiteral();
ASSERT(raw_name != NULL);
return Handle<String>(String::cast(*raw_name->handle()));
}
}
void Reference::GetValue() {
ASSERT(cgen_->HasValidEntryRegisters());
ASSERT(!is_illegal());
ASSERT(!cgen_->has_cc());
Property* property = expression_->AsProperty();
if (property != NULL) {
cgen_->CodeForSourcePosition(property->position());
}
switch (type_) {
case SLOT: {
UNIMPLEMENTED_MIPS();
break;
}
case NAMED: {
UNIMPLEMENTED_MIPS();
break;
}
case KEYED: {
UNIMPLEMENTED_MIPS();
break;
}
default:
UNREACHABLE();
}
}
void Reference::SetValue(InitState init_state) {
ASSERT(!is_illegal());
ASSERT(!cgen_->has_cc());
MacroAssembler* masm = cgen_->masm();
Property* property = expression_->AsProperty();
if (property != NULL) {
cgen_->CodeForSourcePosition(property->position());
}
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Store to Slot");
Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
cgen_->StoreToSlot(slot, init_state);
cgen_->UnloadReference(this);
break;
}
case NAMED: {
UNIMPLEMENTED_MIPS();
break;
}
case KEYED: {
UNIMPLEMENTED_MIPS();
break;
}
default:
UNREACHABLE();
}
}
@@ -453,6 +1087,12 @@ void CompareStub::Generate(MacroAssembler* masm) {
}
Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
UNIMPLEMENTED_MIPS();
return Handle<Code>::null();
}
void StackCheckStub::Generate(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
__ break_(0x790);
@@ -477,50 +1117,269 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate) {
// s0: number of arguments including receiver (C callee-saved)
// s1: pointer to the first argument (C callee-saved)
// s2: pointer to builtin function (C callee-saved)
if (do_gc) {
UNIMPLEMENTED_MIPS();
}
ExternalReference scope_depth =
ExternalReference::heap_always_allocate_scope_depth();
if (always_allocate) {
UNIMPLEMENTED_MIPS();
__ break_(0x826);
}
// Call C built-in.
// a0 = argc, a1 = argv
__ mov(a0, s0);
__ mov(a1, s1);
__ CallBuiltin(s2);
if (always_allocate) {
UNIMPLEMENTED_MIPS();
}
// Check for failure result.
Label failure_returned;
ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
__ addiu(a2, v0, 1);
__ andi(t0, a2, kFailureTagMask);
__ Branch(eq, &failure_returned, t0, Operand(zero_reg));
// Exit C frame and return.
// v0:v1: result
// sp: stack pointer
// fp: frame pointer
__ LeaveExitFrame(mode_);
// Check if we should retry or throw exception.
Label retry;
__ bind(&failure_returned);
ASSERT(Failure::RETRY_AFTER_GC == 0);
__ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
__ Branch(eq, &retry, t0, Operand(zero_reg));
// Special handling of out of memory exceptions.
Failure* out_of_memory = Failure::OutOfMemoryException();
__ Branch(eq, throw_out_of_memory_exception,
v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
// Retrieve the pending exception and clear the variable.
__ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
__ lw(a3, MemOperand(t0));
__ LoadExternalReference(t0,
ExternalReference(Top::k_pending_exception_address));
__ lw(v0, MemOperand(t0));
__ sw(a3, MemOperand(t0));
// Special handling of termination exceptions which are uncatchable
// by javascript code.
__ Branch(eq, throw_termination_exception,
v0, Operand(Factory::termination_exception()));
// Handle normal exception.
__ b(throw_normal_exception);
__ nop(); // Branch delay slot nop.
__ bind(&retry); // Pass the last failure (v0) as a parameter when retrying.
}
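
The addiu/andi pair at the top of the failure check works because of v8's failure tagging: failure values carry kFailureTag (0b11) in their two low bits, so adding 1 clears exactly those bits for failures and for nothing else (smis end in 0, heap pointers of this era in 01). A small self-contained check of that invariant, using the era's constants:

#include <cassert>
#include <cstdint>

// Failure tagging as used by "addiu(a2, v0, 1); andi(t0, a2, kFailureTagMask)":
// v0 is a failure exactly when (v0 + 1) has zero low tag bits.
const int kFailureTagSize = 2;
const intptr_t kFailureTag = 3;
const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1;

bool IsFailure(intptr_t value) {
  return ((value + 1) & kFailureTagMask) == 0;
}

int main() {
  intptr_t failure = (0x1234 << kFailureTagSize) | kFailureTag;  // low bits 11
  intptr_t smi = 0x1234 << 1;                                    // low bit 0
  assert(((kFailureTag + 1) & kFailureTagMask) == 0);  // same ASSERT as above
  assert(IsFailure(failure));
  assert(!IsFailure(smi));
  return 0;
}
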
void CEntryStub::Generate(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
__ break_(0x831);
// Called from JavaScript; parameters are on stack as if calling JS function
// a0: number of arguments including receiver
// a1: pointer to builtin function
// fp: frame pointer (restored after C call)
// sp: stack pointer (restored as callee's sp after C call)
// cp: current context (C callee-saved)
// NOTE: Invocations of builtins may return failure objects
// instead of a proper result. The builtin entry handles
// this by performing a garbage collection and retrying the
// builtin once.
// Enter the exit frame that transitions from JavaScript to C++.
__ EnterExitFrame(mode_, s0, s1, s2);
// s0: number of arguments (C callee-saved)
// s1: pointer to first argument (C callee-saved)
// s2: pointer to builtin function (C callee-saved)
Label throw_normal_exception;
Label throw_termination_exception;
Label throw_out_of_memory_exception;
// Call into the runtime system.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
&throw_out_of_memory_exception,
false,
false);
// Do space-specific GC and retry runtime call.
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
&throw_out_of_memory_exception,
true,
false);
// Do full GC and retry runtime call one final time.
Failure* failure = Failure::InternalError();
__ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
GenerateCore(masm,
&throw_normal_exception,
&throw_termination_exception,
&throw_out_of_memory_exception,
true,
true);
__ bind(&throw_out_of_memory_exception);
GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
__ bind(&throw_termination_exception);
GenerateThrowUncatchable(masm, TERMINATION);
__ bind(&throw_normal_exception);
GenerateThrowTOS(masm);
}
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
UNIMPLEMENTED_MIPS();
Label invoke, exit;
// Registers:
// a0: entry address
// a1: function
// a2: receiver
// a3: argc
//
// Stack:
// 4 args slots
// args
// Save callee saved registers on the stack.
__ MultiPush(kCalleeSaved | ra.bit());
__ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit());
// ********** State **********
//
// * Registers:
// We build an EntryFrame.
__ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
__ li(t2, Operand(Smi::FromInt(marker)));
__ li(t1, Operand(Smi::FromInt(marker)));
__ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
__ lw(t0, MemOperand(t0));
__ MultiPush(t0.bit() | t1.bit() | t2.bit() | t3.bit());
// Setup frame pointer for the frame to be pushed.
__ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
// Load argv in s0 register.
__ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize +
StandardFrameConstants::kCArgsSlotsSize));
// Registers:
// a0: entry_address
// a1: function
// a2: receiver_pointer
// a3: argc
// s0: argv
//
// * Stack:
// ---------------------------
// args
// ---------------------------
// 4 args slots
// ---------------------------
// Stack:
// caller fp |
// function slot | entry frame
// context slot |
// bad fp (0xff...f) |
// callee saved registers + ra
// ---------------------------
// 4 args slots
// args
// Call a faked try-block that does the invoke.
__ bal(&invoke);
__ nop(); // Branch delay slot nop.
// Caught exception: Store result (exception) in the pending
// exception field in the JSEnv and return a failure sentinel.
// Coming in here the fp will be invalid because the PushTryHandler below
// sets it to 0 to signal the existence of the JSEntry frame.
__ LoadExternalReference(t0,
ExternalReference(Top::k_pending_exception_address));
__ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
__ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
__ nop(); // Branch delay slot nop.
// Invoke: Link this frame into the handler chain.
__ bind(&invoke);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bal(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
// saved values before returning a failure to C.
// Clear any pending exceptions.
__ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
__ lw(t1, MemOperand(t0));
__ LoadExternalReference(t0,
ExternalReference(Top::k_pending_exception_address));
__ sw(t1, MemOperand(t0));
// Invoke the function by calling through JS entry trampoline builtin.
// Notice that we cannot store a reference to the trampoline code directly in
// this stub, because runtime stubs are not traversed when doing GC.
// Registers:
// a0: entry_address
// a1: function
// a2: receiver_pointer
// a3: argc
// s0: argv
//
// ***************************
// Stack:
// handler frame
// entry frame
// callee saved registers + ra
// 4 args slots
// args
__ break_(0x1234);
if (is_construct) {
ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
__ LoadExternalReference(t0, construct_entry);
} else {
ExternalReference entry(Builtins::JSEntryTrampoline);
__ LoadExternalReference(t0, entry);
}
__ lw(t9, MemOperand(t0)); // deref address
// Call JSEntryTrampoline.
__ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
__ CallBuiltin(t9);
// Unlink this frame from the handler chain. When reading the
// address of the next handler, there is no need to use the address
// displacement since the current stack pointer (sp) points directly
// to the stack handler.
__ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
__ LoadExternalReference(t0, ExternalReference(Top::k_handler_address));
__ sw(t1, MemOperand(t0));
// This restores sp to its position before PushTryHandler.
__ addiu(sp, sp, StackHandlerConstants::kSize);
__ bind(&exit); // v0 holds result
// Restore the top frame descriptors from the stack.
__ Pop(t1);
__ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
__ sw(t1, MemOperand(t0));
// Reset the stack to the callee saved registers.
__ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
// Restore callee saved registers from the stack.
__ MultiPop(kCalleeSaved | ra.bit());
// Load a result.
__ li(v0, Operand(0x1234));
__ jr(ra);
// Return
__ nop();
__ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit());
// Return.
__ Jump(ra);
}
......
@@ -42,7 +42,77 @@ enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// Reference support
// A reference is a C++ stack-allocated object that keeps an ECMA
// reference on the execution stack while in scope. For variables
// the reference is empty, indicating that it isn't necessary to
// store state on the stack for keeping track of references to those.
// For properties, we keep either one (named) or two (indexed) values
// on the execution stack to represent the reference.
class Reference BASE_EMBEDDED {
public:
// The values of the types are important, see size().
enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
Reference(CodeGenerator* cgen,
Expression* expression,
bool persist_after_get = false);
~Reference();
Expression* expression() const { return expression_; }
Type type() const { return type_; }
void set_type(Type value) {
ASSERT_EQ(ILLEGAL, type_);
type_ = value;
}
void set_unloaded() {
ASSERT_NE(ILLEGAL, type_);
ASSERT_NE(UNLOADED, type_);
type_ = UNLOADED;
}
// The size the reference takes up on the stack.
int size() const {
return (type_ < SLOT) ? 0 : type_;
}
bool is_illegal() const { return type_ == ILLEGAL; }
bool is_slot() const { return type_ == SLOT; }
bool is_property() const { return type_ == NAMED || type_ == KEYED; }
bool is_unloaded() const { return type_ == UNLOADED; }
// Return the name. Only valid for named property references.
Handle<String> GetName();
// Generate code to push the value of the reference on top of the
// expression stack. The reference is expected to be already on top of
// the expression stack, and it is consumed by the call unless the
// reference is for a compound assignment.
// If the reference is not consumed, it is left in place under its value.
void GetValue();
// Generate code to pop a reference, push the value of the reference,
// and then spill the stack frame.
inline void GetValueAndSpill();
// Generate code to store the value on top of the expression stack in the
// reference. The reference is expected to be immediately below the value
// on the expression stack. The value is stored in the location specified
// by the reference, and is left on top of the stack, after the reference
// is popped from beneath it (unloaded).
void SetValue(InitState init_state);
private:
CodeGenerator* cgen_;
Expression* expression_;
Type type_;
// Keep the reference on the stack after get, so it can be used by set later.
bool persist_after_get_;
};
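
As the comment on the Type enum says, the numeric values are chosen so that size() is just the enum value clamped at zero: a named reference keeps one word (the object) on the stack and a keyed reference two (object and key), while unloaded and illegal references occupy nothing. A compact illustration:

#include <cassert>

// The enum values double as stack sizes: anything below SLOT occupies no
// slots; NAMED keeps one value on the stack, KEYED two.
enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

int SizeOf(Type type) { return (type < SLOT) ? 0 : type; }

int main() {
  assert(SizeOf(UNLOADED) == 0);
  assert(SizeOf(ILLEGAL) == 0);
  assert(SizeOf(SLOT) == 0);
  assert(SizeOf(NAMED) == 1);   // the object
  assert(SizeOf(KEYED) == 2);   // the object and the key
  return 0;
}
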
// -----------------------------------------------------------------------------
// Code generation state
// The state is passed down the AST by the code generator (and back up, in
@@ -89,7 +159,7 @@ class CodeGenState BASE_EMBEDDED {
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// CodeGenerator
class CodeGenerator: public AstVisitor {
@@ -152,7 +222,7 @@ class CodeGenerator: public AstVisitor {
// Number of instructions used for the JS return sequence. The constant is
// used by the debugger to patch the JS return sequence.
static const int kJSReturnSequenceLength = 6;
static const int kJSReturnSequenceLength = 7;
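
The bump from 6 to 7 matches the return sequence emitted after check_exit_codesize in CodeGenerator::Generate: counting the delay-slot nop that Jump(ra) emits, seven instructions lie between the label and the end of the sequence. A worked count (instruction names taken from that sequence):

#include <cassert>

int main() {
  // Counting the MIPS JS return sequence emitted after check_exit_codesize:
  int n = 0;
  n += 1;  // mov(sp, fp)
  n += 1;  // lw(fp, MemOperand(sp, 0))
  n += 1;  // lw(ra, MemOperand(sp, 4))
  n += 1;  // addiu(sp, sp, 8)
  n += 1;  // addiu(sp, sp, (num_parameters + 1) * kPointerSize)
  n += 2;  // Jump(ra): jr plus its branch delay slot nop
  assert(n == 7);  // kJSReturnSequenceLength
  return 0;
}
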
// If the name is an inline runtime function call return the number of
// expected arguments. Otherwise return -1.
@@ -186,9 +256,51 @@ class CodeGenerator: public AstVisitor {
AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Visit a statement and then spill the virtual frame if control flow can
// reach the end of the statement (ie, it does not exit via break,
// continue, return, or throw). This function is used temporarily while
// the code generator is being transformed.
inline void VisitAndSpill(Statement* statement);
// Visit a list of statements and then spill the virtual frame if control
// flow can reach the end of the list.
inline void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
// Main code generation function
void Generate(CompilationInfo* info);
// The following are used by class Reference.
void LoadReference(Reference* ref);
void UnloadReference(Reference* ref);
MemOperand ContextOperand(Register context, int index) const {
return MemOperand(context, Context::SlotOffset(index));
}
MemOperand SlotOperand(Slot* slot, Register tmp);
// Expressions
MemOperand GlobalObject() const {
return ContextOperand(cp, Context::GLOBAL_INDEX);
}
void LoadCondition(Expression* x,
JumpTarget* true_target,
JumpTarget* false_target,
bool force_cc);
void Load(Expression* x);
void LoadGlobal();
// Generate code to push the value of an expression on top of the frame
// and then spill the frame fully to memory. This function is used
// temporarily while the code generator is being transformed.
inline void LoadAndSpill(Expression* expression);
// Read a value from a slot and leave it on top of the expression stack.
void LoadFromSlot(Slot* slot, TypeofState typeof_state);
// Store the value on top of the stack to a slot.
void StoreToSlot(Slot* slot, InitState init_state);
struct InlineRuntimeLUT {
void (CodeGenerator::*method)(ZoneList<Expression*>*);
const char* name;
@@ -290,7 +402,6 @@ class CodeGenerator: public AstVisitor {
CompilationInfo* info_;
// Code generation state
Scope* scope_;
VirtualFrame* frame_;
RegisterAllocator* allocator_;
Condition cc_reg_;
......
@@ -91,8 +91,7 @@ Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
Address InternalFrame::GetCallerStackPointer() const {
UNIMPLEMENTED_MIPS();
return static_cast<Address>(NULL); // UNIMPLEMENTED RETURN
return fp() + StandardFrameConstants::kCallerSPOffset;
}
......
@@ -104,7 +104,7 @@ class ExitFrameConstants : public AllStatic {
static const int kCallerPCOffset = +1 * kPointerSize;
// FP-relative displacement of the caller's SP.
static const int kCallerSPDisplacement = +4 * kPointerSize;
static const int kCallerSPDisplacement = +3 * kPointerSize;
};
......
@@ -74,6 +74,47 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
UNIMPLEMENTED_MIPS();
// Registers:
// a2: name
// ra: return address
// Get the receiver of the function from the stack.
__ lw(a3, MemOperand(sp, argc * kPointerSize));
__ EnterInternalFrame();
// Push the receiver and the name of the function.
__ MultiPush(a2.bit() | a3.bit());
// Call the entry.
__ li(a0, Operand(2));
__ li(a1, Operand(ExternalReference(IC_Utility(kCallIC_Miss))));
CEntryStub stub(1);
__ CallStub(&stub);
// Move the result to a1 and leave the internal frame.
__ mov(a1, v0);
__ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
Label invoke, global;
__ lw(a2, MemOperand(sp, argc * kPointerSize));
__ andi(t0, a2, kSmiTagMask);
__ Branch(eq, &invoke, t0, Operand(zero_reg));
__ GetObjectType(a2, a3, a3);
__ Branch(eq, &global, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
__ Branch(ne, &invoke, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
// Patch the receiver on the stack.
__ bind(&global);
__ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
__ sw(a2, MemOperand(sp, argc * kPointerSize));
// Invoke the function.
ParameterCount actual(argc);
__ bind(&invoke);
__ InvokeFunction(a1, actual, JUMP_FUNCTION);
}
// Defined in ic.cc.
......
@@ -42,7 +42,37 @@ namespace internal {
#define __ ACCESS_MASM(cgen()->masm())
void JumpTarget::DoJump() {
UNIMPLEMENTED_MIPS();
ASSERT(cgen()->has_valid_frame());
// Live non-frame registers are not allowed at unconditional jumps
// because we have no way of invalidating the corresponding results
// which are still live in the C++ code.
ASSERT(cgen()->HasValidEntryRegisters());
if (is_bound()) {
// Backward jump. There is already a frame expectation at the target.
ASSERT(direction_ == BIDIRECTIONAL);
cgen()->frame()->MergeTo(entry_frame_);
cgen()->DeleteFrame();
} else {
// Use the current frame as the expected one at the target if necessary.
if (entry_frame_ == NULL) {
entry_frame_ = cgen()->frame();
RegisterFile empty;
cgen()->SetFrame(NULL, &empty);
} else {
cgen()->frame()->MergeTo(entry_frame_);
cgen()->DeleteFrame();
}
// The predicate is_linked() should be made true. Its implementation
// detects the presence of a frame pointer in the reaching_frames_ list.
if (!is_linked()) {
reaching_frames_.Add(NULL);
ASSERT(is_linked());
}
}
__ b(&entry_label_);
__ nop(); // Branch delay slot nop.
}
@@ -57,12 +87,47 @@ void JumpTarget::Call() {
void JumpTarget::DoBind() {
UNIMPLEMENTED_MIPS();
ASSERT(!is_bound());
// Live non-frame registers are not allowed at the start of a basic
// block.
ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
if (cgen()->has_valid_frame()) {
// If there is a current frame we can use it on the fall through.
if (entry_frame_ == NULL) {
entry_frame_ = new VirtualFrame(cgen()->frame());
} else {
ASSERT(cgen()->frame()->Equals(entry_frame_));
}
} else {
// If there is no current frame we must have an entry frame which we can
// copy.
ASSERT(entry_frame_ != NULL);
RegisterFile empty;
cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
}
// The predicate is_linked() should be made false. Its implementation
// detects the presence (or absence) of frame pointers in the
// reaching_frames_ list. If we inserted a bogus frame to make
// is_linked() true, remove it now.
if (is_linked()) {
reaching_frames_.Clear();
}
__ bind(&entry_label_);
}
void BreakTarget::Jump() {
UNIMPLEMENTED_MIPS();
// We do not currently emit merge code for jumps, so we need to do
// it explicitly here. The only merging necessary is to drop extra
// statement state from the stack.
ASSERT(cgen()->has_valid_frame());
int count = cgen()->frame()->height() - expected_height_;
cgen()->frame()->Drop(count);
DoJump();
}
@@ -72,7 +137,26 @@ void BreakTarget::Jump(Result* arg) {
void BreakTarget::Bind() {
UNIMPLEMENTED_MIPS();
#ifdef DEBUG
// All the forward-reaching frames should have been adjusted at the
// jumps to this target.
for (int i = 0; i < reaching_frames_.length(); i++) {
ASSERT(reaching_frames_[i] == NULL ||
reaching_frames_[i]->height() == expected_height_);
}
#endif
// Drop leftover statement state from the frame before merging, even
// on the fall through. This is so we can bind the return target
// with state on the frame.
if (cgen()->has_valid_frame()) {
int count = cgen()->frame()->height() - expected_height_;
// We do not currently emit merge code at binding sites, so we need
// to do it explicitly here. The only merging necessary is to drop extra
// statement state from the stack.
cgen()->frame()->Drop(count);
}
DoBind();
}
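
Both BreakTarget::Jump and BreakTarget::Bind reconcile the frame by dropping whatever sits above the target's expected height before merging. A toy model of that bookkeeping (VirtualFrame here is a stand-in, not v8's class):

#include <cassert>
#include <vector>

// Toy model of the height bookkeeping above: before merging, any slots
// above the target's expected height are dropped.
struct VirtualFrame {
  std::vector<int> elements;
  int height() const { return static_cast<int>(elements.size()); }
  void Drop(int count) { elements.resize(elements.size() - count); }
};

int main() {
  VirtualFrame frame;
  frame.elements = {1, 2, 3, 4, 5};  // leftover statement state on top
  int expected_height = 3;
  int count = frame.height() - expected_height;
  frame.Drop(count);
  assert(frame.height() == expected_height);
  return 0;
}
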
......
@@ -55,7 +55,7 @@ void MacroAssembler::Jump(Register target, Condition cond,
void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
Condition cond, Register r1, const Operand& r2) {
Jump(Operand(target), cond, r1, r2);
Jump(Operand(target, rmode), cond, r1, r2);
}
@@ -81,7 +81,7 @@ void MacroAssembler::Call(Register target,
void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
Condition cond, Register r1, const Operand& r2) {
Call(Operand(target), cond, r1, r2);
Call(Operand(target, rmode), cond, r1, r2);
}
@@ -106,7 +106,7 @@ void MacroAssembler::Ret(Condition cond, Register r1, const Operand& r2) {
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index) {
lw(destination, MemOperand(s4, index << kPointerSizeLog2));
lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
void MacroAssembler::LoadRoot(Register destination,
@@ -114,8 +114,7 @@ void MacroAssembler::LoadRoot(Register destination,
Condition cond,
Register src1, const Operand& src2) {
Branch(NegateCondition(cond), 2, src1, src2);
nop();
lw(destination, MemOperand(s4, index << kPointerSizeLog2));
lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
@@ -320,7 +319,6 @@ void MacroAssembler::movn(Register rd, Register rt) {
}
// load wartd in a register
void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
ASSERT(!j.is_reg());
@@ -372,7 +370,7 @@ void MacroAssembler::MultiPush(RegList regs) {
int16_t NumToPush = NumberOfBitsSet(regs);
addiu(sp, sp, -4 * NumToPush);
for (int16_t i = 0; i < kNumRegisters; i++) {
for (int16_t i = kNumRegisters; i > 0; i--) {
if ((regs & (1 << i)) != 0) {
sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
}
@@ -385,7 +383,7 @@ void MacroAssembler::MultiPushReversed(RegList regs) {
int16_t NumToPush = NumberOfBitsSet(regs);
addiu(sp, sp, -4 * NumToPush);
for (int16_t i = kNumRegisters; i > 0; i--) {
for (int16_t i = 0; i < kNumRegisters; i++) {
if ((regs & (1 << i)) != 0) {
sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
}
@@ -396,7 +394,7 @@ void MacroAssembler::MultiPushReversed(RegList regs) {
void MacroAssembler::MultiPop(RegList regs) {
int16_t NumSaved = 0;
for (int16_t i = kNumRegisters; i > 0; i--) {
for (int16_t i = 0; i < kNumRegisters; i++) {
if ((regs & (1 << i)) != 0) {
lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
}
@@ -408,7 +406,7 @@ void MacroAssembler::MultiPopReversed(RegList regs) {
void MacroAssembler::MultiPopReversed(RegList regs) {
int16_t NumSaved = 0;
for (int16_t i = 0; i < kNumRegisters; i++) {
for (int16_t i = kNumRegisters; i > 0; i--) {
if ((regs & (1 << i)) != 0) {
lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
}
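
These four loop reversals make the push and pop orders mirror images: MultiPush now visits registers from the highest bit down, so the highest-numbered register lands at the highest stack address, and MultiPop walks up from the lowest bit. A sketch of the offset assignment under that scheme (simplified, with a plain bitmask in place of RegList):

#include <cassert>
#include <vector>

// Registers are visited from the highest bit down, so with N registers to
// push, the highest-numbered one gets the highest offset (closest to the
// old sp) and MultiPop, walking from bit 0 up, reads them back in reverse.
std::vector<int> PushOffsets(unsigned regs, int num_registers) {
  int num_to_push = 0;
  for (int i = 0; i < num_registers; i++) {
    if (regs & (1u << i)) num_to_push++;            // NumberOfBitsSet(regs)
  }
  int num_saved = 0;
  std::vector<int> offset(num_registers, -1);
  for (int i = num_registers - 1; i >= 0; i--) {
    if (regs & (1u << i)) {
      offset[i] = 4 * (num_to_push - ++num_saved);  // sw(ToRegister(i), ...)
    }
  }
  return offset;
}

int main() {
  // Push registers 1 and 3: register 3 is stored at sp+4, register 1 at sp+0.
  std::vector<int> off = PushOffsets((1u << 1) | (1u << 3), 32);
  assert(off[3] == 4 && off[1] == 0);
  return 0;
}
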
@@ -484,6 +482,8 @@ void MacroAssembler::Branch(Condition cond, int16_t offset, Register rs,
default:
UNREACHABLE();
}
// Emit a nop in the branch delay slot.
nop();
}
@@ -550,6 +550,8 @@ void MacroAssembler::Branch(Condition cond, Label* L, Register rs,
default:
UNREACHABLE();
}
// Emit a nop in the branch delay slot.
nop();
}
@@ -629,6 +631,8 @@ void MacroAssembler::BranchAndLink(Condition cond, int16_t offset, Register rs,
default:
UNREACHABLE();
}
// Emit a nop in the branch delay slot.
nop();
}
@@ -704,6 +708,8 @@ void MacroAssembler::BranchAndLink(Condition cond, Label* L, Register rs,
default:
UNREACHABLE();
}
// Emit a nop in the branch delay slot.
nop();
}
@@ -714,7 +720,6 @@ void MacroAssembler::Jump(const Operand& target,
jr(target.rm());
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
jr(target.rm());
}
} else { // !target.is_reg()
@@ -723,20 +728,20 @@
j(target.imm32_);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
j(target.imm32_); // will generate only one instruction.
j(target.imm32_); // Will generate only one instruction.
}
} else { // MustUseAt(target)
li(at, rt);
li(at, target);
if (cond == cc_always) {
jr(at);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
jr(at); // will generate only one instruction.
jr(at); // Will generate only one instruction.
}
}
}
// Emit a nop in the branch delay slot.
nop();
}
@@ -747,7 +752,6 @@ void MacroAssembler::Call(const Operand& target,
jalr(target.rm());
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
jalr(target.rm());
}
} else { // !target.is_reg()
@@ -756,20 +760,20 @@
jal(target.imm32_);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
jal(target.imm32_); // will generate only one instruction.
jal(target.imm32_); // Will generate only one instruction.
}
} else { // MustUseAt(target)
li(at, rt);
li(at, target);
if (cond == cc_always) {
jalr(at);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
nop();
jalr(at); // will generate only one instruction.
jalr(at); // Will generate only one instruction.
}
}
}
// Emit a nop in the branch delay slot.
nop();
}
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
@@ -802,7 +806,58 @@ void MacroAssembler::Call(Label* target) {
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
UNIMPLEMENTED_MIPS();
// Adjust this code if not the case.
ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
// The return address is passed in register ra.
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
li(t0, Operand(StackHandler::TRY_CATCH));
} else {
li(t0, Operand(StackHandler::TRY_FINALLY));
}
ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
&& StackHandlerConstants::kFPOffset == 2 * kPointerSize
&& StackHandlerConstants::kPCOffset == 3 * kPointerSize
&& StackHandlerConstants::kNextOffset == 0 * kPointerSize);
// Save the current handler as the next handler.
LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
lw(t1, MemOperand(t2));
addiu(sp, sp, -StackHandlerConstants::kSize);
sw(ra, MemOperand(sp, 12));
sw(fp, MemOperand(sp, 8));
sw(t0, MemOperand(sp, 4));
sw(t1, MemOperand(sp, 0));
// Link this handler as the new current one.
sw(sp, MemOperand(t2));
} else {
// Must preserve a0-a3, and s0 (argv).
ASSERT(try_location == IN_JS_ENTRY);
ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
&& StackHandlerConstants::kFPOffset == 2 * kPointerSize
&& StackHandlerConstants::kPCOffset == 3 * kPointerSize
&& StackHandlerConstants::kNextOffset == 0 * kPointerSize);
// The frame pointer does not point to a JS frame so we save NULL
// for fp. We expect the code throwing an exception to check fp
// before dereferencing it to restore the context.
li(t0, Operand(StackHandler::ENTRY));
// Save the current handler as the next handler.
LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
lw(t1, MemOperand(t2));
addiu(sp, sp, -StackHandlerConstants::kSize);
sw(ra, MemOperand(sp, 12));
sw(zero_reg, MemOperand(sp, 8));
sw(t0, MemOperand(sp, 4));
sw(t1, MemOperand(sp, 0));
// Link this handler as the new current one.
sw(sp, MemOperand(t2));
}
}
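
The four sw instructions lay out a stack handler record whose field offsets match the StackHandlerConstants assertions above: next handler at sp+0, state at sp+4, fp at sp+8, and the return address at sp+12. A struct mirroring that 32-bit layout (uint32_t fields keep the MIPS32 offsets correct on any host; illustrative only):

#include <cstddef>
#include <cstdint>

// Layout created by the four sw instructions above (offsets from sp).
struct StackHandler {
  uint32_t next;   // sp + 0:  previous handler, from Top::k_handler_address
  uint32_t state;  // sp + 4:  TRY_CATCH, TRY_FINALLY, or ENTRY
  uint32_t fp;     // sp + 8:  frame pointer (zero for JS entry handlers)
  uint32_t pc;     // sp + 12: return address (ra)
};

static_assert(offsetof(StackHandler, next) == 0, "kNextOffset");
static_assert(offsetof(StackHandler, state) == 4, "kStateOffset");
static_assert(offsetof(StackHandler, fp) == 8, "kFPOffset");
static_assert(offsetof(StackHandler, pc) == 12, "kPCOffset");
static_assert(sizeof(StackHandler) == 16, "kSize == 4 * kPointerSize");

int main() { return 0; }
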
@@ -812,12 +867,233 @@ void MacroAssembler::PopTryHandler() {
// ---------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// Activation frames
void MacroAssembler::SetupAlignedCall(Register scratch, int arg_count) {
Label extra_push, end;
andi(scratch, sp, 7);
// The args and the receiver on the stack are all word sized; we add one
// word for sp, which we also want to store on the stack.
if (((arg_count + 1) % kPointerSizeLog2) == 0) {
Branch(ne, &extra_push, at, Operand(zero_reg));
} else { // ((arg_count + 1) % 2) == 1
Branch(eq, &extra_push, at, Operand(zero_reg));
}
// Save sp on the stack.
mov(scratch, sp);
Push(scratch);
b(&end);
// Align before saving sp on the stack.
bind(&extra_push);
mov(scratch, sp);
addiu(sp, sp, -8);
sw(scratch, MemOperand(sp));
// The stack is aligned and sp is stored on the top.
bind(&end);
}
void MacroAssembler::ReturnFromAlignedCall() {
lw(sp, MemOperand(sp));
}
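
The intent of SetupAlignedCall/ReturnFromAlignedCall is that sp is 8-byte aligned by the time the stub is called, with the old sp always in the topmost slot so a single lw restores it. A toy model of the alignment decision (this models the invariant only; the actual code above tests (arg_count + 1) % kPointerSizeLog2 against the current sp, and the names here are hypothetical):

#include <cassert>
#include <cstdint>

// Before a stub call that will push 'words' more 4-byte values, save the
// old sp on the stack, inserting one word of padding if needed so that sp
// is 8-byte aligned at the call; the old sp always ends up in the topmost
// slot, so ReturnFromAlignedCall is a single load.
uint32_t SetupAlignedCall(uint32_t sp, int words) {
  uint32_t old_sp = sp;
  if ((sp - 4u * (words + 1)) % 8 != 0) {
    sp -= 8;  // extra push: one word of padding plus the slot for old sp
  } else {
    sp -= 4;  // plain push of old sp
  }
  // memory[sp] = old_sp;              // sw(scratch, MemOperand(sp))
  (void)old_sp;
  assert((sp - 4u * words) % 8 == 0);  // aligned once the args are pushed
  return sp;
}

int main() {
  for (int words = 0; words < 4; words++) {
    SetupAlignedCall(0x1000u, words);  // initially aligned sp
    SetupAlignedCall(0x1004u, words);  // initially misaligned sp
  }
  return 0;
}
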
// -----------------------------------------------------------------------------
// JavaScript invokes
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_reg,
Label* done,
InvokeFlag flag) {
bool definitely_matches = false;
Label regular_invoke;
// Check whether the expected and actual arguments count match. If not,
// setup registers according to contract with ArgumentsAdaptorTrampoline:
// a0: actual arguments count
// a1: function (passed through to callee)
// a2: expected arguments count
// a3: callee code entry
// The code below is made a lot easier because the calling code already sets
// up actual and expected registers according to the contract if values are
// passed in registers.
ASSERT(actual.is_immediate() || actual.reg().is(a0));
ASSERT(expected.is_immediate() || expected.reg().is(a2));
ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(a3));
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
if (expected.immediate() == actual.immediate()) {
definitely_matches = true;
} else {
li(a0, Operand(actual.immediate()));
const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
if (expected.immediate() == sentinel) {
// Don't worry about adapting arguments for builtins that
// don't want that done. Skip adaption code by making it look
// like we have a match between expected and actual number of
// arguments.
definitely_matches = true;
} else {
li(a2, Operand(expected.immediate()));
}
}
} else if (actual.is_immediate()) {
Branch(eq, &regular_invoke, expected.reg(), Operand(actual.immediate()));
li(a0, Operand(actual.immediate()));
} else {
Branch(eq, &regular_invoke, expected.reg(), Operand(actual.reg()));
}
if (!definitely_matches) {
if (!code_constant.is_null()) {
li(a3, Operand(code_constant));
addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
}
ExternalReference adaptor(Builtins::ArgumentsAdaptorTrampoline);
if (flag == CALL_FUNCTION) {
CallBuiltin(adaptor);
b(done);
nop();
} else {
JumpToBuiltin(adaptor);
}
bind(&regular_invoke);
}
}
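
InvokePrologue boils down to a three-way decision: matching counts call straight through, the kDontAdaptArgumentsSentinel opt-out skips adaptation for builtins, and anything else detours through the ArgumentsAdaptorTrampoline. A minimal sketch of that decision (the sentinel value here is illustrative; v8 defines it on SharedFunctionInfo):

#include <cassert>

// Illustrative sentinel: a callee whose formal parameter count equals it
// accepts any actual count without adaptation.
const int kDontAdaptArgumentsSentinel = -1;

bool NeedsArgumentsAdaptor(int expected, int actual) {
  if (expected == kDontAdaptArgumentsSentinel) return false;  // builtin opt-out
  return expected != actual;  // mismatch: go through the adaptor trampoline
}

int main() {
  assert(!NeedsArgumentsAdaptor(2, 2));                           // exact match
  assert(NeedsArgumentsAdaptor(2, 3));                            // adapt
  assert(!NeedsArgumentsAdaptor(kDontAdaptArgumentsSentinel, 5)); // don't adapt
  return 0;
}
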
void MacroAssembler::InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag) {
Label done;
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
if (flag == CALL_FUNCTION) {
Call(code);
} else {
ASSERT(flag == JUMP_FUNCTION);
Jump(code);
}
// Continue here if InvokePrologue does handle the invocation due to
// mismatched parameter counts.
bind(&done);
}
void MacroAssembler::InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag) {
Label done;
InvokePrologue(expected, actual, code, no_reg, &done, flag);
if (flag == CALL_FUNCTION) {
Call(code, rmode);
} else {
Jump(code, rmode);
}
// Continue here if InvokePrologue does handle the invocation due to
// mismatched parameter counts.
bind(&done);
}
void MacroAssembler::InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag) {
// Contract with called JS functions requires that function is passed in a1.
ASSERT(function.is(a1));
Register expected_reg = a2;
Register code_reg = a3;
lw(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
lw(expected_reg,
FieldMemOperand(code_reg,
SharedFunctionInfo::kFormalParameterCountOffset));
lw(code_reg,
MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
addiu(code_reg, code_reg, Code::kHeaderSize - kHeapObjectTag);
ParameterCount expected(expected_reg);
InvokeCode(code_reg, expected, actual, flag);
}
// ---------------------------------------------------------------------------
// Support functions.
void MacroAssembler::GetObjectType(Register function,
Register map,
Register type_reg) {
lw(map, FieldMemOperand(function, HeapObject::kMapOffset));
lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
}
void MacroAssembler::CallBuiltin(ExternalReference builtin_entry) {
// Load builtin address.
LoadExternalReference(t9, builtin_entry);
lw(t9, MemOperand(t9)); // Deref address.
addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
// Call and allocate the argument slots.
jalr(t9);
// Use the branch delay slot to allocate the argument slots.
addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize);
}
void MacroAssembler::CallBuiltin(Register target) {
// Target already holds target address.
// Call and allocate argument slots.
jalr(target);
// Use the branch delay slot to allocate the argument slots.
addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
// Reclaim the argument slots once the call returns.
addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize);
}
void MacroAssembler::JumpToBuiltin(ExternalReference builtin_entry) {
// Load builtin address.
LoadExternalReference(t9, builtin_entry);
lw(t9, MemOperand(t9)); // Deref address.
addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
// Jump and allocate argument slots.
jr(t9);
// Use the branch delay slot to allocate the argument slots.
addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
}
void MacroAssembler::JumpToBuiltin(Register target) {
// The target register already holds the destination address.
// Jump and allocate argument slots.
jr(target);
// Use the branch delay slot to allocate the argument slots.
addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
}
// -----------------------------------------------------------------------------
// Runtime calls
void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
Register r1, const Operand& r2) {
ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2);
}
......@@ -826,13 +1102,38 @@ void MacroAssembler::StubReturn(int argc) {
}
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
addiu(sp, sp, num_arguments * kPointerSize);
}
LoadRoot(v0, Heap::kUndefinedValueRootIndex);
}
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
// All parameters are on the stack. v0 has the return value after call.
// If the expected number of arguments of the runtime function is
// constant, we check that the actual number of arguments match the
// expectation.
if (f->nargs >= 0 && f->nargs != num_arguments) {
IllegalOperation(num_arguments);
return;
}
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
li(a0, num_arguments);
LoadExternalReference(a1, ExternalReference(f));
CEntryStub stub(1);
CallStub(&stub);
}
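A hypothetical call site for the convention just described (the runtime function and argument setup here are illustrative only):
// Arguments live on the stack; CallRuntime loads a0 (argc) and a1 (entry)
// and goes through CEntryStub. The result comes back in v0.
__ Push(a0);  // Push the single argument.
__ CallRuntime(Runtime::kNumberToString, 1);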
void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}
......@@ -891,6 +1192,8 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
}
// -----------------------------------------------------------------------------
// Debugging
void MacroAssembler::Assert(Condition cc, const char* msg,
Register rs, Operand rt) {
......@@ -908,5 +1211,113 @@ void MacroAssembler::Abort(const char* msg) {
UNIMPLEMENTED_MIPS();
}
void MacroAssembler::EnterFrame(StackFrame::Type type) {
addiu(sp, sp, -5 * kPointerSize);
li(t0, Operand(Smi::FromInt(type)));
li(t1, Operand(CodeObject()));
sw(ra, MemOperand(sp, 4 * kPointerSize));
sw(fp, MemOperand(sp, 3 * kPointerSize));
sw(cp, MemOperand(sp, 2 * kPointerSize));
sw(t0, MemOperand(sp, 1 * kPointerSize));
sw(t1, MemOperand(sp, 0 * kPointerSize));
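// Frame layout at this point, from low to high address:
//   [sp + 0 * kPointerSize]  code object
//   [sp + 1 * kPointerSize]  frame type (as Smi)
//   [sp + 2 * kPointerSize]  cp
//   [sp + 3 * kPointerSize]  caller's fp   <- new fp points here
//   [sp + 4 * kPointerSize]  ra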
addiu(fp, sp, 3 * kPointerSize);
}
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
mov(sp, fp);
lw(fp, MemOperand(sp, 0 * kPointerSize));
lw(ra, MemOperand(sp, 1 * kPointerSize));
addiu(sp, sp, 2 * kPointerSize);
}
void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode,
Register hold_argc,
Register hold_argv,
Register hold_function) {
// Compute the argv pointer and keep it in a callee-saved register.
// a0 is argc.
sll(t0, a0, kPointerSizeLog2);
add(hold_argv, sp, t0);
addi(hold_argv, hold_argv, -kPointerSize);
// Compute the callee's stack pointer before making changes, and save it in
// register t1 so that it can be restored as sp on exit, thereby popping
// the arguments.
// t1 = sp + kPointerSize * #args
add(t1, sp, t0);
// Align the stack at this point.
AlignStack(0);
// Save registers.
addiu(sp, sp, -12);
sw(t1, MemOperand(sp, 8));
sw(ra, MemOperand(sp, 4));
sw(fp, MemOperand(sp, 0));
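// Saved state so far, from low to high address:
//   [sp + 0]  caller's fp   <- fp will point here
//   [sp + 4]  ra
//   [sp + 8]  sp to restore on exit (from t1)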
mov(fp, sp);  // Set up the new frame pointer.
// Push debug marker.
if (mode == ExitFrame::MODE_DEBUG) {
Push(zero_reg);
} else {
li(t0, Operand(CodeObject()));
Push(t0);
}
// Save the frame pointer and the context in top.
LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
sw(fp, MemOperand(t0));
LoadExternalReference(t0, ExternalReference(Top::k_context_address));
sw(cp, MemOperand(t0));
// Set up argc and the builtin function in callee-saved registers.
mov(hold_argc, a0);
mov(hold_function, a1);
}
void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
// Clear top frame.
LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
sw(zero_reg, MemOperand(t0));
// Restore current context from top and clear it in debug mode.
LoadExternalReference(t0, ExternalReference(Top::k_context_address));
lw(cp, MemOperand(t0));
#ifdef DEBUG
sw(a3, MemOperand(t0));
#endif
// Pop the arguments, restore registers, and return.
mov(sp, fp); // Respect ABI stack constraint.
lw(fp, MemOperand(sp, 0));
lw(ra, MemOperand(sp, 4));
lw(sp, MemOperand(sp, 8));
jr(ra);
nop(); // Branch delay slot nop.
}
void MacroAssembler::AlignStack(int offset) {
// On MIPS an offset of 0 aligns to 0 modulo 8 bytes,
// and an offset of 1 aligns to 4 modulo 8 bytes.
int activation_frame_alignment = OS::ActivationFrameAlignment();
if (activation_frame_alignment != kPointerSize) {
// This code needs to be made more general if this assert doesn't hold.
ASSERT(activation_frame_alignment == 2 * kPointerSize);
if (offset == 0) {
andi(t0, sp, activation_frame_alignment - 1);
Push(zero_reg, eq, t0, zero_reg);
} else {
andi(t0, sp, activation_frame_alignment - 1);
addiu(t0, t0, -4);
Push(zero_reg, eq, t0, zero_reg);
}
}
}
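The padding decision above can be summarized host-side as follows (a sketch assuming the 8-byte activation frame alignment asserted above and a 4-byte-aligned sp; the helper name is hypothetical):
// Number of zero words AlignStack pushes: offset 0 targets sp % 8 == 0,
// offset 1 targets sp % 8 == 4.
static int AlignmentPaddingWords(uintptr_t sp, int offset) {
  const uintptr_t target = (offset == 0) ? 0 : 4;
  return ((sp & 7) == target) ? 0 : 1;  // At most one word of padding.
}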
} } // namespace v8::internal
......@@ -41,6 +41,7 @@ class JumpTarget;
// unless we know exactly what we do.
// Register aliases
// cp is assumed to be a callee saved register.
const Register cp = s7; // JavaScript context pointer
const Register fp = s8_fp; // Alias fp
......@@ -102,10 +103,10 @@ class MacroAssembler: public Assembler {
// Jump unconditionally to given label.
// We NEED a nop in the branch delay slot, as it is used by v8, for example in
// CodeGenerator::ProcessDeferred().
// Currently the branch delay slot is filled by the MacroAssembler.
// Prefer b(Label) for code generation.
void jmp(Label* L) {
Branch(cc_always, L);
nop();
}
// Load an object from the root table.
......@@ -115,6 +116,11 @@ class MacroAssembler: public Assembler {
Heap::RootListIndex index,
Condition cond, Register src1, const Operand& src2);
// Load an external reference.
void LoadExternalReference(Register reg, ExternalReference ext) {
li(reg, Operand(ext));
}
// Sets the remembered set bit for [address+offset].
void RecordWrite(Register object, Register offset, Register scratch);
......@@ -191,7 +197,6 @@ class MacroAssembler: public Assembler {
void Push(Register src, Condition cond, Register tst1, Register tst2) {
// Since we don't have conditional execution, we use a Branch.
Branch(cond, 3, tst1, Operand(tst2));
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
......@@ -209,6 +214,53 @@ class MacroAssembler: public Assembler {
}
// ---------------------------------------------------------------------------
// Activation frames
void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
// Enter an exit frame with the given mode, either MODE_NORMAL or, for the
// debugger, MODE_DEBUG. Expects the number of arguments in register a0 and
// the builtin function to call in register a1.
// On output hold_argc, hold_function, and hold_argv are set up.
void EnterExitFrame(ExitFrame::Mode mode,
Register hold_argc,
Register hold_argv,
Register hold_function);
// Leave the current exit frame. Expects the return value in v0.
void LeaveExitFrame(ExitFrame::Mode mode);
// Align the stack by optionally pushing a zero word.
void AlignStack(int offset);
void SetupAlignedCall(Register scratch, int arg_count = 0);
void ReturnFromAlignedCall();
// ---------------------------------------------------------------------------
// JavaScript invokes
// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag);
void InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag);
// Invoke the JavaScript function in the given register. Changes the
// current context to the context in the function before invoking.
void InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag);
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
......@@ -227,8 +279,7 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link into try handler chain.
// The return address must be passed in register ra.
void PushTryHandler(CodeLocation try_location, HandlerType type);
// Unlink the stack handler on top of the stack from the try handler chain.
......@@ -239,6 +290,10 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Support functions.
void GetObjectType(Register function,
Register map,
Register type_reg);
inline void BranchOnSmi(Register value, Label* smi_label,
Register scratch = at) {
ASSERT_EQ(0, kSmiTag);
......@@ -254,6 +309,15 @@ class MacroAssembler: public Assembler {
Branch(ne, not_smi_label, scratch, Operand(zero_reg));
}
void CallBuiltin(ExternalReference builtin_entry);
void CallBuiltin(Register target);
void JumpToBuiltin(ExternalReference builtin_entry);
void JumpToBuiltin(Register target);
// Generates code for reporting that an illegal operation has
// occurred.
void IllegalOperation(int num_arguments);
// ---------------------------------------------------------------------------
// Runtime calls
......@@ -342,20 +406,33 @@ class MacroAssembler: public Assembler {
bool allow_stub_calls() { return allow_stub_calls_; }
private:
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_reg,
Label* done,
InvokeFlag flag);
// Get the code for the given builtin. The 'resolved' flag indicates
// whether the function could be resolved.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
List<Unresolved> unresolved_;
bool generating_stub_;
bool allow_stub_calls_;
// This handle will be patched with the code object on installation.
Handle<Object> code_object_;
// Activation support.
// EnterFrame clobbers t0 and t1.
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
};
......
......@@ -160,8 +160,31 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
// Registers:
// a1: function
// ra: return address
// Enter an internal frame.
__ EnterInternalFrame();
// Preserve the function.
__ Push(a1);
// Set up the aligned call.
__ SetupAlignedCall(t0, 1);
// Push the function on the stack as the argument to the runtime function.
__ Push(a1);
// Call the runtime function.
__ CallRuntime(Runtime::kLazyCompile, 1);
__ ReturnFromAlignedCall();
// Calculate the entry point.
__ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
// Restore saved function.
__ Pop(a1);
// Tear down temporary frame.
__ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ Jump(t9);
return GetCodeWithFlags(flags, "LazyCompileStub");
}
......@@ -174,6 +197,26 @@ Object* CallStubCompiler::CompileCallField(JSObject* object,
}
Object* CallStubCompiler::CompileArrayPushCall(Object* object,
JSObject* holder,
JSFunction* function,
String* name,
CheckType check) {
UNIMPLEMENTED_MIPS();
return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
}
Object* CallStubCompiler::CompileArrayPopCall(Object* object,
JSObject* holder,
JSFunction* function,
String* name,
CheckType check) {
UNIMPLEMENTED_MIPS();
return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
}
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
......
......@@ -53,7 +53,12 @@ void VirtualFrame::SyncElementByPushing(int index) {
void VirtualFrame::SyncRange(int begin, int end) {
// All elements are in memory on MIPS (i.e., synced).
#ifdef DEBUG
for (int i = begin; i <= end; i++) {
ASSERT(elements_[i].is_synced());
}
#endif
}
......@@ -63,7 +68,13 @@ void VirtualFrame::MergeTo(VirtualFrame* expected) {
void VirtualFrame::Enter() {
// TODO(MIPS): Implement DEBUG
// We are about to push four values to the frame.
Adjust(4);
__ MultiPush(ra.bit() | fp.bit() | cp.bit() | a1.bit());
// Adjust FP to point to saved FP.
__ addiu(fp, sp, 2 * kPointerSize);
}
......@@ -73,7 +84,17 @@ void VirtualFrame::Exit() {
void VirtualFrame::AllocateStackSlots() {
int count = local_count();
if (count > 0) {
Comment cmnt(masm(), "[ Allocate space for locals");
Adjust(count);
// Initialize stack slots with 'undefined' value.
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ addiu(sp, sp, -count * kPointerSize);
for (int i = 0; i < count; i++) {
__ sw(t0, MemOperand(sp, (count - i - 1) * kPointerSize));
}
}
}
......@@ -128,12 +149,16 @@ void VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
PrepareForCall(arg_count, arg_count);
ASSERT(cgen()->HasValidEntryRegisters());
__ CallRuntime(f, arg_count);
}
void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
PrepareForCall(arg_count, arg_count);
ASSERT(cgen()->HasValidEntryRegisters());
__ CallRuntime(id, arg_count);
}
......@@ -155,16 +180,37 @@ void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
}
void VirtualFrame::RawCallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode) {
UNIMPLEMENTED_MIPS();
}
void VirtualFrame::CallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode,
int dropped_args) {
switch (code->kind()) {
case Code::CALL_IC:
break;
case Code::FUNCTION:
UNIMPLEMENTED_MIPS();
break;
case Code::KEYED_LOAD_IC:
UNIMPLEMENTED_MIPS();
break;
case Code::LOAD_IC:
UNIMPLEMENTED_MIPS();
break;
case Code::KEYED_STORE_IC:
UNIMPLEMENTED_MIPS();
break;
case Code::STORE_IC:
UNIMPLEMENTED_MIPS();
break;
case Code::BUILTIN:
UNIMPLEMENTED_MIPS();
break;
default:
UNREACHABLE();
break;
}
Forget(dropped_args);
ASSERT(cgen()->HasValidEntryRegisters());
__ Call(code, rmode);
}
......@@ -187,7 +233,24 @@ void VirtualFrame::CallCodeObject(Handle<Code> code,
void VirtualFrame::Drop(int count) {
ASSERT(count >= 0);
ASSERT(height() >= count);
int num_virtual_elements = (element_count() - 1) - stack_pointer_;
// Emit code to lower the stack pointer if necessary.
if (num_virtual_elements < count) {
int num_dropped = count - num_virtual_elements;
stack_pointer_ -= num_dropped;
__ addiu(sp, sp, num_dropped * kPointerSize);
}
// Discard elements from the virtual frame and free any registers.
for (int i = 0; i < count; i++) {
FrameElement dropped = elements_.RemoveLast();
if (dropped.is_register()) {
Unuse(dropped.reg());
}
}
}
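The virtual/real split above amounts to this arithmetic (a host-side sketch; the helper is illustrative, not part of the patch):
// Elements above stack_pointer_ exist only in the virtual frame; only the
// remainder requires adjusting the hardware sp.
static int RealWordsToPop(int element_count, int stack_pointer, int count) {
  const int num_virtual = (element_count - 1) - stack_pointer;
  return (count > num_virtual) ? (count - num_virtual) : 0;
}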
......@@ -199,27 +262,50 @@ void VirtualFrame::DropFromVFrameOnly(int count) {
Result VirtualFrame::Pop() {
UNIMPLEMENTED_MIPS();
Result res = Result();
return res;  // UNIMPLEMENTED RETURN
}
void VirtualFrame::EmitPop(Register reg) {
ASSERT(stack_pointer_ == element_count() - 1);
stack_pointer_--;
elements_.RemoveLast();
__ Pop(reg);
}
void VirtualFrame::EmitMultiPop(RegList regs) {
ASSERT(stack_pointer_ == element_count() - 1);
for (int16_t i = 0; i < kNumRegisters; i++) {
if ((regs & (1 << i)) != 0) {
stack_pointer_--;
elements_.RemoveLast();
}
}
__ MultiPop(regs);
}
void VirtualFrame::EmitPush(Register reg) {
ASSERT(stack_pointer_ == element_count() - 1);
elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
stack_pointer_++;
__ Push(reg);
}
void VirtualFrame::EmitMultiPush(RegList regs) {
ASSERT(stack_pointer_ == element_count() - 1);
for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
if ((regs & (1 << i)) != 0) {
elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
stack_pointer_++;
}
}
__ MultiPush(regs);
}
void VirtualFrame::EmitArgumentSlots(RegList reglist) {
UNIMPLEMENTED_MIPS();
}
......
......@@ -292,12 +292,8 @@ class VirtualFrame : public ZoneObject {
RawCallStub(stub);
}
void CallStub(CodeStub* stub, Result* arg);
void CallStub(CodeStub* stub, Result* arg0, Result* arg1);
// Call runtime given the number of arguments expected on (and
......@@ -356,15 +352,15 @@ class VirtualFrame : public ZoneObject {
// emit a corresponding pop instruction.
void EmitPop(Register reg);
// Same but for multiple registers
void EmitMultiPop(RegList regs);
void EmitMultiPopReversed(RegList regs);
// Push an element on top of the expression stack and emit a
// corresponding push instruction.
void EmitPush(Register reg);
// Same but for multiple registers.
void EmitMultiPush(RegList regs);
void EmitMultiPushReversed(RegList regs);
// Push an element on the virtual frame.
inline void Push(Register reg, NumberInfo info = NumberInfo::Unknown());
......@@ -392,6 +388,7 @@ class VirtualFrame : public ZoneObject {
void EmitArgumentSlots(RegList reglist);
inline void SetTypeForLocalAt(int index, NumberInfo info);
inline void SetTypeForParamAt(int index, NumberInfo info);
private:
static const int kLocal0Offset = JavaScriptFrameConstants::kLocal0Offset;
......
......@@ -83,7 +83,7 @@ SOURCES = {
'arch:x64': ['test-assembler-x64.cc',
'test-macro-assembler-x64.cc',
'test-log-stack-tracer.cc'],
'arch:mips': ['test-assembler-mips.cc', 'test-mips.cc'],
'os:linux': ['test-platform-linux.cc'],
'os:macos': ['test-platform-macos.cc'],
'os:nullos': ['test-platform-nullos.cc'],
......
......@@ -59,6 +59,7 @@ test-accessors: SKIP
test-alloc: SKIP
test-api: SKIP
test-compiler: SKIP
test-cpu-profiler: SKIP
test-debug: SKIP
test-decls: SKIP
test-func-name-inference: SKIP
......
......@@ -49,8 +49,8 @@ static v8::Persistent<v8::Context> env;
// The test framework does not accept flags on the command line, so we set them.
static void InitializeVM() {
// Disable compilation of natives.
FLAG_disable_native_files = true;
// Enable generation of comments.
FLAG_debug_code = true;
......
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "execution.h"
#include "cctest.h"
using ::v8::Local;
using ::v8::String;
using ::v8::Script;
namespace i = ::v8::internal;
TEST(MIPSFunctionCalls) {
// Disable compilation of natives.
i::FLAG_disable_native_files = true;
i::FLAG_full_compiler = false;
v8::HandleScope scope;
LocalContext env; // from cctest.h
const char* c_source = "function foo() { return 0x1234; }; foo();";
Local<String> source = ::v8::String::New(c_source);
Local<Script> script = ::v8::Script::Compile(source);
CHECK_EQ(0x1234, script->Run()->Int32Value());
}