Commit 01bcdbcd authored by sgjesse@chromium.org's avatar sgjesse@chromium.org

MIPS simple function calls

This lands http://codereview.chromium.org/1018001.

Patch by Alexandre Rames <alexandre.rames@gmail.com> from Sigma Designs Inc.

Mads, please just have a look at the new flag --disable-native-files (two files not in the mips directory).
Review URL: http://codereview.chromium.org/1140004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4234 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0c6a3f48
......@@ -1114,6 +1114,11 @@ bool Genesis::InstallNatives() {
global_context()->set_opaque_reference_function(*opaque_reference_fun);
}
if (FLAG_disable_native_files) {
PrintF("Warning: Running without installed natives!\n");
return true;
}
// Install natives.
for (int i = Natives::GetDebuggerCount();
i < Natives::GetBuiltinsCount();
......
......@@ -124,6 +124,7 @@ DEFINE_string(expose_natives_as, NULL, "expose natives in global object")
DEFINE_string(expose_debug_as, NULL, "expose debug in global object")
DEFINE_bool(expose_gc, false, "expose gc extension")
DEFINE_int(stack_trace_limit, 10, "number of stack frames to capture")
DEFINE_bool(disable_native_files, false, "disable builtin natives files")
// builtins-ia32.cc
DEFINE_bool(inline_new, true, "use fast inline allocation")
......
......@@ -74,7 +74,99 @@ void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody.
  //
  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // arguments slots
  // handler frame
  // entry frame
  // callee saved registers + ra
  // 4 args slots
  // args

  // Clear the context before we push it when entering the JS frame.
  __ li(cp, Operand(0));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ li(s6, Operand(roots_address));

  // Push the function and the receiver onto the stack.
  __ MultiPushReversed(a1.bit() | a2.bit());

  // Copy arguments to the stack in a loop.
  // a3: argc
  // s0: argv, ie points to first arg
  Label loop, entry;
  __ sll(t0, a3, kPointerSizeLog2);
  __ add(t2, s0, t0);
  __ b(&entry);
  __ nop();  // Branch delay slot nop.
  // t2 points past last arg.
  __ bind(&loop);
  __ lw(t0, MemOperand(s0));  // Read next parameter.
  __ addiu(s0, s0, kPointerSize);
  __ lw(t0, MemOperand(t0));  // Dereference handle.
  __ Push(t0);  // Push parameter.
  __ bind(&entry);
  __ Branch(ne, &loop, s0, Operand(t2));

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  // s6: roots_address
  //
  // Stack:
  // arguments
  // receiver
  // function
  // arguments slots
  // handler frame
  // entry frame
  // callee saved registers + ra
  // 4 args slots
  // args

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
  __ mov(s1, t4);
  __ mov(s2, t4);
  __ mov(s3, t4);
  // BUG FIX: was "mov(s4, s4)", a no-op that left s4 uninitialized and
  // therefore visible to the GC with a stale value.
  __ mov(s4, t4);
  __ mov(s5, t4);
  // s6 holds the root address. Do not clobber.
  // s7 is cp. Do not init.

  // Invoke the code and pass argc as a0.
  __ mov(a0, a3);
  if (is_construct) {
    UNIMPLEMENTED_MIPS();
    __ break_(0x164);
  } else {
    ParameterCount actual(a0);
    __ InvokeFunction(a1, actual, CALL_FUNCTION);
  }

  __ LeaveInternalFrame();

  __ Jump(ra);
}
......@@ -100,6 +192,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// TODO(MIPS): Not yet ported. UNIMPLEMENTED_MIPS() flags this at runtime and
// the break_(0x201) traps with a recognizable code if execution reaches here.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
__ break_(0x201);
}
......
......@@ -36,7 +36,31 @@ namespace internal {
// Platform-specific inline functions.
void DeferredCode::Jump() { __ b(&entry_label_); }
// Jump to the deferred code entry point. On MIPS the branch delay slot must
// be filled explicitly, hence the trailing nop.
void DeferredCode::Jump() {
__ b(&entry_label_);
__ nop();
}
// On MIPS all frame elements are always in memory (spilled), so this simply
// delegates to GetValue().
void Reference::GetValueAndSpill() {
GetValue();
}
// On MIPS the virtual frame is always spilled, so visiting needs no extra
// spill step.
void CodeGenerator::VisitAndSpill(Statement* statement) {
Visit(statement);
}
// On MIPS the virtual frame is always spilled, so visiting the list needs no
// extra spill step.
void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
VisitStatements(statements);
}
// On MIPS the virtual frame is always spilled, so loading needs no extra
// spill step.
void CodeGenerator::LoadAndSpill(Expression* expression) {
Load(expression);
}
#undef __
......
This diff is collapsed.
......@@ -42,7 +42,77 @@ enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// Reference support
// A reference is a C++ stack-allocated object that keeps an ECMA
// reference on the execution stack while in scope. For variables
// the reference is empty, indicating that it isn't necessary to
// store state on the stack for keeping track of references to those.
// For properties, we keep either one (named) or two (indexed) values
// on the execution stack to represent the reference.
class Reference BASE_EMBEDDED {
public:
// The values of the types are important, see size(): UNLOADED and ILLEGAL
// occupy no stack space; NAMED and KEYED occupy 1 and 2 slots respectively.
enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
Reference(CodeGenerator* cgen,
Expression* expression,
bool persist_after_get = false);
~Reference();
Expression* expression() const { return expression_; }
Type type() const { return type_; }
// The type may be set exactly once, and only while still ILLEGAL.
void set_type(Type value) {
ASSERT_EQ(ILLEGAL, type_);
type_ = value;
}
// Mark the reference as consumed; only valid on a loaded reference.
void set_unloaded() {
ASSERT_NE(ILLEGAL, type_);
ASSERT_NE(UNLOADED, type_);
type_ = UNLOADED;
}
// The size the reference takes up on the stack.
int size() const {
return (type_ < SLOT) ? 0 : type_;
}
bool is_illegal() const { return type_ == ILLEGAL; }
bool is_slot() const { return type_ == SLOT; }
bool is_property() const { return type_ == NAMED || type_ == KEYED; }
bool is_unloaded() const { return type_ == UNLOADED; }
// Return the name. Only valid for named property references.
Handle<String> GetName();
// Generate code to push the value of the reference on top of the
// expression stack. The reference is expected to be already on top of
// the expression stack, and it is consumed by the call unless the
// reference is for a compound assignment.
// If the reference is not consumed, it is left in place under its value.
void GetValue();
// Generate code to pop a reference, push the value of the reference,
// and then spill the stack frame.
inline void GetValueAndSpill();
// Generate code to store the value on top of the expression stack in the
// reference. The reference is expected to be immediately below the value
// on the expression stack. The value is stored in the location specified
// by the reference, and is left on top of the stack, after the reference
// is popped from beneath it (unloaded).
void SetValue(InitState init_state);
private:
CodeGenerator* cgen_;
Expression* expression_;
Type type_;
// Keep the reference on the stack after get, so it can be used by set later.
bool persist_after_get_;
};
// -----------------------------------------------------------------------------
// Code generation state
// The state is passed down the AST by the code generator (and back up, in
......@@ -89,7 +159,7 @@ class CodeGenState BASE_EMBEDDED {
// -------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// CodeGenerator
class CodeGenerator: public AstVisitor {
......@@ -152,7 +222,7 @@ class CodeGenerator: public AstVisitor {
// Number of instructions used for the JS return sequence. The constant is
// used by the debugger to patch the JS return sequence.
static const int kJSReturnSequenceLength = 6;
static const int kJSReturnSequenceLength = 7;
// If the name is an inline runtime function call return the number of
// expected arguments. Otherwise return -1.
......@@ -186,9 +256,51 @@ class CodeGenerator: public AstVisitor {
AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Visit a statement and then spill the virtual frame if control flow can
// reach the end of the statement (ie, it does not exit via break,
// continue, return, or throw). This function is used temporarily while
// the code generator is being transformed.
inline void VisitAndSpill(Statement* statement);
// Visit a list of statements and then spill the virtual frame if control
// flow can reach the end of the list.
inline void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
// Main code generation function
void Generate(CompilationInfo* info);
// The following are used by class Reference.
void LoadReference(Reference* ref);
void UnloadReference(Reference* ref);
MemOperand ContextOperand(Register context, int index) const {
return MemOperand(context, Context::SlotOffset(index));
}
MemOperand SlotOperand(Slot* slot, Register tmp);
// Expressions
MemOperand GlobalObject() const {
return ContextOperand(cp, Context::GLOBAL_INDEX);
}
void LoadCondition(Expression* x,
JumpTarget* true_target,
JumpTarget* false_target,
bool force_cc);
void Load(Expression* x);
void LoadGlobal();
// Generate code to push the value of an expression on top of the frame
// and then spill the frame fully to memory. This function is used
// temporarily while the code generator is being transformed.
inline void LoadAndSpill(Expression* expression);
// Read a value from a slot and leave it on top of the expression stack.
void LoadFromSlot(Slot* slot, TypeofState typeof_state);
// Store the value on top of the stack to a slot.
void StoreToSlot(Slot* slot, InitState init_state);
struct InlineRuntimeLUT {
void (CodeGenerator::*method)(ZoneList<Expression*>*);
const char* name;
......@@ -290,7 +402,6 @@ class CodeGenerator: public AstVisitor {
CompilationInfo* info_;
// Code generation state
Scope* scope_;
VirtualFrame* frame_;
RegisterAllocator* allocator_;
Condition cc_reg_;
......
......@@ -91,8 +91,7 @@ Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
Address InternalFrame::GetCallerStackPointer() const {
  // The stray UNIMPLEMENTED_MIPS()/NULL-return residue made the real return
  // unreachable. The caller's SP is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
......
......@@ -104,7 +104,7 @@ class ExitFrameConstants : public AllStatic {
static const int kCallerPCOffset = +1 * kPointerSize;
// FP-relative displacement of the caller's SP.
static const int kCallerSPDisplacement = +4 * kPointerSize;
static const int kCallerSPDisplacement = +3 * kPointerSize;
};
......
......@@ -74,6 +74,47 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // Registers:
  // a2: name
  // ra: return address

  // Get the receiver of the function from the stack.
  __ lw(a3, MemOperand(sp, argc * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ MultiPush(a2.bit() | a3.bit());

  // Call the entry.
  __ li(a0, Operand(2));
  __ li(a1, Operand(ExternalReference(IC_Utility(kCallIC_Miss))));

  CEntryStub stub(1);
  __ CallStub(&stub);

  // Move result to a1 and leave the internal frame.
  __ mov(a1, v0);
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ lw(a2, MemOperand(sp, argc * kPointerSize));
  __ andi(t0, a2, kSmiTagMask);
  __ Branch(eq, &invoke, t0, Operand(zero_reg));
  __ GetObjectType(a2, a3, a3);
  __ Branch(eq, &global, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ Branch(ne, &invoke, a3, Operand(JS_BUILTINS_OBJECT_TYPE));

  // Patch the receiver on the stack.
  __ bind(&global);
  __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
  __ sw(a2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(a1, actual, JUMP_FUNCTION);
}
// Defined in ic.cc.
......
......@@ -42,7 +42,37 @@ namespace internal {
#define __ ACCESS_MASM(cgen()->masm())
void JumpTarget::DoJump() {
  ASSERT(cgen()->has_valid_frame());
  // Live non-frame registers are not allowed at unconditional jumps
  // because we have no way of invalidating the corresponding results
  // which are still live in the C++ code.
  ASSERT(cgen()->HasValidEntryRegisters());

  if (is_bound()) {
    // Backward jump. There is already a frame expectation at the target.
    ASSERT(direction_ == BIDIRECTIONAL);
    cgen()->frame()->MergeTo(entry_frame_);
    cgen()->DeleteFrame();
  } else {
    // Use the current frame as the expected one at the target if necessary.
    if (entry_frame_ == NULL) {
      entry_frame_ = cgen()->frame();
      RegisterFile empty;
      cgen()->SetFrame(NULL, &empty);
    } else {
      cgen()->frame()->MergeTo(entry_frame_);
      cgen()->DeleteFrame();
    }

    // The predicate is_linked() should be made true. Its implementation
    // detects the presence of a frame pointer in the reaching_frames_ list.
    if (!is_linked()) {
      reaching_frames_.Add(NULL);
      ASSERT(is_linked());
    }
  }
  __ b(&entry_label_);
  __ nop();  // Branch delay slot nop.
}
......@@ -57,12 +87,47 @@ void JumpTarget::Call() {
void JumpTarget::DoBind() {
  ASSERT(!is_bound());

  // Live non-frame registers are not allowed at the start of a basic
  // block.
  ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());

  if (cgen()->has_valid_frame()) {
    // If there is a current frame we can use it on the fall through.
    if (entry_frame_ == NULL) {
      entry_frame_ = new VirtualFrame(cgen()->frame());
    } else {
      ASSERT(cgen()->frame()->Equals(entry_frame_));
    }
  } else {
    // If there is no current frame we must have an entry frame which we can
    // copy.
    ASSERT(entry_frame_ != NULL);
    RegisterFile empty;
    cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
  }

  // The predicate is_linked() should be made false. Its implementation
  // detects the presence (or absence) of frame pointers in the
  // reaching_frames_ list. If we inserted a bogus frame to make
  // is_linked() true, remove it now.
  if (is_linked()) {
    reaching_frames_.Clear();
  }

  __ bind(&entry_label_);
}
void BreakTarget::Jump() {
  // We do not currently emit merge code for jumps, so we need to do it
  // explicitly here (comment previously said "On ARM" — this is the MIPS
  // port). The only merging necessary is to drop extra statement state
  // from the stack.
  ASSERT(cgen()->has_valid_frame());
  int count = cgen()->frame()->height() - expected_height_;
  cgen()->frame()->Drop(count);
  DoJump();
}
......@@ -72,7 +137,26 @@ void BreakTarget::Jump(Result* arg) {
void BreakTarget::Bind() {
#ifdef DEBUG
  // All the forward-reaching frames should have been adjusted at the
  // jumps to this target.
  for (int i = 0; i < reaching_frames_.length(); i++) {
    ASSERT(reaching_frames_[i] == NULL ||
           reaching_frames_[i]->height() == expected_height_);
  }
#endif
  // Drop leftover statement state from the frame before merging, even
  // on the fall through. This is so we can bind the return target
  // with state on the frame.
  if (cgen()->has_valid_frame()) {
    int count = cgen()->frame()->height() - expected_height_;
    // We do not currently emit merge code at binding sites (comment
    // previously said "On ARM" — this is the MIPS port), so we need to do it
    // explicitly here. The only merging necessary is to drop extra
    // statement state from the stack.
    cgen()->frame()->Drop(count);
  }

  DoBind();
}
......
This diff is collapsed.
......@@ -41,6 +41,7 @@ class JumpTarget;
// unless we know exactly what we do.
// Registers aliases
// cp is assumed to be a callee saved register.
const Register cp = s7; // JavaScript context pointer
const Register fp = s8_fp; // Alias fp
......@@ -102,10 +103,10 @@ class MacroAssembler: public Assembler {
// Jump unconditionally to given label.
// We NEED a nop in the branch delay slot, as it used by v8, for example in
// CodeGenerator::ProcessDeferred().
// Currently the branch delay slot is filled by the MacroAssembler.
// Use rather b(Label) for code generation.
void jmp(Label* L) {
Branch(cc_always, L);
nop();
}
// Load an object from the root table.
......@@ -115,6 +116,11 @@ class MacroAssembler: public Assembler {
Heap::RootListIndex index,
Condition cond, Register src1, const Operand& src2);
// Load an external reference.
void LoadExternalReference(Register reg, ExternalReference ext) {
li(reg, Operand(ext));
}
// Sets the remembered set bit for [address+offset].
void RecordWrite(Register object, Register offset, Register scratch);
......@@ -191,7 +197,6 @@ class MacroAssembler: public Assembler {
// Conditionally push src, emulated with a branch around the store since
// MIPS has no conditional execution. NOTE(review): verify the sense of the
// branch condition against Branch()'s convention before relying on it.
void Push(Register src, Condition cond, Register tst1, Register tst2) {
// Since we don't have conditional execution we use a Branch.
Branch(cond, 3, tst1, Operand(tst2));
nop();
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
......@@ -209,6 +214,53 @@ class MacroAssembler: public Assembler {
}
// ---------------------------------------------------------------------------
// Activation frames
void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
// Enter specific kind of exit frame; either EXIT or
// EXIT_DEBUG. Expects the number of arguments in register a0 and
// the builtin function to call in register a1.
// On output hold_argc, hold_function, and hold_argv are setup.
void EnterExitFrame(ExitFrame::Mode mode,
Register hold_argc,
Register hold_argv,
Register hold_function);
// Leave the current exit frame. Expects the return value in v0.
void LeaveExitFrame(ExitFrame::Mode mode);
// Align the stack by optionally pushing a Smi zero.
void AlignStack(int offset);
void SetupAlignedCall(Register scratch, int arg_count = 0);
void ReturnFromAlignedCall();
// ---------------------------------------------------------------------------
// JavaScript invokes
// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag);
void InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag);
// Invoke the JavaScript function in the given register. Changes the
// current context to the context in the function before invoking.
void InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag);
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
......@@ -227,8 +279,7 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link into try handler chain.
// The return address must be passed in register lr.
// On exit, r0 contains TOS (code slot).
// The return address must be passed in register ra.
void PushTryHandler(CodeLocation try_location, HandlerType type);
// Unlink the stack handler on top of the stack from the try handler chain.
......@@ -239,6 +290,10 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Support functions.
void GetObjectType(Register function,
Register map,
Register type_reg);
inline void BranchOnSmi(Register value, Label* smi_label,
Register scratch = at) {
ASSERT_EQ(0, kSmiTag);
......@@ -254,6 +309,15 @@ class MacroAssembler: public Assembler {
Branch(ne, not_smi_label, scratch, Operand(zero_reg));
}
void CallBuiltin(ExternalReference builtin_entry);
void CallBuiltin(Register target);
void JumpToBuiltin(ExternalReference builtin_entry);
void JumpToBuiltin(Register target);
// Generates code for reporting that an illegal operation has
// occurred.
void IllegalOperation(int num_arguments);
// ---------------------------------------------------------------------------
// Runtime calls
......@@ -342,20 +406,33 @@ class MacroAssembler: public Assembler {
bool allow_stub_calls() { return allow_stub_calls_; }
private:
List<Unresolved> unresolved_;
bool generating_stub_;
bool allow_stub_calls_;
// This handle will be patched with the code object on installation.
Handle<Object> code_object_;
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_reg,
Label* done,
InvokeFlag flag);
// Get the code for the given builtin. Returns if able to resolve
// the function in the 'resolved' flag.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
List<Unresolved> unresolved_;
bool generating_stub_;
bool allow_stub_calls_;
// This handle will be patched with the code object on installation.
Handle<Object> code_object_;
// Activation support.
// EnterFrame clobbers t0 and t1.
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
};
......
......@@ -160,8 +160,31 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  // The stray UNIMPLEMENTED_MIPS()/NULL-return residue made the body dead.
  //
  // Registers:
  // a1: function
  // ra: return address

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ Push(a1);

  // Setup aligned call.
  __ SetupAlignedCall(t0, 1);

  // Push the function on the stack as the argument to the runtime function.
  __ Push(a1);
  // Call the runtime function.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ ReturnFromAlignedCall();

  // Calculate the entry point.
  __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);

  // Restore saved function.
  __ Pop(a1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(t9);

  return GetCodeWithFlags(flags, "LazyCompileStub");
}
......@@ -174,6 +197,26 @@ Object* CallStubCompiler::CompileCallField(JSObject* object,
}
// TODO(MIPS): Not yet ported; UNIMPLEMENTED_MIPS() flags this at runtime.
Object* CallStubCompiler::CompileArrayPushCall(Object* object,
JSObject* holder,
JSFunction* function,
String* name,
CheckType check) {
UNIMPLEMENTED_MIPS();
return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
}
// TODO(MIPS): Not yet ported; UNIMPLEMENTED_MIPS() flags this at runtime.
Object* CallStubCompiler::CompileArrayPopCall(Object* object,
JSObject* holder,
JSFunction* function,
String* name,
CheckType check) {
UNIMPLEMENTED_MIPS();
return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
}
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
......
......@@ -53,7 +53,12 @@ void VirtualFrame::SyncElementByPushing(int index) {
void VirtualFrame::SyncRange(int begin, int end) {
  // All elements are in memory on MIPS (ie, synced), so there is nothing to
  // emit; in debug builds verify the invariant.
#ifdef DEBUG
  for (int i = begin; i <= end; i++) {
    ASSERT(elements_[i].is_synced());
  }
#endif
}
......@@ -63,7 +68,13 @@ void VirtualFrame::MergeTo(VirtualFrame* expected) {
void VirtualFrame::Enter() {
  // TODO(MIPS): Implement DEBUG

  // We are about to push four values to the frame.
  Adjust(4);
  __ MultiPush(ra.bit() | fp.bit() | cp.bit() | a1.bit());
  // Adjust FP to point to saved FP.
  __ addiu(fp, sp, 2 * kPointerSize);
}
......@@ -73,7 +84,17 @@ void VirtualFrame::Exit() {
void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    Adjust(count);
    // Initialize stack slots with 'undefined' value.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ addiu(sp, sp, -count * kPointerSize);
    for (int i = 0; i < count; i++) {
      __ sw(t0, MemOperand(sp, (count - i - 1) * kPointerSize));
    }
  }
}
......@@ -128,12 +149,16 @@ void VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
  // Arguments are consumed from the frame; no result element is pushed here.
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
}
void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  // Arguments are consumed from the frame; no result element is pushed here.
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
}
......@@ -155,16 +180,37 @@ void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
}
// TODO(MIPS): Not yet ported; UNIMPLEMENTED_MIPS() flags this at runtime.
void VirtualFrame::RawCallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode) {
UNIMPLEMENTED_MIPS();
}
void VirtualFrame::CallCodeObject(Handle<Code> code,
                                  RelocInfo::Mode rmode,
                                  int dropped_args) {
  // Only CALL_IC is supported so far; other code kinds flag as unimplemented
  // before falling through to the call.
  switch (code->kind()) {
    case Code::CALL_IC:
      break;
    case Code::FUNCTION:
      UNIMPLEMENTED_MIPS();
      break;
    case Code::KEYED_LOAD_IC:
      UNIMPLEMENTED_MIPS();
      break;
    case Code::LOAD_IC:
      UNIMPLEMENTED_MIPS();
      break;
    case Code::KEYED_STORE_IC:
      UNIMPLEMENTED_MIPS();
      break;
    case Code::STORE_IC:
      UNIMPLEMENTED_MIPS();
      break;
    case Code::BUILTIN:
      UNIMPLEMENTED_MIPS();
      break;
    default:
      UNREACHABLE();
      break;
  }
  Forget(dropped_args);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
}
......@@ -187,7 +233,24 @@ void VirtualFrame::CallCodeObject(Handle<Code> code,
void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  int num_virtual_elements = (element_count() - 1) - stack_pointer_;

  // Emit code to lower the stack pointer if necessary.
  if (num_virtual_elements < count) {
    int num_dropped = count - num_virtual_elements;
    stack_pointer_ -= num_dropped;
    __ addiu(sp, sp, num_dropped * kPointerSize);
  }

  // Discard elements from the virtual frame and free any registers.
  for (int i = 0; i < count; i++) {
    FrameElement dropped = elements_.RemoveLast();
    if (dropped.is_register()) {
      Unuse(dropped.reg());
    }
  }
}
......@@ -199,27 +262,50 @@ void VirtualFrame::DropFromVFrameOnly(int count) {
Result VirtualFrame::Pop() {
  // Diff residue duplicated the return line ("RETUR"/"RETURN"); keep one.
  UNIMPLEMENTED_MIPS();
  Result res = Result();
  return res;  // UNIMPLEMENTED RETURN
}
void VirtualFrame::EmitPop(Register reg) {
  // The virtual frame must be fully synced before emitting a raw pop.
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ Pop(reg);
}
void VirtualFrame::EmitMultiPop(RegList regs) {
  // The virtual frame must be fully synced before emitting a raw pop.
  ASSERT(stack_pointer_ == element_count() - 1);
  // Remove one frame element per register bit set in regs.
  for (int16_t i = 0; i < kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      stack_pointer_--;
      elements_.RemoveLast();
    }
  }
  __ MultiPop(regs);
}
void VirtualFrame::EmitPush(Register reg) {
  // The virtual frame must be fully synced before emitting a raw push.
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
  stack_pointer_++;
  __ Push(reg);
}
void VirtualFrame::EmitMultiPush(RegList regs) {
  // The virtual frame must be fully synced before emitting a raw push.
  ASSERT(stack_pointer_ == element_count() - 1);
  // BUG FIX: the loop ran "i = kNumRegisters; i > 0; i--", which tested the
  // out-of-range bit (1 << kNumRegisters) — undefined behavior for a 32-bit
  // shift — and never tested bit 0, inconsistent with EmitMultiPop which
  // covers bits 0..kNumRegisters-1. Add one frame element per bit set.
  for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
      stack_pointer_++;
    }
  }
  __ MultiPush(regs);
}
// TODO(MIPS): Not yet ported; UNIMPLEMENTED_MIPS() flags this at runtime.
void VirtualFrame::EmitArgumentSlots(RegList reglist) {
UNIMPLEMENTED_MIPS();
}
......
This diff is collapsed.
......@@ -83,7 +83,7 @@ SOURCES = {
'arch:x64': ['test-assembler-x64.cc',
'test-macro-assembler-x64.cc',
'test-log-stack-tracer.cc'],
'arch:mips': ['test-assembler-mips.cc'],
'arch:mips': ['test-assembler-mips.cc', 'test-mips.cc'],
'os:linux': ['test-platform-linux.cc'],
'os:macos': ['test-platform-macos.cc'],
'os:nullos': ['test-platform-nullos.cc'],
......
......@@ -59,6 +59,7 @@ test-accessors: SKIP
test-alloc: SKIP
test-api: SKIP
test-compiler: SKIP
test-cpu-profiler: SKIP
test-debug: SKIP
test-decls: SKIP
test-func-name-inference: SKIP
......
......@@ -49,8 +49,8 @@ static v8::Persistent<v8::Context> env;
// The test framework does not accept flags on the command line, so we set them.
static void InitializeVM() {
// Disable compilation of natives by specifying an empty natives file.
FLAG_natives_file = "";
// Disable compilation of natives.
FLAG_disable_native_files = true;
// Enable generation of comments.
FLAG_debug_code = true;
......
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "execution.h"
#include "cctest.h"
using ::v8::Local;
using ::v8::String;
using ::v8::Script;
namespace i = ::v8::internal;
// Checks that a simple JS function call compiles and runs on the MIPS port
// with natives disabled (--disable-native-files) and the full compiler off.
TEST(MIPSFunctionCalls) {
// Disable compilation of natives.
i::FLAG_disable_native_files = true;
i::FLAG_full_compiler = false;
v8::HandleScope scope;
LocalContext env; // from cctest.h
const char* c_source = "function foo() { return 0x1234; }; foo();";
Local<String> source = ::v8::String::New(c_source);
Local<Script> script = ::v8::Script::Compile(source);
CHECK_EQ(0x1234, script->Run()->Int32Value());
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment