Commit 1e08a785 authored by sgjesse@chromium.org's avatar sgjesse@chromium.org

ARM: Align full codegen with IA32 version

Use ToBooleanStub in DoTest instead of runtime call.
Try allocation in generated code in VisitRegExpLiteral.
Use TranscendentalCacheStub for sin, cos and log instead of calling runtime.

Minor "alignment" of the full codegen code across platforms.
Review URL: http://codereview.chromium.org/6597003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6953 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent f149e4cd
......@@ -574,13 +574,28 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
void FullCodeGenerator::DoTest(Label* if_true,
Label* if_false,
Label* fall_through) {
// Call the runtime to find the boolean value of the source and then
// translate it into control flow to the pair of labels.
__ push(result_register());
__ CallRuntime(Runtime::kToBool, 1);
// Emit the inlined tests assumed by the stub.
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(result_register(), ip);
__ b(eq, if_false);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
Split(eq, if_true, if_false, fall_through);
__ cmp(result_register(), ip);
__ b(eq, if_true);
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(result_register(), ip);
__ b(eq, if_false);
STATIC_ASSERT(kSmiTag == 0);
__ tst(result_register(), result_register());
__ b(eq, if_false);
__ JumpIfSmi(result_register(), if_true);
// Call the ToBoolean stub for all other cases.
ToBooleanStub stub(result_register());
__ CallStub(&stub);
__ tst(result_register(), result_register());
// The stub returns nonzero for true.
Split(ne, if_true, if_false, fall_through);
}
......@@ -796,9 +811,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Comment cmnt(masm_, "[ SwitchStatement");
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
ZoneList<CaseClause*>* clauses = stmt->cases();
......@@ -1062,8 +1077,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
if (scope()->is_function_scope() &&
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
scope()->is_function_scope() &&
info->num_literals() == 0 &&
!pretenure) {
FastNewClosureStub stub;
......@@ -1317,18 +1338,19 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
Label materialized;
// Registers will be used as follows:
// r5 = materialized value (RegExp literal)
// r4 = JS function, literals array
// r3 = literal index
// r2 = RegExp pattern
// r1 = RegExp flags
// r0 = temp + materialized value (RegExp literal)
// r0 = RegExp literal clone
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
__ ldr(r0, FieldMemOperand(r4, literal_offset));
__ ldr(r5, FieldMemOperand(r4, literal_offset));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
__ cmp(r5, ip);
__ b(ne, &materialized);
// Create regexp literal using runtime function.
......@@ -1338,20 +1360,27 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ mov(r1, Operand(expr->flags()));
__ Push(r4, r3, r2, r1);
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ mov(r5, r0);
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
__ push(r0);
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
__ push(r5);
__ mov(r0, Operand(Smi::FromInt(size)));
__ push(r0);
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
__ pop(r5);
__ bind(&allocated);
// After this, registers are used as follows:
// r0: Newly allocated regexp.
// r1: Materialized regexp.
// r5: Materialized regexp.
// r2: temp.
__ pop(r1);
__ CopyFields(r0, r1, r2.bit(), size / kPointerSize);
__ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
context()->Plug(r0);
}
......@@ -3186,37 +3215,40 @@ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the runtime.
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::SIN);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_sin, 1);
__ CallStub(&stub);
context()->Plug(r0);
}
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the runtime.
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::COS);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_cos, 1);
__ CallStub(&stub);
context()->Plug(r0);
}
void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the runtime function.
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_sqrt, 1);
__ CallStub(&stub);
context()->Plug(r0);
}
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the runtime function.
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_log, 1);
__ CallRuntime(Runtime::kMath_sqrt, 1);
context()->Plug(r0);
}
......@@ -3375,8 +3407,14 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) {
__ AbortIfNotString(r0);
}
__ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
__ IndexFromHash(r0, r0);
context()->Plug(r0);
}
......@@ -3538,9 +3576,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::SUB,
overwrite,
NO_UNARY_FLAGS);
GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
// GenericUnaryOpStub expects the argument to be in the
// accumulator register r0.
VisitForAccumulatorValue(expr->expression());
......@@ -4050,11 +4086,43 @@ Register FullCodeGenerator::context_register() {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
break;
case Code::STORE_IC:
__ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
default:
break;
}
__ Call(ic, mode);
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
break;
case Code::STORE_IC:
__ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
default:
break;
}
__ Call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
......
......@@ -548,7 +548,7 @@ void FullCodeGenerator::DoTest(Label* if_true,
__ j(equal, if_true);
__ cmp(result_register(), Factory::false_value());
__ j(equal, if_false);
ASSERT_EQ(0, kSmiTag);
STATIC_ASSERT(kSmiTag == 0);
__ test(result_register(), Operand(result_register()));
__ j(zero, if_false);
__ test(result_register(), Immediate(kSmiTagMask));
......@@ -655,6 +655,7 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
Property* prop = variable->AsProperty();
if (slot != NULL) {
switch (slot->type()) {
case Slot::PARAMETER:
......@@ -814,7 +815,6 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
EmitCallIC(ic, &patch_site);
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
......@@ -2390,7 +2390,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
Literal* key = prop->key()->AsLiteral();
if (key != NULL && key->handle()->IsSymbol()) {
// Call to a named property, use call IC.
VisitForStackValue(prop->obj());
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
......@@ -3401,7 +3403,6 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) {
......@@ -3791,6 +3792,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Label* if_true = NULL;
Label* if_false = NULL;
Label* fall_through = NULL;
// Notice that the labels are swapped.
context()->PrepareTest(&materialize_true, &materialize_false,
&if_false, &if_true, &fall_through);
......@@ -4386,6 +4388,22 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(&Counters::named_load_full, 1);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(&Counters::keyed_load_full, 1);
break;
case Code::STORE_IC:
__ IncrementCounter(&Counters::named_store_full, 1);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(&Counters::keyed_store_full, 1);
default:
break;
}
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
......
......@@ -552,7 +552,7 @@ void FullCodeGenerator::DoTest(Label* if_true,
__ j(equal, if_true);
__ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
__ j(equal, if_false);
ASSERT_EQ(0, kSmiTag);
STATIC_ASSERT(kSmiTag == 0);
__ SmiCompare(result_register(), Smi::FromInt(0));
__ j(equal, if_false);
Condition is_smi = masm_->CheckSmi(result_register());
......@@ -1032,8 +1032,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
bool pretenure) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
if (scope()->is_function_scope() &&
// space for nested functions that don't need literals cloning. If
// we're running with the --always-opt or the --prepare-always-opt
// flag, we need to use the runtime function so that the new function
// we are creating here gets a chance to have its code optimized and
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
scope()->is_function_scope() &&
info->num_literals() == 0 &&
!pretenure) {
FastNewClosureStub stub;
......@@ -3085,9 +3091,12 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) {
__ AbortIfNotString(rax);
}
__ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
ASSERT(String::kHashShift >= kSmiTagSize);
__ IndexFromHash(rax, rax);
......@@ -3800,6 +3809,22 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
switch (ic->kind()) {
case Code::LOAD_IC:
__ IncrementCounter(&Counters::named_load_full, 1);
break;
case Code::KEYED_LOAD_IC:
__ IncrementCounter(&Counters::keyed_load_full, 1);
break;
case Code::STORE_IC:
__ IncrementCounter(&Counters::named_store_full, 1);
break;
case Code::KEYED_STORE_IC:
__ IncrementCounter(&Counters::keyed_store_full, 1);
default:
break;
}
__ call(ic, RelocInfo::CODE_TARGET);
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment