Commit 1c9542eb authored by palfia@homejinni.com

MIPS: Compile FastCloneShallowObjectStub using Crankshaft.

Port r13732 (686b0a45)

Original commit message:
This changes FastCloneShallowObjectStub to be compiled independent of
the target architecture. It also adds tracing to the deoptimizer for
compiled stubs and contains some minor bugfixes.

BUG=

Review URL: https://codereview.chromium.org/12389070

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13802 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 590a3f88
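
For orientation, a minimal sketch of the per-architecture piece this change leaves behind: a stub such as FastCloneShallowObjectStub now only declares its parameter registers and runtime fallback in an interface descriptor (first hunk below), while the stub body is generated from a shared, architecture-independent Hydrogen graph. The register-meaning comments come from the stack layout of the deleted hand-written stub; the V8 types and MIPS register names are assumed to be in scope, so treat this as a sketch rather than the committed code.

void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  // Parameter registers, in push order: a3 = literals array,
  // a2 = literal index (Smi), a1 = constant properties,
  // a0 = object literal flags (Smi).
  static Register registers[] = { a3, a2, a1, a0 };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->stack_parameter_count_ = NULL;
  // When the generated stub bails out, the deoptimizer reroutes the call
  // to this runtime function.
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
}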
@@ -39,6 +39,18 @@ namespace v8 {
namespace internal {
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { a3, a2, a1, a0 };
descriptor->register_param_count_ = 4;
descriptor->register_params_ = registers;
descriptor->stack_parameter_count_ = NULL;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
}
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -498,49 +510,6 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
// Stack layout on entry:
//
// [sp]: object literal flags.
// [sp + kPointerSize]: constant properties.
// [sp + (2 * kPointerSize)]: literal index.
// [sp + (3 * kPointerSize)]: literals array.
// Load boilerplate object into a3 and check if we need to create a
// boilerplate.
Label slow_case;
__ lw(a3, MemOperand(sp, 3 * kPointerSize));
__ lw(a0, MemOperand(sp, 2 * kPointerSize));
__ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(a3, t0, a3);
__ lw(a3, MemOperand(a3));
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ Branch(&slow_case, eq, a3, Operand(t0));
// Check that the boilerplate contains only fast properties and we can
// statically determine the instance size.
int size = JSObject::kHeaderSize + length_ * kPointerSize;
__ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset));
__ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset));
__ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2));
// Allocate the JS object and copy header together with all in-object
// properties from the boilerplate.
__ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
for (int i = 0; i < size; i += kPointerSize) {
__ lw(a1, FieldMemOperand(a3, i));
__ sw(a1, FieldMemOperand(v0, i));
}
// Return and remove the on-stack parameters.
__ DropAndRet(4);
__ bind(&slow_case);
__ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
}
// Takes a Smi and converts to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
......
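
The hunk above deletes the hand-written MIPS fast path; the same shallow-clone logic is now produced by the Crankshaft-compiled stub shared across architectures. A hedged, self-contained C++ model of that fast path follows (toy types and helpers, not V8 API; only the control flow mirrors the deleted MacroAssembler code):

#include <cstdlib>
#include <cstring>

namespace sketch {

const int kPointerSize = 4;                        // 32-bit MIPS
const int kJSObjectHeaderSize = 3 * kPointerSize;  // map, properties, elements

struct Boilerplate {
  int instance_size;                    // read from the object's Map in real V8
  unsigned char body[64];               // header plus in-object properties (toy bound)
};

// Returns NULL when the slow case applies, i.e. when the real stub tail-calls
// Runtime::kCreateObjectLiteralShallow.
unsigned char* FastCloneShallowObject(Boilerplate* const* literals,
                                      int literal_index,
                                      int property_count) {
  Boilerplate* boilerplate = literals[literal_index];
  if (boilerplate == NULL) return NULL;            // literal slot still undefined

  // Clone only when the statically known size matches the boilerplate's map.
  int size = kJSObjectHeaderSize + property_count * kPointerSize;
  if (boilerplate->instance_size != size) return NULL;
  if (size > static_cast<int>(sizeof(boilerplate->body))) return NULL;

  // Allocate and copy header plus in-object properties word for word.
  unsigned char* clone = static_cast<unsigned char*>(std::malloc(size));
  if (clone == NULL) return NULL;
  std::memcpy(clone, boilerplate->body, size);
  return clone;
}

}  // namespace sketch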
@@ -349,8 +349,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
}
void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
int frame_index) {
void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
int frame_index) {
//
// FROM TO
// | .... | | .... |
@@ -363,9 +363,9 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
// v +-------------------------+ +-------------------------|
// | COMPILED_STUB marker | | STUB_FAILURE marker |
// +-------------------------+ +-------------------------+
// | | | caller args.length_ |
// | ... | +-------------------------+
// | | | caller args.arguments_ |
// | ... | +-------------------------+
// | | | caller args.length_ |
// |-------------------------|<-sp +-------------------------+
// | caller args pointer |
// +-------------------------+
@@ -387,58 +387,77 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
isolate_->code_stub_interface_descriptor(major_key);
// The output frame must have room for all pushed register parameters
// and the standard stack frame slots.
int output_frame_size = StandardFrameConstants::kFixedFrameSize +
kPointerSize * descriptor->register_param_count_;
// and the standard stack frame slots. Include space for an argument
// object to the callee and optionally the space to pass the argument
// object to the stub failure handler.
int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
sizeof(Arguments) + kPointerSize;
int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
int input_frame_size = input_->GetFrameSize();
int output_frame_size = height_in_bytes + fixed_frame_size;
if (trace_) {
PrintF(" translating %s => StubFailureTrampolineStub, height=%d\n",
CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
height_in_bytes);
}
// Include space for an argument object to the callee and optionally
// the space to pass the argument object to the stub failure handler.
output_frame_size += sizeof(Arguments) + kPointerSize;
// The stub failure trampoline is a single frame.
FrameDescription* output_frame =
new(output_frame_size) FrameDescription(output_frame_size, 0);
new(output_frame_size) FrameDescription(output_frame_size, NULL);
output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
ASSERT(frame_index == 0);
output_[frame_index] = output_frame;
Code* notify_failure =
isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(notify_failure->entry()));
Code* trampoline = NULL;
int extra = descriptor->extra_expression_stack_count_;
StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
ASSERT(trampoline != NULL);
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
unsigned input_frame_size = input_->GetFrameSize();
intptr_t frame_ptr = input_->GetRegister(fp.code());
// The top address for the output frame can be computed from the input
// frame pointer and the output frame's height. Subtract space for the
// context and function slots.
intptr_t top_address = input_->GetRegister(fp.code()) - (2 * kPointerSize) -
height_in_bytes;
output_frame->SetTop(top_address);
// JSFunction continuation
// Read caller's PC (JSFunction continuation) from the input frame.
intptr_t input_frame_offset = input_frame_size - kPointerSize;
intptr_t output_frame_offset = output_frame_size - kPointerSize;
intptr_t value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
top_address + output_frame_offset, output_frame_offset, value);
}
// saved frame ptr
// Read caller's FP from the input frame, and set this frame's FP.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
intptr_t frame_ptr = input_->GetRegister(fp.code());
output_frame->SetRegister(fp.code(), frame_ptr);
output_frame->SetFp(frame_ptr);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
top_address + output_frame_offset, output_frame_offset, value);
}
// Restore context
// The context can be gotten from the input frame.
input_frame_offset -= kPointerSize;
value = input_->GetFrameSlot(input_frame_offset);
output_frame->SetRegister(cp.code(), value);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
top_address + output_frame_offset, output_frame_offset, value);
}
// Internal frame markers
// A marker value is used in place of the function.
output_frame_offset -= kPointerSize;
value = reinterpret_cast<intptr_t>(
Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (stub fail sentinel)\n",
top_address + output_frame_offset, output_frame_offset, value);
}
int caller_arg_count = 0;
if (descriptor->stack_parameter_count_ != NULL) {
@@ -451,15 +470,27 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
(caller_arg_count - 1) * kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.arguments\n",
top_address + output_frame_offset, output_frame_offset, value);
}
output_frame->SetFrameSlot(output_frame_offset, value);
output_frame_offset -= kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, caller_arg_count);
value = caller_arg_count;
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args.length\n",
top_address + output_frame_offset, output_frame_offset, value);
}
value = frame_ptr - (output_frame_size - output_frame_offset) -
StandardFrameConstants::kMarkerOffset;
output_frame_offset -= kPointerSize;
value = frame_ptr - (output_frame_size - output_frame_offset) -
StandardFrameConstants::kMarkerOffset + kPointerSize;
output_frame->SetFrameSlot(output_frame_offset, value);
if (trace_) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; args*\n",
top_address + output_frame_offset, output_frame_offset, value);
}
// Copy the register parameters to the failure frame.
for (int i = 0; i < descriptor->register_param_count_; ++i) {
@@ -467,14 +498,13 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
DoTranslateCommand(iterator, 0, output_frame_offset);
}
ASSERT(0 == output_frame_offset);
for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
double double_value = input_->GetDoubleRegister(i);
output_frame->SetDoubleRegister(i, double_value);
}
output_frame->SetRegister(fp.code(), frame_ptr);
output_frame->SetFp(frame_ptr);
ApiFunction function(descriptor->deoptimization_handler_);
ExternalReference xref(&function, ExternalReference::BUILTIN_CALL, isolate_);
intptr_t handler = reinterpret_cast<intptr_t>(xref.address());
@@ -485,6 +515,19 @@ void Deoptimizer::DoCompiledStubFrame(TranslationIterator* iterator,
output_frame->SetRegister(s0.code(), params);
output_frame->SetRegister(s1.code(), (params - 1) * kPointerSize);
output_frame->SetRegister(s2.code(), handler);
// Compute this frame's PC, state, and continuation.
Code* trampoline = NULL;
int extra = descriptor->extra_expression_stack_count_;
StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
ASSERT(trampoline != NULL);
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
Code* notify_failure =
isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(notify_failure->entry()));
}
......
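
To make the frame-size computation in DoComputeCompiledStubFrame concrete, here is a small, self-contained program reproducing the arithmetic with assumed 32-bit MIPS values for V8's constants (kPointerSize, sizeof(Arguments), StandardFrameConstants::kFixedFrameSize); the real values come from the V8 headers, so the printed numbers are illustrative only.

#include <cstdio>

int main() {
  const int kPointerSize = 4;                    // 32-bit target (assumed)
  const int register_param_count = 4;            // a3, a2, a1, a0 from the descriptor
  const int kArgumentsSize = 2 * kPointerSize;   // args.length_ + args.arguments_ slots
  const int kFixedFrameSize = 4 * kPointerSize;  // caller pc, caller fp, context, marker (assumed)

  // Register parameters, plus an Arguments object, plus a pointer to it.
  int height_in_bytes =
      kPointerSize * register_param_count + kArgumentsSize + kPointerSize;
  int output_frame_size = height_in_bytes + kFixedFrameSize;

  // Corresponds to the "translating ... height=%d" trace line added above.
  std::printf("height_in_bytes=%d output_frame_size=%d\n",
              height_in_bytes, output_frame_size);
  return 0;
}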
@@ -1587,7 +1587,7 @@ void FullCodeGenerator::EmitAccessor(Expression* expression) {
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
Handle<FixedArray> constant_properties = expr->constant_properties();
__ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
__ li(a1, Operand(constant_properties));
@@ -1598,12 +1598,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
} else if (flags != ObjectLiteral::kFastElements ||
} else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
} else {
FastCloneShallowObjectStub stub(properties_count);
......
@@ -2496,6 +2496,14 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
}
void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
__ lw(result, FieldMemOperand(object, HeapObject::kMapOffset));
__ lbu(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
}
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
@@ -5275,26 +5283,31 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
DeferredAllocate* deferred =
new(zone()) DeferredAllocate(this, instr);
Register size = ToRegister(instr->size());
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->temp1());
Register scratch2 = ToRegister(instr->temp2());
HAllocate* original_instr = instr->hydrogen();
if (original_instr->size()->IsConstant()) {
UNREACHABLE();
// Allocate memory for the object.
AllocationFlags flags = TAG_OBJECT;
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
__ AllocateInNewSpace(size,
result,
scratch,
scratch2,
deferred->entry(),
flags);
} else {
// Allocate memory for the object.
AllocationFlags flags = TAG_OBJECT;
if (original_instr->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
Register size = ToRegister(instr->size());
__ AllocateInNewSpace(size,
result,
scratch,
scratch2,
deferred->entry(),
TAG_OBJECT);
flags);
}
__ bind(deferred->exit());
@@ -5545,21 +5558,22 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
instr->hydrogen()->constant_properties();
// Set up the parameters to the stub/runtime call.
__ LoadHeapObject(t0, literals);
__ li(a3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
__ li(a2, Operand(constant_properties));
__ LoadHeapObject(a3, literals);
__ li(a2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
__ li(a1, Operand(constant_properties));
int flags = instr->hydrogen()->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
__ li(a1, Operand(Smi::FromInt(flags)));
__ Push(t0, a3, a2, a1);
__ li(a0, Operand(Smi::FromInt(flags)));
// Pick the right runtime function or stub to call.
int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
__ Push(a3, a2, a1, a0);
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
} else if (flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ Push(a3, a2, a1, a0);
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
} else {
FastCloneShallowObjectStub stub(properties_count);
......
@@ -971,6 +971,12 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
}
LInstruction* LChunkBuilder::DoInstanceSize(HInstanceSize* instr) {
LOperand* object = UseRegisterAtStart(instr->object());
return DefineAsRegister(new(zone()) LInstanceSize(object));
}
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
LOperand* receiver = UseRegisterAtStart(instr->receiver());
LOperand* function = UseRegisterAtStart(instr->function());
......
@@ -108,6 +108,7 @@ class LCodeGen;
V(In) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstanceSize) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(Uint32ToDouble) \
@@ -959,6 +960,19 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
};
class LInstanceSize: public LTemplateInstruction<1, 1, 0> {
public:
explicit LInstanceSize(LOperand* object) {
inputs_[0] = object;
}
LOperand* object() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceSize, "instance-size")
DECLARE_HYDROGEN_ACCESSOR(InstanceSize)
};
class LBoundsCheck: public LTemplateInstruction<0, 2, 0> {
public:
LBoundsCheck(LOperand* index, LOperand* length) {
......
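
The LInstanceSize instruction declared above is what lets the architecture-independent clone stub perform the boilerplate size guard that the deleted hand-written stub did with lw/lbu on Map::kInstanceSizeOffset. A hedged, self-contained illustration of the two loads follows (toy layouts, not V8's object model; real V8 stores this field in words, hence the size >> kPointerSizeLog2 comparison in the deleted code):

#include <cstdint>
#include <cstdio>

// Toy stand-ins for HeapObject and Map; only the field LInstanceSize reads is modeled.
struct Map { uint8_t instance_size_in_words; };
struct HeapObject { const Map* map; };

// Same shape as LCodeGen::DoInstanceSize: load the object's map, then the map's
// instance size; the generated stub compares this with the statically expected
// size of the object literal being cloned.
int InstanceSizeInWords(const HeapObject* object) {
  return object->map->instance_size_in_words;
}

int main() {
  Map map = { 8 };                       // e.g. an 8-word boilerplate
  HeapObject boilerplate = { &map };
  std::printf("instance size: %d words\n", InstanceSizeInWords(&boilerplate));
  return 0;
}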