Commit 9a4eaa72 authored by palfia@homejinni.com

MIPS: Reland and fix "Fix LoadFieldByIndex to take mutable heap-numbers into account."

Port r20358 (5e9b653d)

BUG=
R=plind44@gmail.com

Review URL: https://codereview.chromium.org/219163010

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20369 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 5607582f
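
Context for the port (editorial sketch, not part of the commit): the underlying fix makes LoadFieldByIndex treat a field index with its low bit set as referring to a mutable HeapNumber box, and the new deferred path calls Runtime::kLoadMutableDouble so the load returns a freshly allocated copy of that box instead of the box itself. The standalone C++ sketch below only illustrates that aliasing problem and the copy-on-load idea; HeapNumber, Object, EncodeIndex and LoadMutableDouble here are simplified stand-ins, not V8 types or APIs, and the in-object/out-of-object index handling shown in the diff is omitted.

// Illustration only: why a load of a mutable heap-number field must copy the box.
#include <cassert>
#include <memory>
#include <vector>

// A "mutable heap number": the object keeps one box per double field and
// writes new values into it in place, so handing out the box would alias
// later stores into the loaded value.
struct HeapNumber {
  double value;
};

struct Object {
  std::vector<std::shared_ptr<HeapNumber>> fields;
};

// Simplified index encoding: bit 0 set means "field holds a mutable double",
// the remaining bits are the field index (the real code works on smis).
int EncodeIndex(int field_index, bool is_mutable_double) {
  return (field_index << 1) | (is_mutable_double ? 1 : 0);
}

// The slow path: allocate a fresh box holding the current value, which is
// the effect the deferred runtime call has in the generated code.
std::shared_ptr<HeapNumber> LoadMutableDouble(const Object& object, int field_index) {
  return std::make_shared<HeapNumber>(HeapNumber{object.fields[field_index]->value});
}

std::shared_ptr<HeapNumber> LoadFieldByIndex(const Object& object, int encoded_index) {
  const int field_index = encoded_index >> 1;
  if (encoded_index & 1) {
    // Mutable double: return a copy of the box, never the box itself.
    return LoadMutableDouble(object, field_index);
  }
  // Ordinary field: returning the stored value directly is fine.
  return object.fields[field_index];
}

int main() {
  Object o;
  o.fields.push_back(std::make_shared<HeapNumber>(HeapNumber{1.5}));

  auto loaded = LoadFieldByIndex(o, EncodeIndex(0, /*is_mutable_double=*/true));
  o.fields[0]->value = 2.5;  // Later in-place store into the field's box.

  // Because the load copied the box, it still observes the old value.
  assert(loaded->value == 1.5);
  return 0;
}
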
@@ -5792,13 +5792,60 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
}


void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                           Register result,
                                           Register object,
                                           Register index) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ Push(object, index);
  __ mov(cp, zero_reg);
  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(v0, result);
}


void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register result,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          result_(result),
          object_(object),
          index_(index) {
    }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }

   private:
    LLoadFieldByIndex* instr_;
    Register result_;
    Register object_;
    Register index_;
  };

  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(
      this, instr, result, object, index);

  Label out_of_object, done;

  __ And(scratch, index, Operand(Smi::FromInt(1)));
  __ Branch(deferred->entry(), ne, scratch, Operand(zero_reg));
  __ sra(index, index, 1);

  __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
  __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize);  // In delay slot.

@@ -5814,6 +5861,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  __ Subu(scratch, result, scratch);
  __ lw(result, FieldMemOperand(scratch,
                                FixedArray::kHeaderSize - kPointerSize));
  __ bind(deferred->exit());
  __ bind(&done);
}
@@ -140,6 +140,10 @@ class LCodeGen: public LCodeGenBase {
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
@@ -2492,7 +2492,9 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
  LOperand* object = UseRegister(instr->object());
  LOperand* index = UseRegister(instr->index());
  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
  LInstruction* result = DefineSameAsFirst(load);
  return AssignPointerMap(result);
}