Commit 0fb091fc authored by michael_dawson, committed by Commit bot

Contribution of PowerPC port (continuation of 422063005) - PPC dir update 2 - mark2

Rebase to latest and use a branch created by git new-branch to get a patch that
only has the required changes. Created a new branch, reset to HEAD, and then
cherry-picked the changes across.

	modified:   src/compiler/ppc/code-generator-ppc.cc
	modified:   src/compiler/ppc/instruction-selector-ppc.cc
	modified:   src/disassembler.cc
	modified:   src/ic/ppc/handler-compiler-ppc.cc
	modified:   src/ic/ppc/ic-compiler-ppc.cc
	modified:   src/ppc/assembler-ppc-inl.h
	modified:   src/ppc/assembler-ppc.cc
	modified:   src/ppc/assembler-ppc.h
	modified:   src/ppc/builtins-ppc.cc
	modified:   src/ppc/code-stubs-ppc.cc
	modified:   src/ppc/deoptimizer-ppc.cc
	modified:   src/ppc/disasm-ppc.cc
	modified:   src/ppc/full-codegen-ppc.cc
	modified:   src/ppc/macro-assembler-ppc.cc
	modified:   src/serialize.cc

R=danno@chromium.org, svenpanne@chromium.org

Review URL: https://codereview.chromium.org/935383002

Cr-Commit-Position: refs/heads/master@{#26762}
parent ef01ef07
......@@ -556,6 +556,14 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
AssembleArchJump(i.InputRpo(0));
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchLookupSwitch:
AssembleArchLookupSwitch(instr);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchNop:
// don't emit code for nops.
DCHECK_EQ(LeaveRC, i.OutputRCBit());
......@@ -1075,6 +1083,35 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
}
void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
PPCOperandConverter i(this, instr);
Register input = i.InputRegister(0);
for (size_t index = 2; index < instr->InputCount(); index += 2) {
__ Cmpi(input, Operand(i.InputInt32(static_cast<int>(index + 0))), r0);
__ beq(GetLabel(i.InputRpo(static_cast<int>(index + 1))));
}
AssembleArchJump(i.InputRpo(1));
}
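// Illustrative only (not part of the patch): with two hypothetical cases,
// 3 -> L3 and 7 -> L7, the loop above emits, in effect:
//   cmpi input, 3 ; beq L3
//   cmpi input, 7 ; beq L7
//   b    default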
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
PPCOperandConverter i(this, instr);
Register input = i.InputRegister(0);
int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
Label** cases = zone()->NewArray<Label*>(case_count);
for (int32_t index = 0; index < case_count; ++index) {
cases[index] = GetLabel(i.InputRpo(index + 2));
}
Label* const table = AddJumpTable(cases, case_count);
__ Cmpli(input, Operand(case_count), r0);
__ bge(GetLabel(i.InputRpo(1)));
__ mov_label_addr(kScratchReg, table);
__ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
__ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
__ Jump(kScratchReg);
}
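// Illustrative only: the dispatch above amounts to an unsigned bounds check
// followed by an indexed load from the label-address table that
// AssembleJumpTable emits:
//   cmpli input, case_count ; bge default
//   kScratchReg = &table
//   r0 = input << kPointerSizeLog2
//   kScratchReg = *(kScratchReg + r0) ; jump kScratchReg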
void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
isolate(), deoptimization_id, Deoptimizer::LAZY);
......@@ -1084,10 +1121,9 @@ void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
void CodeGenerator::AssemblePrologue() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
#if ABI_USES_FUNCTION_DESCRIPTORS
__ function_descriptor();
#endif
int register_save_area_size = 0;
RegList frame_saves = fp.bit();
__ mflr(r0);
......@@ -1114,12 +1150,11 @@ void CodeGenerator::AssemblePrologue() {
__ Prologue(info->IsCodePreAgingActive());
frame()->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
} else {
} else if (stack_slots > 0) {
__ StubPrologue();
frame()->SetRegisterSaveAreaSize(
StandardFrameConstants::kFixedFrameSizeFromFp);
}
int stack_slots = frame()->GetSpillSlotCount();
if (info()->is_osr()) {
// TurboFan OSR-compiled functions cannot be entered directly.
......@@ -1143,10 +1178,10 @@ void CodeGenerator::AssemblePrologue() {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int stack_slots = frame()->GetSpillSlotCount();
if (descriptor->kind() == CallDescriptor::kCallAddress) {
if (frame()->GetRegisterSaveAreaSize() > 0) {
// Remove this frame's spill slots first.
int stack_slots = frame()->GetSpillSlotCount();
if (stack_slots > 0) {
__ Add(sp, sp, stack_slots * kPointerSize, r0);
}
......@@ -1162,12 +1197,14 @@ void CodeGenerator::AssembleReturn() {
}
__ LeaveFrame(StackFrame::MANUAL);
__ Ret();
} else {
} else if (descriptor->IsJSFunctionCall() || stack_slots > 0) {
int pop_count = descriptor->IsJSFunctionCall()
? static_cast<int>(descriptor->JSParameterCount())
: 0;
__ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
__ Ret();
} else {
__ Ret();
}
}
......@@ -1333,6 +1370,13 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
}
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
for (size_t index = 0; index < target_count; ++index) {
__ emit_label_addr(targets[index]);
}
}
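// Each entry emitted above is a full pointer-sized code address (associated
// with an internal-reference relocation), which is why AssembleArchTableSwitch
// scales the index by kPointerSizeLog2 and loads the target with LoadPX.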
void CodeGenerator::AddNopForSmiCodeInlining() {
// We do not insert nops for inlined Smi code.
}
......
......@@ -1235,6 +1235,67 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
}
void InstructionSelector::VisitSwitch(Node* node, BasicBlock* default_branch,
BasicBlock** case_branches,
int32_t* case_values, size_t case_count,
int32_t min_value, int32_t max_value) {
PPCOperandGenerator g(this);
InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
InstructionOperand default_operand = g.Label(default_branch);
// Note that {value_range} can be 0 if {min_value} is -2^31 and {max_value}
// is 2^31-1, so don't assume that it's non-zero below.
size_t value_range =
1u + bit_cast<uint32_t>(max_value) - bit_cast<uint32_t>(min_value);
// Determine whether to issue an ArchTableSwitch or an ArchLookupSwitch
// instruction.
size_t table_space_cost = 4 + value_range;
size_t table_time_cost = 3;
size_t lookup_space_cost = 3 + 2 * case_count;
size_t lookup_time_cost = case_count;
if (case_count > 0 &&
table_space_cost + 3 * table_time_cost <=
lookup_space_cost + 3 * lookup_time_cost &&
min_value > std::numeric_limits<int32_t>::min()) {
InstructionOperand index_operand = value_operand;
if (min_value) {
index_operand = g.TempRegister();
Emit(kPPC_Sub32, index_operand, value_operand,
g.TempImmediate(min_value));
}
size_t input_count = 2 + value_range;
auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
inputs[0] = index_operand;
std::fill(&inputs[1], &inputs[input_count], default_operand);
for (size_t index = 0; index < case_count; ++index) {
size_t value = case_values[index] - min_value;
BasicBlock* branch = case_branches[index];
DCHECK_LE(0u, value);
DCHECK_LT(value + 2, input_count);
inputs[value + 2] = g.Label(branch);
}
Emit(kArchTableSwitch, 0, nullptr, input_count, inputs, 0, nullptr)
->MarkAsControl();
return;
}
// Generate a sequence of conditional jumps.
size_t input_count = 2 + case_count * 2;
auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
inputs[0] = value_operand;
inputs[1] = default_operand;
for (size_t index = 0; index < case_count; ++index) {
int32_t value = case_values[index];
BasicBlock* branch = case_branches[index];
inputs[index * 2 + 2 + 0] = g.TempImmediate(value);
inputs[index * 2 + 2 + 1] = g.Label(branch);
}
Emit(kArchLookupSwitch, 0, nullptr, input_count, inputs, 0, nullptr)
->MarkAsControl();
}
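To make the cost heuristic above concrete, here is a minimal standalone C++
sketch, with made-up case counts, that evaluates the same comparison the
selector uses (space cost weighted against three times the time cost):

#include <cstddef>
#include <cstdio>

int main() {
  // Hypothetical switch: 4 cases spanning values 0..7 (value_range == 8).
  size_t case_count = 4;
  size_t value_range = 8;
  size_t table_cost = (4 + value_range) + 3 * 3;               // space + 3 * time
  size_t lookup_cost = (3 + 2 * case_count) + 3 * case_count;  // space + 3 * time
  std::printf("table=%zu lookup=%zu -> %s\n", table_cost, lookup_cost,
              table_cost <= lookup_cost ? "kArchTableSwitch"
                                        : "kArchLookupSwitch");
  return 0;
}

Here the table strategy wins (21 vs. 23); a sparser switch, say the same 4
cases spread over 1000 values, would flip the choice to kArchLookupSwitch.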
void InstructionSelector::VisitWord32Equal(Node* const node) {
FlagsContinuation cont(kEqual, node);
Int32BinopMatcher m(node);
......
......@@ -85,14 +85,11 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,
} else {
// No relocation information when printing code stubs.
}
#if !V8_TARGET_ARCH_PPC
int constants = -1; // no constants being decoded at the start
#endif
while (pc < end) {
// First decode instruction so that we know its length.
byte* prev_pc = pc;
#if !V8_TARGET_ARCH_PPC
if (constants > 0) {
SNPrintF(decode_buffer,
"%08x constant",
......@@ -121,25 +118,6 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,
pc += d.InstructionDecode(decode_buffer, pc);
}
}
#else // !V8_TARGET_ARCH_PPC
#if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL
// Function descriptors are specially decoded and skipped.
// Other internal references (load of ool constant pool pointer)
// are not, since they are encoded as a regular mov sequence.
int skip;
if (it != NULL && !it->done() && it->rinfo()->pc() == pc &&
it->rinfo()->rmode() == RelocInfo::INTERNAL_REFERENCE &&
(skip = Assembler::DecodeInternalReference(decode_buffer, pc))) {
pc += skip;
} else {
decode_buffer[0] = '\0';
pc += d.InstructionDecode(decode_buffer, pc);
}
#else
decode_buffer[0] = '\0';
pc += d.InstructionDecode(decode_buffer, pc);
#endif // ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL
#endif // !V8_TARGET_ARCH_PPC
// Collect RelocInfo for this instruction (prev_pc .. pc-1)
List<const char*> comments(4);
......
......@@ -17,9 +17,8 @@ namespace internal {
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
MacroAssembler* masm, Handle<HeapType> type, Register receiver,
Register holder, int accessor_index, int expected_arguments,
Register scratch) {
MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
int accessor_index, int expected_arguments, Register scratch) {
// ----------- S t a t e -------------
// -- r3 : receiver
// -- r5 : name
......@@ -32,7 +31,7 @@ void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
DCHECK(!holder.is(scratch));
DCHECK(!receiver.is(scratch));
// Call the JavaScript getter with the receiver on the stack.
if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
if (map->IsJSGlobalObjectMap()) {
// Swap in the global receiver.
__ LoadP(scratch,
FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
......@@ -57,9 +56,8 @@ void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
MacroAssembler* masm, Handle<HeapType> type, Register receiver,
Register holder, int accessor_index, int expected_arguments,
Register scratch) {
MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
int accessor_index, int expected_arguments, Register scratch) {
// ----------- S t a t e -------------
// -- lr : return address
// -----------------------------------
......@@ -74,7 +72,7 @@ void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
DCHECK(!receiver.is(scratch));
DCHECK(!value().is(scratch));
// Call the JavaScript setter with receiver and value on the stack.
if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
if (map->IsJSGlobalObjectMap()) {
// Swap in the global receiver.
__ LoadP(scratch,
FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
......@@ -414,7 +412,7 @@ Register PropertyHandlerCompiler::CheckPrototypes(
Register object_reg, Register holder_reg, Register scratch1,
Register scratch2, Handle<Name> name, Label* miss,
PrototypeCheckType check) {
Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
Handle<Map> receiver_map = map();
// Make sure there's no overlap between holder and object registers.
DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
......@@ -426,8 +424,8 @@ Register PropertyHandlerCompiler::CheckPrototypes(
int depth = 0;
Handle<JSObject> current = Handle<JSObject>::null();
if (type()->IsConstant()) {
current = Handle<JSObject>::cast(type()->AsConstant()->Value());
if (receiver_map->IsJSGlobalObjectMap()) {
current = isolate()->global_object();
}
Handle<JSObject> prototype = Handle<JSObject>::null();
Handle<Map> current_map = receiver_map;
......
......@@ -30,7 +30,7 @@ void PropertyICCompiler::GenerateRuntimeSetProperty(
#define __ ACCESS_MASM(masm())
Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types,
Handle<Code> PropertyICCompiler::CompilePolymorphic(MapHandleList* maps,
CodeHandleList* handlers,
Handle<Name> name,
Code::StubType type,
......@@ -57,7 +57,7 @@ Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types,
}
Label number_case;
Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
Label* smi_target = IncludesNumberMap(maps) ? &number_case : &miss;
__ JumpIfSmi(receiver(), smi_target);
// Polymorphic keyed stores may use the map register
......@@ -65,17 +65,16 @@ Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types,
DCHECK(kind() != Code::KEYED_STORE_IC ||
map_reg.is(ElementTransitionAndStoreDescriptor::MapRegister()));
int receiver_count = types->length();
int receiver_count = maps->length();
int number_of_handled_maps = 0;
__ LoadP(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
for (int current = 0; current < receiver_count; ++current) {
Handle<HeapType> type = types->at(current);
Handle<Map> map = IC::TypeToMap(*type, isolate());
Handle<Map> map = maps->at(current);
if (!map->is_deprecated()) {
number_of_handled_maps++;
Handle<WeakCell> cell = Map::WeakCellForMap(map);
__ CmpWeakValue(map_reg, cell, scratch2());
if (type->Is(HeapType::Number())) {
if (map->instance_type() == HEAP_NUMBER_TYPE) {
DCHECK(!number_case.is_unused());
__ bind(&number_case);
}
......
......@@ -51,14 +51,11 @@ bool CpuFeatures::SupportsCrankshaft() { return true; }
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
#if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL
if (RelocInfo::IsInternalReference(rmode_)) {
if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
// absolute code pointer inside code object moves with the code object.
Assembler::RelocateInternalReference(pc_, delta, 0, icache_flush_mode);
Assembler::RelocateInternalReference(pc_, delta, 0, rmode_,
icache_flush_mode);
}
#endif
// We do not use pc relative addressing on PPC, so there is
// nothing else to do.
}
......
......@@ -637,6 +637,12 @@ class Assembler : public AssemblerBase {
// but it may be bound only once.
void bind(Label* L); // binds an unbound label L to the current code position
// Links a label at the current pc_offset(). If already bound, returns the
// bound position. If already linked, returns the position of the prior link.
// Otherwise, returns the current pc_offset().
int link(Label* L);
// Determines if Label is bound and near enough so that a single
// branch instruction can be used to reach it.
bool is_near(Label* L, Condition cond);
......@@ -644,7 +650,10 @@ class Assembler : public AssemblerBase {
// Returns the branch offset to the given label from the current code position
// Links the label to the current position if it is still unbound
// Manages the jump elimination optimization if the second parameter is true.
int branch_offset(Label* L, bool jump_elimination_allowed);
int branch_offset(Label* L, bool jump_elimination_allowed) {
int position = link(L);
return position - pc_offset();
}
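// Example: if L is already bound at position 100 and pc_offset() is 120,
// link(L) returns 100 and branch_offset() yields -20, a backward branch.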
// Puts a label's target address at the given position.
// The high 8 bits are set to zero.
......@@ -1076,11 +1085,21 @@ class Assembler : public AssemblerBase {
void cmplw(Register src1, Register src2, CRegister cr = cr7);
void mov(Register dst, const Operand& src);
void bitwise_mov(Register dst, intptr_t value);
void bitwise_mov32(Register dst, int32_t value);
// Load the position of the label relative to the generated code object
// pointer in a register.
void mov_label_offset(Register dst, Label* label);
// Load the address of the label in a register and associate with an
// internal reference relocation.
void mov_label_addr(Register dst, Label* label);
// Emit the address of the label (i.e. a jump table entry) and associate with
// an internal reference relocation.
void emit_label_addr(Label* label);
// Multiply instructions
void mul(Register dst, Register src1, Register src2, OEBit s = LeaveOE,
RCBit r = LeaveRC);
......@@ -1289,7 +1308,7 @@ class Assembler : public AssemblerBase {
// for inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
void emit_ptr(uintptr_t data);
void emit_ptr(intptr_t data);
PositionsRecorder* positions_recorder() { return &positions_recorder_; }
......@@ -1369,12 +1388,17 @@ class Assembler : public AssemblerBase {
}
#endif
#if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL
static void RelocateInternalReference(
Address pc, intptr_t delta, Address code_start,
Address pc, intptr_t delta, Address code_start, RelocInfo::Mode rmode,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
static int DecodeInternalReference(Vector<char> buffer, Address pc);
#endif
void AddBoundInternalReference(int position) {
internal_reference_positions_.push_back(position);
}
void AddBoundInternalReferenceLoad(int position) {
internal_reference_load_positions_.push_back(position);
}
protected:
// Relocation for a type-recording IC has the AST id added to it. This
......@@ -1440,6 +1464,12 @@ class Assembler : public AssemblerBase {
static const int kMaxRelocSize = RelocInfoWriter::kMaxSize;
RelocInfoWriter reloc_info_writer;
// Internal reference positions, required for (potential) patching in
// GrowBuffer(); contains only those internal references whose labels
// are already bound.
std::deque<int> internal_reference_positions_;
std::deque<int> internal_reference_load_positions_;
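// The second deque tracks bound internal references that are loaded into a
// register (see AddBoundInternalReferenceLoad above) rather than emitted as
// pointer-sized data.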
// The bound position, before this we cannot do instruction elimination.
int last_bound_pos_;
......
......@@ -760,7 +760,9 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
// receiver is the hole.
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ Push(r7, ip);
// smi arguments count, new.target, receiver
__ Push(r7, r6, ip);
// Set up pointer to last argument.
__ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));
......@@ -772,7 +774,8 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
// r7: number of arguments (smi-tagged)
// cr0: compare against zero of arguments
// sp[0]: receiver
// sp[1]: number of arguments (smi-tagged)
// sp[1]: new.target
// sp[2]: number of arguments (smi-tagged)
Label loop, no_args;
__ beq(&no_args, cr0);
__ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
......@@ -784,6 +787,23 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
__ bdnz(&loop);
__ bind(&no_args);
__ addi(r3, r3, Operand(1));
// Handle step in.
Label skip_step_in;
ExternalReference debug_step_in_fp =
ExternalReference::debug_step_in_fp_address(masm->isolate());
__ mov(r5, Operand(debug_step_in_fp));
__ LoadP(r5, MemOperand(r5));
__ and_(r0, r5, r5, SetRC);
__ beq(&skip_step_in, cr0);
__ Push(r3, r4, r4);
__ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
__ Pop(r3, r4);
__ bind(&skip_step_in);
// Call the function.
// r3: number of arguments
// r4: constructor function
......
......@@ -1070,22 +1070,11 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// know where the return address is. The CEntryStub is unmovable, so
// we can store the address on the stack to be able to find it again and
// we never have to restore it, because it will not change.
// Compute the return address in lr to return to after the jump below. Pc is
// already at '+ 8' from the current instruction but return is after three
// instructions so add another 4 to pc to get the return address.
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
Label here;
__ b(&here, SetLK);
__ bind(&here);
__ mflr(r8);
// Constant used below is dependent on size of Call() macro instructions
__ addi(r0, r8, Operand(20));
__ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
__ Call(target);
}
Label after_call;
__ mov_label_addr(r0, &after_call);
__ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
__ Call(target);
__ bind(&after_call);
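// Note: computing the return address via mov_label_addr removes the old
// dependence on the exact instruction count of the Call() macro; the stored
// address now tracks the bound position of after_call directly.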
#if !ABI_RETURNS_OBJECT_PAIRS_IN_REGS
// If return value is on the stack, pop it to registers.
......@@ -1593,6 +1582,7 @@ void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
CHECK(!has_new_target());
// The displacement is the offset of the last parameter (if any)
// relative to the frame pointer.
const int kDisplacement =
......@@ -1653,6 +1643,8 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
// sp[1] : receiver displacement
// sp[2] : function
CHECK(!has_new_target());
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
__ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
......@@ -1683,6 +1675,8 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// r9 : allocated object (tagged)
// r11 : mapped parameter count (tagged)
CHECK(!has_new_target());
__ LoadP(r4, MemOperand(sp, 0 * kPointerSize));
// r4 = parameter count (tagged)
......@@ -1965,6 +1959,10 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
__ LoadP(r4, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
if (has_new_target()) {
// Subtract 1 from smi-tagged arguments count.
__ SubSmiLiteral(r4, r4, Smi::FromInt(1), r0);
}
__ StoreP(r4, MemOperand(sp, 0));
__ SmiToPtrArrayOffset(r6, r4);
__ add(r6, r5, r6);
......@@ -2051,6 +2049,31 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
}
void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
// Stack layout on entry.
// sp[0] : index of rest parameter
// sp[4] : number of parameters
// sp[8] : receiver displacement
Label runtime;
__ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
__ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ bne(&runtime);
// Patch the arguments.length and the parameters pointer.
__ LoadP(r4, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ StoreP(r4, MemOperand(sp, 1 * kPointerSize));
__ SmiToPtrArrayOffset(r6, r4);
__ add(r6, r5, r6);
__ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
__ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
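// Both paths continue into the runtime call below; the adaptor-frame case
// only patches arguments.length and the parameter pointer first.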
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewRestParam, 3, 1);
}
void RegExpExecStub::Generate(MacroAssembler* masm) {
// Just jump directly to runtime if native RegExp is not selected at compile
// time or if regexp entry in generated code is turned off runtime switch or
......@@ -2760,7 +2783,13 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
}
// Pass function as original constructor.
__ mr(r6, r4);
if (IsSuperConstructorCall()) {
__ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
__ addi(r7, r7, Operand(kPointerSize));
__ LoadPX(r6, MemOperand(sp, r7));
} else {
__ mr(r6, r4);
}
// Jump to the function-specific construct stub.
Register jmp_reg = r7;
......
......@@ -172,6 +172,9 @@ void Deoptimizer::EntryGenerator::Generate() {
}
}
__ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
__ StoreP(fp, MemOperand(ip));
const int kSavedRegistersAreaSize =
(kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
......
......@@ -988,6 +988,15 @@ int Decoder::InstructionDecode(byte* instr_ptr) {
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_, "%08x ",
instr->InstructionBits());
#if ABI_USES_FUNCTION_DESCRIPTORS
// The first field will be identified as a jump table entry. We emit the rest
// of the structure as zero, so just skip past them.
if (instr->InstructionBits() == 0) {
Format(instr, "constant");
return Instruction::kInstrSize;
}
#endif
switch (instr->OpcodeValue() << 26) {
case TWI: {
PrintSoftwareInterrupt(instr->SvcValue());
......
......@@ -202,7 +202,7 @@ void FullCodeGenerator::Generate() {
bool need_write_barrier = true;
if (FLAG_harmony_scoping && info->scope()->is_script_scope()) {
__ push(r4);
__ Push(info->scope()->GetScopeInfo());
__ Push(info->scope()->GetScopeInfo(info->isolate()));
__ CallRuntime(Runtime::kNewScriptContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
......@@ -245,6 +245,25 @@ void FullCodeGenerator::Generate() {
}
}
// Possibly allocate RestParameters
int rest_index;
Variable* rest_param = scope()->rest_parameter(&rest_index);
if (rest_param) {
Comment cmnt(masm_, "[ Allocate rest parameter array");
int num_parameters = info->scope()->num_parameters();
int offset = num_parameters * kPointerSize;
__ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
__ mov(r5, Operand(Smi::FromInt(num_parameters)));
__ mov(r4, Operand(Smi::FromInt(rest_index)));
__ Push(r6, r5, r4);
RestParamAccessStub stub(isolate());
__ CallStub(&stub);
SetVar(rest_param, r3, r4, r5);
}
Variable* arguments = scope()->arguments();
if (arguments != NULL) {
// Function uses arguments object.
......@@ -266,15 +285,19 @@ void FullCodeGenerator::Generate() {
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adaptor frame.
ArgumentsAccessStub::HasNewTarget has_new_target =
IsSubclassConstructor(info->function()->kind())
? ArgumentsAccessStub::HAS_NEW_TARGET
: ArgumentsAccessStub::NO_NEW_TARGET;
ArgumentsAccessStub::Type type;
if (is_strict(language_mode())) {
if (is_strict(language_mode()) || !is_simple_parameter_list()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
ArgumentsAccessStub stub(isolate(), type);
ArgumentsAccessStub stub(isolate(), type, has_new_target);
__ CallStub(&stub);
SetVar(arguments, r3, r4, r5);
......@@ -432,7 +455,11 @@ void FullCodeGenerator::EmitReturnSequence() {
// sequence.
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
int32_t arg_count = info_->scope()->num_parameters() + 1;
if (IsSubclassConstructor(info_->function()->kind())) {
arg_count++;
}
int32_t sp_delta = arg_count * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
__ RecordJSReturn();
int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
......@@ -3044,8 +3071,7 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
}
void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
DCHECK(super_ref != NULL);
void FullCodeGenerator::EmitLoadSuperConstructor() {
__ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(r3);
__ CallRuntime(Runtime::kGetPrototype, 1);
......@@ -3236,20 +3262,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
SuperReference* super_ref = expr->expression()->AsSuperReference();
EmitLoadSuperConstructor(super_ref);
__ push(result_register());
Variable* this_var = super_ref->this_var()->var();
if (!ValidateSuperCall(expr)) return;
Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
GetVar(result_register(), new_target_var);
__ Push(result_register());
GetVar(r3, this_var);
__ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
Label uninitialized_this;
__ beq(&uninitialized_this);
__ mov(r3, Operand(this_var->name()));
__ push(r3);
__ CallRuntime(Runtime::kThrowReferenceError, 1);
__ bind(&uninitialized_this);
EmitLoadSuperConstructor();
__ push(result_register());
// Push the arguments ("left-to-right") on the stack.
ZoneList<Expression*>* args = expr->arguments();
......@@ -3279,12 +3298,24 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
__ Move(r5, FeedbackVector());
__ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
// TODO(dslomov): use a different stub and propagate new.target.
CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Drop(1);
RecordJSReturnSite(expr);
SuperReference* super_ref = expr->expression()->AsSuperReference();
Variable* this_var = super_ref->this_var()->var();
GetVar(r4, this_var);
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
Label uninitialized_this;
__ beq(&uninitialized_this);
__ mov(r4, Operand(this_var->name()));
__ push(r4);
__ CallRuntime(Runtime::kThrowReferenceError, 1);
__ bind(&uninitialized_this);
EmitVariableAssignment(this_var, Token::INIT_CONST);
context()->Plug(r3);
}
......@@ -4153,6 +4184,59 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
Variable* new_target_var = scope()->DeclarationScope()->new_target_var();
GetVar(result_register(), new_target_var);
__ Push(result_register());
EmitLoadSuperConstructor();
__ mr(r4, result_register());
__ Push(r4);
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
__ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
__ beq(&adaptor_frame);
// default constructor has no arguments, so no adaptor frame means no args.
__ li(r3, Operand::Zero());
__ b(&args_set_up);
// Copy arguments from adaptor frame.
{
__ bind(&adaptor_frame);
__ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(r3);
// Subtract 1 from arguments count, for new.target.
__ subi(r3, r3, Operand(1));
// Get arguments pointer in r5.
__ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
__ add(r5, r5, r0);
__ addi(r5, r5, Operand(StandardFrameConstants::kCallerSPOffset));
Label loop;
__ mtctr(r3);
__ bind(&loop);
// Pre-decrement in order to skip receiver.
__ LoadPU(r6, MemOperand(r5, -kPointerSize));
__ Push(r6);
__ bdnz(&loop);
}
__ bind(&args_set_up);
CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Drop(1);
context()->Plug(result_register());
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
......
......@@ -675,14 +675,15 @@ void MacroAssembler::LoadConstantPoolPointerRegister(
} else {
DCHECK(access_method == CONSTRUCT_INTERNAL_REFERENCE);
base = kConstantPoolRegister;
ConstantPoolUnavailableScope constant_pool_unavailable(this);
RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
// CheckBuffer() is called too frequently. This will pre-grow
// the buffer if needed to avoid splitting the relocation and instructions
EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize);
uintptr_t code_start = reinterpret_cast<uintptr_t>(pc_) - pc_offset();
mov(base, Operand(code_start, RelocInfo::INTERNAL_REFERENCE));
intptr_t code_start = reinterpret_cast<intptr_t>(pc_) - pc_offset();
AddBoundInternalReferenceLoad(pc_offset());
bitwise_mov(base, code_start);
}
LoadP(kConstantPoolRegister, MemOperand(base, constant_pool_offset));
}
......
......@@ -899,8 +899,7 @@ void Deserializer::ReadObject(int space_number, Object** write_back) {
DCHECK(space_number != CODE_SPACE);
}
#endif
#if V8_TARGET_ARCH_PPC && \
(ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL)
#if V8_TARGET_ARCH_PPC
// If we're on a platform that uses function descriptors
// these jump tables make use of RelocInfo::INTERNAL_REFERENCE.
// As the V8 serialization code doesn't handle that relocation type
......@@ -909,9 +908,10 @@ void Deserializer::ReadObject(int space_number, Object** write_back) {
Code* code = reinterpret_cast<Code*>(HeapObject::FromAddress(address));
for (RelocIterator it(code); !it.done(); it.next()) {
RelocInfo::Mode rmode = it.rinfo()->rmode();
if (rmode == RelocInfo::INTERNAL_REFERENCE) {
if (RelocInfo::IsInternalReference(rmode) ||
RelocInfo::IsInternalReferenceEncoded(rmode)) {
Assembler::RelocateInternalReference(it.rinfo()->pc(), 0,
code->instruction_start());
code->instruction_start(), rmode);
}
}
}
......