Commit a3465838 authored by michael_dawson's avatar michael_dawson Committed by Commit bot

Contribution of PowerPC port (continuation of 422063005) - currency

Contribution of PowerPC port (continuation of 422063005, 817143002,
866843003, and 901083004). This patch updates the ppc directories
to make them current with changes in common code, removes the
optimization to use the ool constant pool, and excludes tests that
don't pass under the ppc simulator given a 240s timeout.

Subsequent patches will cover:
   - remaining optimizations for PPC
   - remaining AIX changes not resolved by the 4.8 compiler (4.8 is only recently available for AIX)
   - incremental updates required to ppc directories due to platform-specific changes made
    in Google repos while we complete the above steps.

	modified:   src/compiler/ppc/code-generator-ppc.cc
	modified:   src/ic/ppc/handler-compiler-ppc.cc
	modified:   src/ppc/assembler-ppc-inl.h
	modified:   src/ppc/assembler-ppc.cc
	modified:   src/ppc/assembler-ppc.h
	modified:   src/ppc/builtins-ppc.cc
	modified:   src/ppc/code-stubs-ppc.cc
	modified:   src/ppc/debug-ppc.cc
	modified:   src/ppc/deoptimizer-ppc.cc
	modified:   src/ppc/frames-ppc.cc
	modified:   src/ppc/frames-ppc.h
	modified:   src/ppc/full-codegen-ppc.cc
	modified:   src/ppc/lithium-codegen-ppc.cc
	modified:   src/ppc/lithium-ppc.cc
	modified:   src/ppc/lithium-ppc.h
	modified:   src/ppc/macro-assembler-ppc.cc
	modified:   src/ppc/macro-assembler-ppc.h
	modified:   test/cctest/cctest.status
	modified:   test/mjsunit/mjsunit.status

R=danno@chromium.org, svenpanne@chromium.org

BUG=

Review URL: https://codereview.chromium.org/965823002

Cr-Commit-Position: refs/heads/master@{#26951}
parent 11d97bf5
......@@ -52,7 +52,7 @@ class PPCOperandConverter FINAL : public InstructionOperandConverter {
return false;
}
Operand InputImmediate(int index) {
Operand InputImmediate(size_t index) {
Constant constant = ToConstant(instr_->InputAt(index));
switch (constant.type()) {
case Constant::kInt32:
......@@ -76,8 +76,8 @@ class PPCOperandConverter FINAL : public InstructionOperandConverter {
return Operand::Zero();
}
MemOperand MemoryOperand(AddressingMode* mode, int* first_index) {
const int index = *first_index;
MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
const size_t index = *first_index;
*mode = AddressingModeField::decode(instr_->opcode());
switch (*mode) {
case kMode_None:
......@@ -93,7 +93,7 @@ class PPCOperandConverter FINAL : public InstructionOperandConverter {
return MemOperand(r0);
}
MemOperand MemoryOperand(AddressingMode* mode, int first_index = 0) {
MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
return MemoryOperand(mode, &first_index);
}
......@@ -109,7 +109,7 @@ class PPCOperandConverter FINAL : public InstructionOperandConverter {
};
static inline bool HasRegisterInput(Instruction* instr, int index) {
static inline bool HasRegisterInput(Instruction* instr, size_t index) {
return instr->InputAt(index)->IsRegister();
}
......@@ -369,7 +369,7 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_STORE_FLOAT(asm_instr, asm_instrx) \
do { \
int index = 0; \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, &index); \
DoubleRegister value = i.InputDoubleRegister(index); \
......@@ -384,7 +384,7 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx) \
do { \
int index = 0; \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, &index); \
Register value = i.InputRegister(index); \
......@@ -401,8 +401,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width) \
do { \
DoubleRegister result = i.OutputDoubleRegister(); \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, 0); \
MemOperand operand = i.MemoryOperand(&mode, index); \
DCHECK_EQ(kMode_MRR, mode); \
Register offset = operand.rb(); \
__ extsw(offset, offset); \
......@@ -427,8 +428,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \
do { \
Register result = i.OutputRegister(); \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, 0); \
MemOperand operand = i.MemoryOperand(&mode, index); \
DCHECK_EQ(kMode_MRR, mode); \
Register offset = operand.rb(); \
__ extsw(offset, offset); \
......@@ -453,8 +455,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr, asm_instrx) \
do { \
Label done; \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, 0); \
MemOperand operand = i.MemoryOperand(&mode, index); \
DCHECK_EQ(kMode_MRR, mode); \
Register offset = operand.rb(); \
__ extsw(offset, offset); \
......@@ -479,8 +482,9 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \
do { \
Label done; \
size_t index = 0; \
AddressingMode mode = kMode_None; \
MemOperand operand = i.MemoryOperand(&mode, 0); \
MemOperand operand = i.MemoryOperand(&mode, index); \
DCHECK_EQ(kMode_MRR, mode); \
Register offset = operand.rb(); \
__ extsw(offset, offset); \
......@@ -1087,8 +1091,8 @@ void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
PPCOperandConverter i(this, instr);
Register input = i.InputRegister(0);
for (size_t index = 2; index < instr->InputCount(); index += 2) {
__ Cmpi(input, Operand(i.InputInt32(static_cast<int>(index + 0))), r0);
__ beq(GetLabel(i.InputRpo(static_cast<int>(index + 1))));
__ Cmpi(input, Operand(i.InputInt32(index + 0)), r0);
__ beq(GetLabel(i.InputRpo(index + 1)));
}
AssembleArchJump(i.InputRpo(1));
}
......@@ -1127,16 +1131,8 @@ void CodeGenerator::AssemblePrologue() {
int register_save_area_size = 0;
RegList frame_saves = fp.bit();
__ mflr(r0);
#if V8_OOL_CONSTANT_POOL
__ Push(r0, fp, kConstantPoolRegister);
// Adjust FP to point to saved FP.
__ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
register_save_area_size += kPointerSize;
frame_saves |= kConstantPoolRegister.bit();
#else
__ Push(r0, fp);
__ mr(fp, sp);
#endif
// Save callee-saved registers.
const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves;
for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
......@@ -1187,9 +1183,6 @@ void CodeGenerator::AssembleReturn() {
}
// Restore registers.
RegList frame_saves = fp.bit();
#if V8_OOL_CONSTANT_POOL
frame_saves |= kConstantPoolRegister.bit();
#endif
const RegList saves = descriptor->CalleeSavedRegisters() & ~frame_saves;
if (saves != 0) {
__ MultiPop(saves);
......
......@@ -25,7 +25,7 @@ void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
// -- lr : return address
// -----------------------------------
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
if (accessor_index >= 0) {
DCHECK(!holder.is(scratch));
......@@ -62,7 +62,7 @@ void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
// -- lr : return address
// -----------------------------------
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Save value register, so we can restore it later.
__ push(value());
......@@ -617,7 +617,7 @@ void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
if (must_preserve_receiver_reg) {
__ Push(receiver(), holder_reg, this->name());
} else {
......@@ -669,11 +669,20 @@ void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
Handle<JSObject> object, Handle<Name> name, int accessor_index) {
Handle<JSObject> object, Handle<Name> name,
Handle<ExecutableAccessorInfo> callback) {
Register holder_reg = Frontend(name);
__ Push(receiver(), holder_reg); // receiver
__ LoadSmiLiteral(ip, Smi::FromInt(accessor_index));
// If the callback cannot leak, then push the callback directly,
// otherwise wrap it in a weak cell.
if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
__ mov(ip, Operand(callback));
} else {
Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
__ mov(ip, Operand(cell));
}
__ push(ip);
__ mov(ip, Operand(name));
__ Push(ip, value());
......
......@@ -69,14 +69,6 @@ Address RelocInfo::target_address_address() {
DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) ||
rmode_ == EMBEDDED_OBJECT || rmode_ == EXTERNAL_REFERENCE);
#if V8_OOL_CONSTANT_POOL
if (Assembler::IsConstantPoolLoadStart(pc_)) {
// We return the PC for ool constant pool since this function is used by the
// serializerer and expects the address to reside within the code object.
return reinterpret_cast<Address>(pc_);
}
#endif
// Read the address of the word containing the target_address in an
// instruction stream.
// The only architecture-independent user of this function is the serializer.
......@@ -91,13 +83,8 @@ Address RelocInfo::target_address_address() {
Address RelocInfo::constant_pool_entry_address() {
#if V8_OOL_CONSTANT_POOL
return Assembler::target_constant_pool_address_at(pc_,
host_->constant_pool());
#else
UNREACHABLE();
return NULL;
#endif
}
......@@ -131,22 +118,12 @@ Address Assembler::target_address_from_return_address(Address pc) {
// mtlr ip
// blrl
// @ return address
#if V8_OOL_CONSTANT_POOL
if (IsConstantPoolLoadEnd(pc - 3 * kInstrSize)) {
return pc - (kMovInstructionsConstantPool + 2) * kInstrSize;
}
#endif
return pc - (kMovInstructionsNoConstantPool + 2) * kInstrSize;
return pc - (kMovInstructions + 2) * kInstrSize;
}
Address Assembler::return_address_from_call_start(Address pc) {
#if V8_OOL_CONSTANT_POOL
Address load_address = pc + (kMovInstructionsConstantPool - 1) * kInstrSize;
if (IsConstantPoolLoadEnd(load_address))
return pc + (kMovInstructionsConstantPool + 2) * kInstrSize;
#endif
return pc + (kMovInstructionsNoConstantPool + 2) * kInstrSize;
return pc + (kMovInstructions + 2) * kInstrSize;
}
......@@ -224,13 +201,8 @@ void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode write_barrier_mode,
}
#if V8_OOL_CONSTANT_POOL
static const int kNoCodeAgeInstructions = 7;
#else
static const int kNoCodeAgeInstructions = 6;
#endif
static const int kCodeAgingInstructions =
Assembler::kMovInstructionsNoConstantPool + 3;
static const int kCodeAgingInstructions = Assembler::kMovInstructions + 3;
static const int kNoCodeAgeSequenceInstructions =
((kNoCodeAgeInstructions >= kCodeAgingInstructions)
? kNoCodeAgeInstructions
......@@ -456,61 +428,12 @@ Address Assembler::target_address_at(Address pc,
(instr2 & kImm16Mask));
#endif
}
#if V8_OOL_CONSTANT_POOL
return Memory::Address_at(target_constant_pool_address_at(pc, constant_pool));
#else
DCHECK(false);
return (Address)0;
#endif
}
#if V8_OOL_CONSTANT_POOL
bool Assembler::IsConstantPoolLoadStart(Address pc) {
#if V8_TARGET_ARCH_PPC64
if (!IsLi(instr_at(pc))) return false;
pc += kInstrSize;
#endif
return GetRA(instr_at(pc)).is(kConstantPoolRegister);
}
bool Assembler::IsConstantPoolLoadEnd(Address pc) {
#if V8_TARGET_ARCH_PPC64
pc -= kInstrSize;
#endif
return IsConstantPoolLoadStart(pc);
}
int Assembler::GetConstantPoolOffset(Address pc) {
DCHECK(IsConstantPoolLoadStart(pc));
Instr instr = instr_at(pc);
int offset = SIGN_EXT_IMM16((instr & kImm16Mask));
return offset;
}
void Assembler::SetConstantPoolOffset(Address pc, int offset) {
DCHECK(IsConstantPoolLoadStart(pc));
DCHECK(is_int16(offset));
Instr instr = instr_at(pc);
instr &= ~kImm16Mask;
instr |= (offset & kImm16Mask);
instr_at_put(pc, instr);
UNREACHABLE();
return NULL;
}
Address Assembler::target_constant_pool_address_at(
Address pc, ConstantPoolArray* constant_pool) {
Address addr = reinterpret_cast<Address>(constant_pool);
DCHECK(addr);
addr += GetConstantPoolOffset(pc);
return addr;
}
#endif
// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code. The serializer
// has already deserialized the mov instructions etc.
......@@ -575,14 +498,9 @@ void Assembler::set_target_address_at(Address pc,
CpuFeatures::FlushICache(p, 2 * kInstrSize);
}
#endif
} else {
#if V8_OOL_CONSTANT_POOL
Memory::Address_at(target_constant_pool_address_at(pc, constant_pool)) =
target;
#else
UNREACHABLE();
#endif
return;
}
UNREACHABLE();
}
}
} // namespace v8::internal
......
This diff is collapsed.
This diff is collapsed.
......@@ -232,7 +232,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ push(function); // Preserve the function.
__ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(r3);
__ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
}
......@@ -252,7 +252,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ bind(&gc_required);
__ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(argument);
__ CallRuntime(Runtime::kNewStringWrapper, 1);
}
......@@ -262,7 +262,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
// Push function as parameter to the runtime call.
__ Push(r4, r4);
......@@ -353,7 +353,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Enter a construct frame.
{
FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
FrameScope scope(masm, StackFrame::CONSTRUCT);
if (create_memento) {
__ AssertUndefinedOrAllocationSite(r5, r7);
......@@ -752,7 +752,7 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
CHECK(!FLAG_pretenuring_call_new);
{
FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
FrameScope scope(masm, StackFrame::CONSTRUCT);
// Smi-tagged arguments count.
__ mr(r7, r3);
......@@ -916,7 +916,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
// Push function as parameter to the runtime call.
__ Push(r4, r4);
......@@ -1027,7 +1027,7 @@ void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
SaveFPRegsMode save_doubles) {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve registers across notification, this is important for compiled
// stubs that tail call the runtime on deopts passing their parameters in
......@@ -1056,7 +1056,7 @@ void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the function and deoptimization type to the runtime system.
__ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
__ push(r3);
......@@ -1104,7 +1104,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// Lookup the function in the JavaScript frame.
__ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass function as argument.
__ push(r3);
__ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
......@@ -1122,12 +1122,8 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// <deopt_data> = <code>[#deoptimization_data_offset]
__ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
#if V8_OOL_CONSTANT_POOL
{
ConstantPoolUnavailableScope constant_pool_unavailable(masm);
__ LoadP(kConstantPoolRegister,
FieldMemOperand(r3, Code::kConstantPoolOffset));
#endif
__ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
......@@ -1136,17 +1132,13 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
DeoptimizationInputData::kOsrPcOffsetIndex)));
__ SmiUntag(r4);
// Compute the target address = code_obj + header_size + osr_offset
// <entry_addr> = <code_obj> + #header_size + <osr_offset>
__ add(r3, r3, r4);
__ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
__ mtlr(r0);
// Compute the target address = code start + osr_offset
__ add(r0, r3, r4);
// And "return" to the OSR entry point of the function.
__ Ret();
#if V8_OOL_CONSTANT_POOL
__ mtlr(r0);
__ blr();
}
#endif
}
......@@ -1157,7 +1149,7 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
__ cmpl(sp, ip);
__ bge(&ok);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kStackGuard, 0);
}
__ Jump(masm->isolate()->builtins()->OnStackReplacement(),
......@@ -1248,7 +1240,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
{
// Enter an internal frame in order to preserve argument count.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r3);
__ Push(r3, r5);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
......@@ -1381,7 +1373,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
const int kFunctionOffset = 4 * kPointerSize;
{
FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
FrameScope frame_scope(masm, StackFrame::INTERNAL);
__ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
__ push(r3);
......@@ -1563,11 +1555,7 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ mflr(r0);
__ push(r0);
#if V8_OOL_CONSTANT_POOL
__ Push(fp, kConstantPoolRegister, r7, r4, r3);
#else
__ Push(fp, r7, r4, r3);
#endif
__ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
kPointerSize));
}
......
......@@ -110,7 +110,7 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
int param_count = descriptor.GetEnvironmentParameterCount();
{
// Call the runtime system in a fresh internal frame.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
DCHECK(param_count == 0 ||
r3.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
// Push arguments
......@@ -1184,11 +1184,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// r7: argv
__ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ push(r0);
#if V8_OOL_CONSTANT_POOL
__ mov(kConstantPoolRegister,
Operand(isolate()->factory()->empty_constant_pool_array()));
__ push(kConstantPoolRegister);
#endif
int marker = type();
__ LoadSmiLiteral(r0, Smi::FromInt(marker));
__ push(r0);
......@@ -1336,14 +1331,10 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
const Register scratch = r5;
Register scratch3 = no_reg;
// delta = mov + unaligned LoadP + cmp + bne
#if V8_TARGET_ARCH_PPC64
// delta = mov + tagged LoadP + cmp + bne
const int32_t kDeltaToLoadBoolResult =
(Assembler::kMovInstructions + 4) * Assembler::kInstrSize;
#else
const int32_t kDeltaToLoadBoolResult =
(Assembler::kMovInstructions + 3) * Assembler::kInstrSize;
#endif
(Assembler::kMovInstructions + Assembler::kTaggedLoadInstructions + 2) *
Assembler::kInstrSize;
Label slow, loop, is_instance, is_not_instance, not_js_object;
......@@ -1503,7 +1494,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r3, r4);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
......@@ -2585,7 +2576,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Create an AllocationSite if we don't already have it, store it in the
// slot.
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Arguments register must be smi-tagged to call out.
__ SmiTag(r3);
......@@ -2671,7 +2662,7 @@ static void EmitSlowCase(MacroAssembler* masm, int argc, Label* non_function) {
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
// Wrap the receiver and patch it back onto the stack.
{
FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
FrameScope frame_scope(masm, StackFrame::INTERNAL);
__ Push(r4, r6);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ pop(r4);
......@@ -2988,7 +2979,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// r6 - slot
// r4 - function
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
__ Push(r4);
__ CallStub(&create_stub);
......@@ -3016,7 +3007,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
void CallICStub::GenerateMiss(MacroAssembler* masm) {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Push the function and feedback info.
__ Push(r4, r5, r6);
......@@ -3974,7 +3965,7 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) {
ExternalReference miss =
ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r4, r3);
__ Push(r4, r3);
__ LoadSmiLiteral(r0, Smi::FromInt(op()));
......
......@@ -108,7 +108,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
RegList object_regs,
RegList non_object_regs) {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
FrameScope scope(masm, StackFrame::INTERNAL);
// Load padding words on stack.
__ LoadSmiLiteral(ip, Smi::FromInt(LiveEdit::kFramePaddingValue));
......@@ -317,8 +317,7 @@ void DebugCodegen::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
__ LoadP(r4, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset -
kPointerSize));
// Pop return address, frame and constant pool pointer (if
// FLAG_enable_ool_constant_pool).
// Pop return address and frame
__ LeaveFrame(StackFrame::INTERNAL);
// Load context from the function.
......
......@@ -356,13 +356,8 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) {
void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) {
#if V8_OOL_CONSTANT_POOL
DCHECK(FLAG_enable_ool_constant_pool);
SetFrameSlot(offset, value);
#else
// No out-of-line constant pool support.
UNREACHABLE();
#endif
}
......
......@@ -21,38 +21,22 @@ namespace internal {
Register JavaScriptFrame::fp_register() { return v8::internal::fp; }
Register JavaScriptFrame::context_register() { return cp; }
Register JavaScriptFrame::constant_pool_pointer_register() {
#if V8_OOL_CONSTANT_POOL
DCHECK(FLAG_enable_ool_constant_pool);
return kConstantPoolRegister;
#else
UNREACHABLE();
return no_reg;
#endif
}
Register StubFailureTrampolineFrame::fp_register() { return v8::internal::fp; }
Register StubFailureTrampolineFrame::context_register() { return cp; }
Register StubFailureTrampolineFrame::constant_pool_pointer_register() {
#if V8_OOL_CONSTANT_POOL
DCHECK(FLAG_enable_ool_constant_pool);
return kConstantPoolRegister;
#else
UNREACHABLE();
return no_reg;
#endif
}
Object*& ExitFrame::constant_pool_slot() const {
#if V8_OOL_CONSTANT_POOL
DCHECK(FLAG_enable_ool_constant_pool);
const int offset = ExitFrameConstants::kConstantPoolOffset;
return Memory::Object_at(fp() + offset);
#else
UNREACHABLE();
return Memory::Object_at(NULL);
#endif
}
}
} // namespace v8::internal
......
......@@ -57,15 +57,8 @@ const int kNumCalleeSaved = 18;
// Number of registers for which space is reserved in safepoints. Must be a
// multiple of 8.
// TODO(regis): Only 8 registers may actually be sufficient. Revisit.
const int kNumSafepointRegisters = 32;
// Define the list of registers actually saved at safepoints.
// Note that the number of saved registers may be smaller than the reserved
// space, i.e. kNumSafepointSavedRegisters <= kNumSafepointRegisters.
const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
const int kNumSafepointSavedRegisters = kNumJSCallerSaved + kNumCalleeSaved;
// The following constants describe the stack frame linkage area as
// defined by the ABI. Note that kNumRequiredStackFrameSlots must
// satisfy alignment requirements (rounding up if required).
......@@ -123,13 +116,8 @@ class EntryFrameConstants : public AllStatic {
class ExitFrameConstants : public AllStatic {
public:
#if V8_OOL_CONSTANT_POOL
static const int kFrameSize = 3 * kPointerSize;
static const int kConstantPoolOffset = -3 * kPointerSize;
#else
static const int kFrameSize = 2 * kPointerSize;
static const int kConstantPoolOffset = 0; // Not used.
#endif
static const int kCodeOffset = -2 * kPointerSize;
static const int kSPOffset = -1 * kPointerSize;
......
......@@ -467,9 +467,7 @@ void FullCodeGenerator::EmitReturnSequence() {
// With 64bit we may need nop() instructions to ensure we have
// enough space to SetDebugBreakAtReturn()
if (is_int16(sp_delta)) {
#if !V8_OOL_CONSTANT_POOL
masm_->nop();
#endif
masm_->nop();
}
#endif
......@@ -2294,13 +2292,7 @@ void FullCodeGenerator::EmitGeneratorResume(
Label slow_resume;
__ bne(&slow_resume, cr0);
__ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
#if V8_OOL_CONSTANT_POOL
{
ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
// Load the new code object's constant pool pointer.
__ LoadP(kConstantPoolRegister,
MemOperand(ip, Code::kConstantPoolOffset - Code::kHeaderSize));
#endif
__ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
__ SmiUntag(r5);
__ add(ip, ip, r5);
......@@ -2310,9 +2302,7 @@ void FullCodeGenerator::EmitGeneratorResume(
r0);
__ Jump(ip);
__ bind(&slow_resume);
#if V8_OOL_CONSTANT_POOL
}
#endif
} else {
__ beq(&call_resume, cr0);
}
......@@ -3783,8 +3773,9 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
// Check if the constructor in the map is a JS function.
__ LoadP(r3, FieldMemOperand(r3, Map::kConstructorOffset));
__ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
Register instance_type = r5;
__ GetMapConstructor(r3, r3, r4, instance_type);
__ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
__ bne(&non_function_constructor);
// r3 now contains the constructor function. Grab the
......
......@@ -110,7 +110,7 @@ bool LCodeGen::GeneratePrologue() {
// r4: Callee's JS function.
// cp: Callee's context.
// pp: Callee's constant pool pointer (if FLAG_enable_ool_constant_pool)
// pp: Callee's constant pool pointer (if enabled)
// fp: Caller's frame pointer.
// lr: Caller's pc.
// ip: Our own function entry (required by the prologue)
......@@ -942,12 +942,6 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers, Safepoint::Kind kind,
safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
#if V8_OOL_CONSTANT_POOL
if (kind & Safepoint::kWithRegisters) {
// Register always contains a pointer to the constant pool.
safepoint.DefinePointerRegister(kConstantPoolRegister, zone());
}
#endif
}
......@@ -2788,10 +2782,11 @@ void LCodeGen::EmitClassOfTest(Label* is_true, Label* is_false,
// Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
// Check if the constructor in the map is a function.
__ LoadP(temp, FieldMemOperand(temp, Map::kConstructorOffset));
Register instance_type = ip;
__ GetMapConstructor(temp, temp, temp2, instance_type);
// Objects with a non-function constructor have class 'Object'.
__ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
__ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
if (class_name->IsOneByteEqualTo(STATIC_CHAR_VECTOR("Object"))) {
__ bne(is_true);
} else {
......@@ -3094,7 +3089,8 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
}
ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL;
Handle<Code> ic = CodeFactory::LoadIC(isolate(), mode).code();
Handle<Code> ic = CodeFactory::LoadICInOptimizedCode(isolate(), mode,
PREMONOMORPHIC).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
......@@ -3236,7 +3232,9 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
if (FLAG_vector_ics) {
EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
}
Handle<Code> ic = CodeFactory::LoadIC(isolate(), NOT_CONTEXTUAL).code();
Handle<Code> ic = CodeFactory::LoadICInOptimizedCode(
isolate(), NOT_CONTEXTUAL,
instr->hydrogen()->initialization_state()).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
......@@ -3591,7 +3589,9 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
}
Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
Handle<Code> ic =
CodeFactory::KeyedLoadICInOptimizedCode(
isolate(), instr->hydrogen()->initialization_state()).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
......@@ -4530,7 +4530,9 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
__ mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->language_mode());
Handle<Code> ic =
StoreIC::initialize_stub(isolate(), instr->language_mode(),
instr->hydrogen()->initialization_state());
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
......@@ -4794,8 +4796,9 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
Handle<Code> ic =
CodeFactory::KeyedStoreIC(isolate(), instr->language_mode()).code();
Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
isolate(), instr->language_mode(),
instr->hydrogen()->initialization_state()).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
......@@ -5520,6 +5523,7 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) {
void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
Register temp = ToRegister(instr->temp());
{
PushSafepointRegistersScope scope(this);
__ push(object);
......@@ -5527,9 +5531,9 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
__ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
RecordSafepointWithRegisters(instr->pointer_map(), 1,
Safepoint::kNoLazyDeopt);
__ StoreToSafepointRegisterSlot(r3, scratch0());
__ StoreToSafepointRegisterSlot(r3, temp);
}
__ TestIfSmi(scratch0(), r0);
__ TestIfSmi(temp, r0);
DeoptimizeIf(eq, instr, Deoptimizer::kInstanceMigrationFailed, cr0);
}
......@@ -5561,17 +5565,14 @@ void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
return;
}
Register map_reg = scratch0();
LOperand* input = instr->value();
DCHECK(input->IsRegister());
Register reg = ToRegister(input);
Register object = ToRegister(instr->value());
Register map_reg = ToRegister(instr->temp());
__ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
__ LoadP(map_reg, FieldMemOperand(object, HeapObject::kMapOffset));
DeferredCheckMaps* deferred = NULL;
if (instr->hydrogen()->HasMigrationTarget()) {
deferred = new (zone()) DeferredCheckMaps(this, instr, reg);
deferred = new (zone()) DeferredCheckMaps(this, instr, object);
__ bind(deferred->check_maps());
}
......
......@@ -2029,7 +2029,9 @@ LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
if (instr->IsStabilityCheck()) return new (zone()) LCheckMaps;
LOperand* value = UseRegisterAtStart(instr->value());
LInstruction* result = AssignEnvironment(new (zone()) LCheckMaps(value));
LOperand* temp = TempRegister();
LInstruction* result =
AssignEnvironment(new (zone()) LCheckMaps(value, temp));
if (instr->HasMigrationTarget()) {
info()->MarkAsDeferredCalling();
result = AssignPointerMap(result);
......
......@@ -2319,11 +2319,15 @@ class LCheckInstanceType FINAL : public LTemplateInstruction<0, 1, 0> {
};
class LCheckMaps FINAL : public LTemplateInstruction<0, 1, 0> {
class LCheckMaps FINAL : public LTemplateInstruction<0, 1, 1> {
public:
explicit LCheckMaps(LOperand* value = NULL) { inputs_[0] = value; }
explicit LCheckMaps(LOperand* value = NULL, LOperand* temp = NULL) {
inputs_[0] = value;
temps_[0] = temp;
}
LOperand* value() { return inputs_[0]; }
LOperand* temp() { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
......
This diff is collapsed.
......@@ -102,9 +102,7 @@ class MacroAssembler : public Assembler {
MacroAssembler(Isolate* isolate, void* buffer, int size);
// Returns the size of a call in instructions. Note, the value returned is
// only valid as long as no entries are added to the constant pool between
// checking the call size and emitting the actual call.
// Returns the size of a call in instructions.
static int CallSize(Register target);
int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al);
static int CallSizeNotPredictableCodeSize(Address target,
......@@ -684,6 +682,11 @@ class MacroAssembler : public Assembler {
// ---------------------------------------------------------------------------
// Support functions.
// Machine code version of Map::GetConstructor().
// |temp| holds |result|'s map when done, and |temp2| its instance type.
void GetMapConstructor(Register result, Register map, Register temp,
Register temp2);
// Try to get function prototype of a function and puts the value in
// the result register. Checks that the function really is a
// function and jumps to the miss label if the fast checks fail. The
......@@ -1361,7 +1364,7 @@ class MacroAssembler : public Assembler {
// ---------------------------------------------------------------------------
// Patching helpers.
// Retrieve/patch the relocated value (lis/ori pair or constant pool load).
// Retrieve/patch the relocated value (lis/ori pair).
void GetRelocatedValue(Register location, Register result, Register scratch);
void SetRelocatedValue(Register location, Register scratch,
Register new_value);
......@@ -1485,18 +1488,14 @@ class MacroAssembler : public Assembler {
// it. See the implementation for register usage.
void JumpToHandlerEntry();
static const RegList kSafepointSavedRegisters;
static const int kNumSafepointSavedRegisters;
// Compute memory operands for safepoint stack slots.
static int SafepointRegisterStackIndex(int reg_code);
MemOperand SafepointRegisterSlot(Register reg);
MemOperand SafepointRegistersAndDoublesSlot(Register reg);
#if V8_OOL_CONSTANT_POOL
// Loads the constant pool pointer (kConstantPoolRegister).
enum CodeObjectAccessMethod { CAN_USE_IP, CONSTRUCT_INTERNAL_REFERENCE };
void LoadConstantPoolPointerRegister(CodeObjectAccessMethod access_method,
int ip_code_entry_delta = 0);
#endif
bool generating_stub_;
bool has_frame_;
// This handle will be patched with the code object on installation.
......
......@@ -401,5 +401,20 @@
['arch == ppc64', {
#issue 2857
'test-log/EquivalenceOfLoggingAndTraversal' : [SKIP],
# Test has an assumption that does not hold for larger PPC page sizes.
'test-heap/FirstPageFitsStartup' : [SKIP],
}], # 'arch == ppc64'
##############################################################################
['arch == ppc and simulator_run == True or arch == ppc64 and simulator_run == True', {
# Pass but take too long with the simulator.
'test-api/Threading1': [SKIP],
'test-api/Threading2': [SKIP],
'test-api/ExternalArrays': [SKIP],
# Issues to be investigated.
'test-run-machops/RunWord64EqualInBranchP': [SKIP],
}], # 'arch == ppc and simulator_run == True or arch == ppc64 and simulator_run == True'
]
......@@ -588,4 +588,15 @@
# Deopt every n garbage collections collides with deopt every n times.
'regress/regress-2653': [SKIP],
}], # 'deopt_fuzzer == True'
##############################################################################
['arch == ppc and simulator_run == True or arch == ppc64 and simulator_run == True', {
# Take too long with the simulator.
'regress/regress-1132': [SKIP],
'asm/embenchen/box2d': [SKIP],
# Issues to be investigated.
'es6/collections': [SKIP],
}], # 'arch == ppc and simulator_run == True'
]
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment