Commit 9ef343c1 authored by jarin@chromium.org

[Turbofan] Insert nops for lazy bailout patching, fix translation of literals.

The code for EnsureSpaceForLazyDeopt is taken from lithium-codegen-*.

BUG=
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/562033003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24138 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0a4e98ba
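For orientation before the per-architecture diffs below: the new EnsureSpaceForLazyDeopt() only has to guarantee that the gap between the previous lazy-bailout site and the current assembler offset is at least Deoptimizer::patch_size() bytes, filling any shortfall with nops so the deoptimizer can later patch that site without running into the call being emitted now. A minimal, self-contained sketch of the arithmetic (the helper name and parameters are illustrative, not part of the patch; on ARM/ARM64 the shortfall must additionally be a whole number of fixed-size instructions):

#include <cassert>

// Hypothetical helper mirroring the padding computation in the patch: how many
// nop instructions are needed so that patch_size bytes can later be written
// starting at last_lazy_deopt_pc without overlapping the code that is about to
// be emitted at current_pc.
int LazyDeoptPaddingInstructions(int current_pc, int last_lazy_deopt_pc,
                                 int patch_size, int instr_size) {
  if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;  // enough room
  int padding_bytes = last_lazy_deopt_pc + patch_size - current_pc;
  assert(padding_bytes % instr_size == 0);  // whole instructions on ARM/ARM64
  return padding_bytes / instr_size;
}

On ia32/x64 the byte shortfall is instead passed directly to __ Nop(padding_size), as seen in those diffs below.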
@@ -137,6 +137,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -150,6 +151,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -843,6 +845,27 @@ void CodeGenerator::AddNopForSmiCodeInlining() {
   // On 32-bit ARM we do not insert nops for inlined Smi code.
 }
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      // Block literal pool emission for duration of padding.
+      v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= v8::internal::Assembler::kInstrSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
 #undef __
 }  // namespace compiler
......
@@ -132,6 +132,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   InstructionCode opcode = instr->opcode();
   switch (ArchOpcodeField::decode(opcode)) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -144,6 +145,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -844,6 +846,29 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    intptr_t current_pc = masm()->pc_offset();
+    if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
+      intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK((padding_size % kInstructionSize) == 0);
+      InstructionAccurateScope instruction_accurate(
+          masm(), padding_size / kInstructionSize);
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= kInstructionSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
 #undef __
 }  // namespace compiler
......
@@ -21,7 +21,8 @@ CodeGenerator::CodeGenerator(InstructionSequence* code)
       safepoints_(code->zone()),
       deoptimization_states_(code->zone()),
       deoptimization_literals_(code->zone()),
-      translations_(code->zone()) {}
+      translations_(code->zone()),
+      last_lazy_deopt_pc_(0) {}
 Handle<Code> CodeGenerator::GenerateCode() {
@@ -242,6 +243,7 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
   }
   if (needs_frame_state) {
+    MarkLazyDeoptSite();
     // If the frame state is present, it starts at argument 1
     // (just after the code address).
     InstructionOperandConverter converter(this, instr);
@@ -387,8 +389,7 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
           isolate()->factory()->NewNumberFromInt(constant.ToInt32());
       break;
     case Constant::kFloat64:
-      constant_object =
-          isolate()->factory()->NewHeapNumber(constant.ToFloat64());
+      constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
       break;
     case Constant::kHeapObject:
       constant_object = constant.ToHeapObject();
@@ -403,6 +404,11 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
   }
 }
+void CodeGenerator::MarkLazyDeoptSite() {
+  last_lazy_deopt_pc_ = masm()->pc_offset();
+}
 #if !V8_TURBOFAN_BACKEND
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
......
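On the literal-translation side, the kFloat64 case above now goes through Factory::NewNumber() rather than Factory::NewHeapNumber(). NewNumber may return a Smi when the double holds a small integral value, whereas NewHeapNumber always allocates a boxed heap number, which is presumably what the fix is after: the deopt literal ends up in its canonical representation. A standalone sketch of that canonicalization decision (the Smi bound, enum, and helper are assumptions for illustration, not V8's actual limits or code):

#include <cmath>

// Hypothetical illustration of number canonicalization: integral doubles in a
// small-integer range (other than -0.0) can be represented as a Smi; all other
// doubles need a boxed heap number.
enum class NumberKind { kSmallInteger, kHeapNumber };

NumberKind CanonicalNumberKind(double value) {
  const double kMaxSmi = 1073741823.0;   // assumed 31-bit Smi range
  const double kMinSmi = -1073741824.0;
  bool is_integral = std::trunc(value) == value;           // false for NaN
  bool is_negative_zero = value == 0.0 && std::signbit(value);
  if (is_integral && !is_negative_zero && value >= kMinSmi && value <= kMaxSmi) {
    return NumberKind::kSmallInteger;
  }
  return NumberKind::kHeapNumber;
}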
@@ -98,8 +98,10 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
   void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                 InstructionOperand* op);
   void AddNopForSmiCodeInlining();
+  void EnsureSpaceForLazyDeopt();
+  void MarkLazyDeoptSite();
   // ===========================================================================
   struct DeoptimizationState : ZoneObject {
    public:
     BailoutId bailout_id() const { return bailout_id_; }
@@ -126,6 +128,7 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
   ZoneDeque<DeoptimizationState*> deoptimization_states_;
   ZoneDeque<Handle<Object> > deoptimization_literals_;
   TranslationBuffer translations_;
+  int last_lazy_deopt_pc_;
 };
 }  // namespace compiler
......
@@ -112,6 +112,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (HasImmediateInput(instr, 0)) {
         Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
         __ call(code, RelocInfo::CODE_TARGET);
@@ -123,6 +124,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -932,6 +934,21 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
 void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      __ Nop(padding_size);
+    }
+  }
+  MarkLazyDeoptSite();
+}
 #undef __
 }  // namespace compiler
......
@@ -205,6 +205,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (HasImmediateInput(instr, 0)) {
         Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
         __ Call(code, RelocInfo::CODE_TARGET);
@@ -217,6 +218,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -991,6 +993,21 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
 void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      __ Nop(padding_size);
+    }
+  }
+  MarkLazyDeoptSite();
+}
 #undef __
 }  // namespace compiler
 }  // namespace internal
......
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --turbo-deoptimization
 (function OneByteSeqStringSetCharDeoptOsr() {
   function deopt() {
......