Commit 00e90b7e authored by jarin@chromium.org

Remove deoptimization by patching the call stack.

We go back to patching the code for lazy deoptimization because ICs need the on-stack return address to read/update the IC address/state.

The change also fixes a bunch of tests, mostly by adding more deoptimization points.

(We still need to add code to ensure lazy deopt patching does not overwrite ICs and other lazy deopts; this is coming next.)
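
To make the padding-and-patch idea concrete, here is a minimal standalone C++ sketch (not V8 code): the code generator reserves patch-size bytes at the end of the generated code with nops, so lazy deoptimization can later overwrite that region in place with a call while every return address on the stack stays valid for the ICs. ToyAssembler, kPatchSize, and the x86-style encodings are illustrative stand-ins, not the real MacroAssembler/Deoptimizer API.

// Minimal standalone sketch; names and sizes are assumptions for illustration.
#include <cstdint>
#include <cstdio>
#include <vector>

class ToyAssembler {
 public:
  int pc_offset() const { return static_cast<int>(buffer_.size()); }
  void nop() { buffer_.push_back(0x90); }  // x86 one-byte NOP
  void call(int32_t rel) {                 // x86 five-byte rel32 call
    buffer_.push_back(0xE8);
    for (int i = 0; i < 4; i++) buffer_.push_back((rel >> (8 * i)) & 0xFF);
  }
  std::vector<uint8_t>& buffer() { return buffer_; }

 private:
  std::vector<uint8_t> buffer_;
};

static const int kPatchSize = 5;  // assumed size of one patched call

int main() {
  ToyAssembler masm;
  // ...the function body would be emitted here...

  // Reserve room at the end of the code, like the nop loop added to
  // CodeGenerator::GenerateCode() below, so a later patch cannot run
  // past the end of the code object.
  int target_offset = masm.pc_offset() + kPatchSize;
  while (masm.pc_offset() < target_offset) masm.nop();

  // Lazy deopt later: overwrite the reserved region in place with a call to
  // a (made-up) deoptimization entry. Return addresses on the stack still
  // point into valid code, so ICs can keep reading/updating them.
  ToyAssembler patcher;
  patcher.call(/*relative target, illustrative*/ 0x1234);
  int patch_start = masm.pc_offset() - kPatchSize;
  for (int i = 0; i < kPatchSize; i++) {
    masm.buffer()[patch_start + i] = patcher.buffer()[i];
  }

  std::printf("generated %d bytes of code\n", masm.pc_offset());
  return 0;
}

The diff below implements the real version of this: the nop padding in GenerateCode(), a pc offset recorded per deoptimization state, and removal of the previous mechanism that rewrote return addresses on the stack.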

BUG=
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/568783002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23934 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent b2a8ad2c
@@ -956,7 +956,8 @@ void AstGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
Node* attr = jsgraph()->Constant(NONE);
const Operator* op =
javascript()->Runtime(Runtime::kDefineAccessorPropertyUnchecked, 5);
NewNode(op, literal, name, getter, setter, attr);
Node* call = NewNode(op, literal, name, getter, setter, attr);
PrepareFrameState(call, it->first->id());
}
// Transform literals that contain functions to fast properties.
@@ -1421,8 +1422,10 @@ void AstGraphBuilder::VisitCountOperation(CountOperation* expr) {
switch (assign_type) {
case VARIABLE: {
Variable* variable = expr->expression()->AsVariableProxy()->var();
environment()->Push(value);
BuildVariableAssignment(variable, value, expr->op(),
expr->AssignmentId());
environment()->Pop();
break;
}
case NAMED_PROPERTY: {
@@ -1431,7 +1434,9 @@ void AstGraphBuilder::VisitCountOperation(CountOperation* expr) {
MakeUnique(property->key()->AsLiteral()->AsPropertyName());
Node* store =
NewNode(javascript()->StoreNamed(strict_mode(), name), object, value);
environment()->Push(value);
PrepareFrameState(store, expr->AssignmentId());
environment()->Pop();
break;
}
case KEYED_PROPERTY: {
@@ -1439,7 +1444,9 @@ void AstGraphBuilder::VisitCountOperation(CountOperation* expr) {
Node* object = environment()->Pop();
Node* store = NewNode(javascript()->StoreProperty(strict_mode()), object,
key, value);
environment()->Push(value);
PrepareFrameState(store, expr->AssignmentId());
environment()->Pop();
break;
}
}
@@ -19,7 +19,6 @@ CodeGenerator::CodeGenerator(InstructionSequence* code)
masm_(code->zone()->isolate(), NULL, 0),
resolver_(this),
safepoints_(code->zone()),
deoptimization_points_(code->zone()),
deoptimization_states_(code->zone()),
deoptimization_literals_(code->zone()),
translations_(code->zone()) {}
@@ -47,10 +46,16 @@ Handle<Code> CodeGenerator::GenerateCode() {
AssembleInstruction(*i);
}
EmitLazyDeoptimizationCallTable();
FinishCode(masm());
// Ensure there is space for lazy deopt.
if (!info->IsStub()) {
int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
while (masm()->pc_offset() < target_offset) {
masm()->nop();
}
}
safepoints()->Emit(masm(), frame()->GetSpillSlotCount());
// TODO(titzer): what are the right code flags here?
@@ -74,10 +79,9 @@ Handle<Code> CodeGenerator::GenerateCode() {
}
Safepoint::Id CodeGenerator::RecordSafepoint(PointerMap* pointers,
Safepoint::Kind kind,
int arguments,
Safepoint::DeoptMode deopt_mode) {
void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
int arguments,
Safepoint::DeoptMode deopt_mode) {
const ZoneList<InstructionOperand*>* operands =
pointers->GetNormalizedOperands();
Safepoint safepoint =
@@ -91,7 +95,6 @@ Safepoint::Id CodeGenerator::RecordSafepoint(PointerMap* pointers,
safepoint.DefinePointerRegister(reg, zone());
}
}
return safepoint.id();
}
@@ -172,19 +175,6 @@ void CodeGenerator::AssembleGap(GapInstruction* instr) {
}
void CodeGenerator::EmitLazyDeoptimizationCallTable() {
// ZoneDeque<DeoptimizationPoint*>::iterator iter;
int i = 0;
for (ZoneDeque<DeoptimizationPoint*>::iterator
iter = deoptimization_points_.begin();
iter != deoptimization_points_.end(); iter++, i++) {
int pc_offset = masm()->pc_offset();
AssembleDeoptimizerCall((*iter)->lazy_state_id());
safepoints()->SetDeoptimizationPc((*iter)->safepoint(), pc_offset);
}
}
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
CompilationInfo* info = linkage()->info();
int deopt_count = static_cast<int>(deoptimization_states_.size());
@@ -231,7 +221,7 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
data->SetTranslationIndex(
i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
data->SetArgumentsStackHeight(i, Smi::FromInt(0));
data->SetPc(i, Smi::FromInt(-1));
data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
}
code_object->set_deoptimization_data(*data);
@@ -243,10 +233,14 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
Safepoint::Id safepoint_id = RecordSafepoint(
RecordSafepoint(
instr->pointer_map(), Safepoint::kSimple, 0,
needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
if (flags & CallDescriptor::kNeedsNopAfterCall) {
AddNopForSmiCodeInlining();
}
if (needs_frame_state) {
// If the frame state is present, it starts at argument 1
// (just after the code address).
@@ -255,15 +249,19 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
int frame_state_offset = 1;
FrameStateDescriptor* descriptor =
GetFrameStateDescriptor(instr, frame_state_offset);
int deopt_state_id =
BuildTranslation(instr, frame_state_offset, kIgnoreOutput);
int lazy_deopt_state_id = deopt_state_id;
int pc_offset = masm()->pc_offset();
int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
descriptor->state_combine());
// If the pre-call frame state differs from the post-call one, produce the
// pre-call frame state, too.
// TODO(jarin) We might want to avoid building the pre-call frame state
// because it is only used to get locals and arguments (by the debugger and
// f.arguments), and those are the same in the pre-call and post-call
// states.
if (descriptor->state_combine() != kIgnoreOutput) {
lazy_deopt_state_id = BuildTranslation(instr, frame_state_offset,
descriptor->state_combine());
deopt_state_id =
BuildTranslation(instr, -1, frame_state_offset, kIgnoreOutput);
}
deoptimization_points_.push_back(new (zone()) DeoptimizationPoint(
deopt_state_id, lazy_deopt_state_id, descriptor, safepoint_id));
#if DEBUG
// Make sure all the values live in stack slots or they are immediates.
// (The values should not live in register because registers are clobbered
@@ -273,11 +271,7 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
CHECK(op->IsStackSlot() || op->IsImmediate());
}
#endif
safepoints()->RecordLazyDeoptimizationIndex(lazy_deopt_state_id);
}
if (flags & CallDescriptor::kNeedsNopAfterCall) {
AddNopForSmiCodeInlining();
safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
}
}
@@ -340,7 +334,8 @@ void CodeGenerator::BuildTranslationForFrameStateDescriptor(
}
int CodeGenerator::BuildTranslation(Instruction* instr, int frame_state_offset,
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
int frame_state_offset,
OutputFrameStateCombine state_combine) {
FrameStateDescriptor* descriptor =
GetFrameStateDescriptor(instr, frame_state_offset);
@@ -354,7 +349,7 @@ int CodeGenerator::BuildTranslation(Instruction* instr, int frame_state_offset,
int deoptimization_id = static_cast<int>(deoptimization_states_.size());
deoptimization_states_.push_back(new (zone()) DeoptimizationState(
descriptor->bailout_id(), translation.index()));
descriptor->bailout_id(), translation.index(), pc_offset));
return deoptimization_id;
}
@@ -46,8 +46,8 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
}
// Record a safepoint with the given pointer map.
Safepoint::Id RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
int arguments, Safepoint::DeoptMode deopt_mode);
void RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
int arguments, Safepoint::DeoptMode deopt_mode);
// Assemble code for the specified instruction.
void AssembleInstruction(Instruction* instr);
@@ -84,12 +84,12 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
// ===========================================================================
// Deoptimization table construction
void AddSafepointAndDeopt(Instruction* instr);
void EmitLazyDeoptimizationCallTable();
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
int frame_state_offset);
int BuildTranslation(Instruction* instr, int frame_state_offset,
int BuildTranslation(Instruction* instr, int pc_offset,
int frame_state_offset,
OutputFrameStateCombine state_combine);
void BuildTranslationForFrameStateDescriptor(
FrameStateDescriptor* descriptor, Instruction* instr,
@@ -100,39 +100,21 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
void AddNopForSmiCodeInlining();
// ===========================================================================
class DeoptimizationPoint : public ZoneObject {
public:
int state_id() const { return state_id_; }
int lazy_state_id() const { return lazy_state_id_; }
FrameStateDescriptor* descriptor() const { return descriptor_; }
Safepoint::Id safepoint() const { return safepoint_; }
DeoptimizationPoint(int state_id, int lazy_state_id,
FrameStateDescriptor* descriptor,
Safepoint::Id safepoint)
: state_id_(state_id),
lazy_state_id_(lazy_state_id),
descriptor_(descriptor),
safepoint_(safepoint) {}
private:
int state_id_;
int lazy_state_id_;
FrameStateDescriptor* descriptor_;
Safepoint::Id safepoint_;
};
struct DeoptimizationState : ZoneObject {
public:
BailoutId bailout_id() const { return bailout_id_; }
int translation_id() const { return translation_id_; }
int pc_offset() const { return pc_offset_; }
DeoptimizationState(BailoutId bailout_id, int translation_id)
: bailout_id_(bailout_id), translation_id_(translation_id) {}
DeoptimizationState(BailoutId bailout_id, int translation_id, int pc_offset)
: bailout_id_(bailout_id),
translation_id_(translation_id),
pc_offset_(pc_offset) {}
private:
BailoutId bailout_id_;
int translation_id_;
int pc_offset_;
};
InstructionSequence* code_;
@@ -141,7 +123,6 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
MacroAssembler masm_;
GapResolver resolver_;
SafepointTableBuilder safepoints_;
ZoneDeque<DeoptimizationPoint*> deoptimization_points_;
ZoneDeque<DeoptimizationState*> deoptimization_states_;
ZoneDeque<Handle<Object> > deoptimization_literals_;
TranslationBuffer translations_;
@@ -117,9 +117,11 @@ bool Linkage::NeedsFrameState(Runtime::FunctionId function) {
// few chosen runtime functions.
switch (function) {
case Runtime::kDebugBreak:
case Runtime::kDebugGetLoadedScripts:
case Runtime::kDeoptimizeFunction:
case Runtime::kInlineCallFunction:
case Runtime::kPrepareStep:
case Runtime::kSetScriptBreakPoint:
case Runtime::kDebugGetLoadedScripts:
case Runtime::kStackGuard:
return true;
default:
@@ -353,7 +353,7 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
int deopt_index = safepoint.deoptimization_index();
// Turbofan deopt is checked when we are patching addresses on stack.
bool turbofanned = code->is_turbofanned();
bool turbofanned = code->is_turbofanned() && !FLAG_turbo_deoptimization;
bool safe_to_deopt =
deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned);
@@ -401,10 +401,6 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
element = next;
}
if (FLAG_turbo_deoptimization) {
PatchStackForMarkedCode(isolate);
}
// TODO(titzer): we need a handle scope only because of the macro assembler,
// which is only used in EnsureCodeForDeoptimizationEntry.
HandleScope scope(isolate);
@@ -426,11 +422,7 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");
// Do platform-specific patching to force any activations to lazy deopt.
//
// We skip patching Turbofan code - we patch return addresses on stack.
// TODO(jarin) We should still zap the code object (but we have to
// be careful not to zap the deoptimization block).
if (!codes[i]->is_turbofanned()) {
if (!codes[i]->is_turbofanned() || FLAG_turbo_deoptimization) {
PatchCodeForDeoptimization(isolate, codes[i]);
// We might be in the middle of incremental marking with compaction.
@@ -442,40 +434,6 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
}
// For all marked Turbofanned code on stack, change the return address to go
// to the deoptimization block.
void Deoptimizer::PatchStackForMarkedCode(Isolate* isolate) {
// TODO(jarin) We should tolerate missing patch entry for the topmost frame.
for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done();
it.Advance()) {
StackFrame::Type type = it.frame()->type();
if (type == StackFrame::OPTIMIZED) {
Code* code = it.frame()->LookupCode();
if (code->is_turbofanned() && code->marked_for_deoptimization()) {
JSFunction* function =
static_cast<OptimizedFrame*>(it.frame())->function();
Address* pc_address = it.frame()->pc_address();
int pc_offset =
static_cast<int>(*pc_address - code->instruction_start());
SafepointEntry safepoint_entry = code->GetSafepointEntry(*pc_address);
unsigned new_pc_offset = safepoint_entry.deoptimization_pc();
if (FLAG_trace_deopt) {
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintF(scope.file(), "[patching stack address for function: ");
function->PrintName(scope.file());
PrintF(scope.file(), " (Pc offset %i -> %i)]\n", pc_offset,
new_pc_offset);
}
CHECK(new_pc_offset != Safepoint::kNoDeoptimizationPc);
*pc_address += static_cast<int>(new_pc_offset) - pc_offset;
}
}
}
}
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
if (FLAG_trace_deopt) {
CodeTracer::Scope scope(isolate->GetCodeTracer());
@@ -177,8 +177,6 @@ class Deoptimizer : public Malloced {
// refer to that code.
static void DeoptimizeMarkedCode(Isolate* isolate);
static void PatchStackForMarkedCode(Isolate* isolate);
// Visit all the known optimized functions in a given isolate.
static void VisitAllOptimizedFunctions(
Isolate* isolate, OptimizedFunctionVisitor* visitor);
@@ -10937,13 +10937,6 @@ void Code::Disassemble(const char* name, OStream& os) { // NOLINT
} else {
os << "<none>";
}
if (entry.deoptimization_pc() != Safepoint::kNoDeoptimizationPc) {
Vector<char> buf2 = Vector<char>::New(30);
SNPrintF(buf2, "%6d", entry.deoptimization_pc());
os << buf2.start();
} else {
os << "<none>";
}
if (entry.argument_count() > 0) {
os << " argc: " << entry.argument_count();
}
@@ -43,8 +43,8 @@ SafepointTable::SafepointTable(Code* code) {
length_ = Memory::uint32_at(header + kLengthOffset);
entry_size_ = Memory::uint32_at(header + kEntrySizeOffset);
pc_and_deoptimization_indexes_ = header + kHeaderSize;
entries_ =
pc_and_deoptimization_indexes_ + (length_ * kPcAndDeoptimizationInfoSize);
entries_ = pc_and_deoptimization_indexes_ +
(length_ * kPcAndDeoptimizationIndexSize);
DCHECK(entry_size_ > 0);
STATIC_ASSERT(SafepointEntry::DeoptimizationIndexField::kMax ==
Safepoint::kNoDeoptimizationIndex);
@@ -56,7 +56,6 @@ SafepointEntry SafepointTable::FindEntry(Address pc) const {
for (unsigned i = 0; i < length(); i++) {
// TODO(kasperl): Replace the linear search with binary search.
if (GetPcOffset(i) == pc_offset) return GetEntry(i);
if (GetDeoptimizationPcOffset(i) == pc_offset) return GetEntry(i);
}
return SafepointEntry();
}
@@ -111,8 +110,6 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
info.pc = assembler->pc_offset();
info.arguments = arguments;
info.has_doubles = (kind & Safepoint::kWithDoubles);
info.deoptimization_pc = Safepoint::kNoDeoptimizationPc;
int safepoint_id = deoptimization_info_.length();
deoptimization_info_.Add(info, zone_);
deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex, zone_);
if (deopt_mode == Safepoint::kNoLazyDeopt) {
@@ -123,7 +120,7 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
? new(zone_) ZoneList<int>(4, zone_)
: NULL,
zone_);
return Safepoint(safepoint_id, indexes_.last(), registers_.last());
return Safepoint(indexes_.last(), registers_.last());
}
@@ -162,7 +159,6 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
assembler->dd(deoptimization_info_[i].pc);
assembler->dd(EncodeExceptPC(deoptimization_info_[i],
deopt_index_list_[i]));
assembler->dd(deoptimization_info_[i].deoptimization_pc);
}
// Emit table of bitmaps.
@@ -17,10 +17,9 @@ struct Register;
class SafepointEntry BASE_EMBEDDED {
public:
SafepointEntry() : info_(0), deoptimization_pc_(0), bits_(NULL) {}
SafepointEntry() : info_(0), bits_(NULL) {}
SafepointEntry(unsigned info, unsigned deoptimization_pc, uint8_t* bits)
: info_(info), deoptimization_pc_(deoptimization_pc), bits_(bits) {
SafepointEntry(unsigned info, uint8_t* bits) : info_(info), bits_(bits) {
DCHECK(is_valid());
}
@@ -40,11 +39,6 @@ class SafepointEntry BASE_EMBEDDED {
return DeoptimizationIndexField::decode(info_);
}
unsigned deoptimization_pc() const {
DCHECK(is_valid());
return deoptimization_pc_;
}
static const int kArgumentsFieldBits = 3;
static const int kSaveDoublesFieldBits = 1;
static const int kDeoptIndexBits =
@@ -80,7 +74,6 @@ class SafepointEntry BASE_EMBEDDED {
private:
unsigned info_;
unsigned deoptimization_pc_;
uint8_t* bits_;
};
@@ -91,7 +84,7 @@ class SafepointTable BASE_EMBEDDED {
int size() const {
return kHeaderSize +
(length_ * (kPcAndDeoptimizationInfoSize + entry_size_));
(length_ * (kPcAndDeoptimizationIndexSize + entry_size_));
}
unsigned length() const { return length_; }
unsigned entry_size() const { return entry_size_; }
@@ -101,17 +94,11 @@ class SafepointTable BASE_EMBEDDED {
return Memory::uint32_at(GetPcOffsetLocation(index));
}
unsigned GetDeoptimizationPcOffset(unsigned index) const {
DCHECK(index < length_);
return Memory::uint32_at(GetDeoptimizationPcLocation(index));
}
SafepointEntry GetEntry(unsigned index) const {
DCHECK(index < length_);
unsigned info = Memory::uint32_at(GetInfoLocation(index));
unsigned deopt_pc = Memory::uint32_at(GetDeoptimizationPcLocation(index));
uint8_t* bits = &Memory::uint8_at(entries_ + (index * entry_size_));
return SafepointEntry(info, deopt_pc, bits);
return SafepointEntry(info, bits);
}
// Returns the entry for the given pc.
@@ -128,23 +115,18 @@ class SafepointTable BASE_EMBEDDED {
static const int kPcSize = kIntSize;
static const int kDeoptimizationIndexSize = kIntSize;
static const int kDeoptimizationPcSize = kIntSize;
static const int kPcAndDeoptimizationInfoSize =
kPcSize + kDeoptimizationIndexSize + kDeoptimizationPcSize;
static const int kPcAndDeoptimizationIndexSize =
kPcSize + kDeoptimizationIndexSize;
Address GetPcOffsetLocation(unsigned index) const {
return pc_and_deoptimization_indexes_ +
(index * kPcAndDeoptimizationInfoSize);
(index * kPcAndDeoptimizationIndexSize);
}
Address GetInfoLocation(unsigned index) const {
return GetPcOffsetLocation(index) + kPcSize;
}
Address GetDeoptimizationPcLocation(unsigned index) const {
return GetInfoLocation(index) + kDeoptimizationIndexSize;
}
static void PrintBits(OStream& os, // NOLINT
uint8_t byte, int digits);
@@ -177,30 +159,15 @@ class Safepoint BASE_EMBEDDED {
kLazyDeopt
};
class Id {
private:
explicit Id(int id) : id_(id) {}
int id_;
friend class SafepointTableBuilder;
friend class Safepoint;
};
static const int kNoDeoptimizationIndex =
(1 << (SafepointEntry::kDeoptIndexBits)) - 1;
static const unsigned kNoDeoptimizationPc = ~0U;
void DefinePointerSlot(int index, Zone* zone) { indexes_->Add(index, zone); }
void DefinePointerRegister(Register reg, Zone* zone);
Id id() const { return Id(id_); }
private:
Safepoint(int id, ZoneList<int>* indexes, ZoneList<int>* registers)
: id_(id), indexes_(indexes), registers_(registers) {}
int id_;
Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers)
: indexes_(indexes), registers_(registers) {}
ZoneList<int>* indexes_;
ZoneList<int>* registers_;
@@ -234,11 +201,6 @@ class SafepointTableBuilder BASE_EMBEDDED {
void BumpLastLazySafepointIndex() {
last_lazy_safepoint_ = deopt_index_list_.length();
}
void SetDeoptimizationPc(Safepoint::Id safepoint_id,
unsigned deoptimization_pc) {
deoptimization_info_[safepoint_id.id_].deoptimization_pc =
deoptimization_pc;
}
// Emit the safepoint table after the body. The number of bits per
// entry must be enough to hold all the pointer indexes.
@@ -250,7 +212,6 @@ class SafepointTableBuilder BASE_EMBEDDED {
unsigned pc;
unsigned arguments;
bool has_doubles;
unsigned deoptimization_pc;
};
uint32_t EncodeExceptPC(const DeoptimizationInfo& info, unsigned index);
@@ -67,6 +67,9 @@ class FunctionTester : public InitializedHandleScope {
Pipeline pipeline(&info);
Handle<Code> code = pipeline.GenerateCode();
if (FLAG_turbo_deoptimization) {
info.context()->native_context()->AddOptimizedCode(*code);
}
CHECK(!code.is_null());
function->ReplaceCode(*code);
@@ -48,6 +48,24 @@ class JSTypedLoweringTester : public HandleAndZoneScope {
return n;
}
Node* UndefinedConstant() {
Unique<Object> unique =
Unique<Object>::CreateImmovable(isolate->factory()->undefined_value());
return graph.NewNode(common.HeapConstant(unique));
}
Node* EmptyFrameState(Node* context) {
Node* parameters = graph.NewNode(common.StateValues(0));
Node* locals = graph.NewNode(common.StateValues(0));
Node* stack = graph.NewNode(common.StateValues(0));
Node* state_node =
graph.NewNode(common.FrameState(BailoutId(0), kIgnoreOutput),
parameters, locals, stack, context, UndefinedConstant());
return state_node;
}
Node* reduce(Node* node) {
JSGraph jsgraph(&graph, &common, &javascript, &typer, &machine);
JSTypedLowering reducer(&jsgraph);
@@ -775,12 +793,15 @@ TEST(UnaryNot) {
TEST(RemoveToNumberEffects) {
FLAG_turbo_deoptimization = true;
JSTypedLoweringTester R;
Node* effect_use = NULL;
for (int i = 0; i < 10; i++) {
Node* p0 = R.Parameter(Type::Number());
Node* ton = R.Unop(R.javascript.ToNumber(), p0);
Node* frame_state = R.EmptyFrameState(R.context());
effect_use = NULL;
switch (i) {
@@ -796,11 +817,11 @@ TEST(RemoveToNumberEffects) {
effect_use = R.graph.NewNode(R.common.EffectPhi(1), ton, R.start());
case 3:
effect_use = R.graph.NewNode(R.javascript.Add(), ton, ton, R.context(),
ton, R.start());
frame_state, ton, R.start());
break;
case 4:
effect_use = R.graph.NewNode(R.javascript.Add(), p0, p0, R.context(),
ton, R.start());
frame_state, ton, R.start());
break;
case 5:
effect_use = R.graph.NewNode(R.common.Return(), p0, ton, R.start());
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
// Flags: --allow-natives-syntax --turbo-deoptimization
var o1 = {x:1};
var o2 = {};