Commit e5081792 authored by jarin@chromium.org

Initial support for debugger frame state in Turbofan.

A number of changes were necessary:
- refactoring how frame states/lazy bailouts are attached in AstGraphBuilder
  (essentially a reland of r23096),
- attaching frame states to some JS nodes in a similar way to attaching
  the context (this is quite ugly and we should take another look at this),
- a new bailout point for the debugger statement,
- register allocation constraints for the frame states,
- generating translations and deopt entries, and attaching them to
  safepoints,
- enabling one mjsunit test for debugger state that uses the generated
  frame state (see the illustrative sketch below).
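
For illustration, a minimal mjsunit-style sketch of the kind of debugger
scenario this enables. This is an assumption about the shape of such a
test, not the test that was actually re-enabled; the listener body and
assertions are illustrative only:

  // Flags: --expose-debug-as debug --turbo-deoptimization

  // Get the Debug object exposed from the debug context global object.
  Debug = debug.Debug;

  var break_hit = false;

  function listener(event, exec_state, event_data, data) {
    if (event != Debug.DebugEvent.Break) return;
    // Reading a local value here relies on the frame state that
    // Turbofan now attaches to the debugger bailout point.
    assertEquals(42, exec_state.frame(0).evaluate("x").value());
    break_hit = true;
  }

  Debug.setListener(listener);

  function f() {
    var x = 42;
    debugger;  // Hits the new DebugBreakId bailout point.
    return x;
  }

  f();
  Debug.setListener(null);
  assertTrue(break_hit);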

BUG=
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/492203002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23270 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 558a05bf
......@@ -1324,8 +1324,14 @@ class DebuggerStatement V8_FINAL : public Statement {
public:
DECLARE_NODE_TYPE(DebuggerStatement)
BailoutId DebugBreakId() const { return debugger_id_; }
protected:
explicit DebuggerStatement(Zone* zone, int pos): Statement(zone, pos) {}
explicit DebuggerStatement(Zone* zone, int pos)
: Statement(zone, pos), debugger_id_(GetNextId(zone)) {}
private:
const BailoutId debugger_id_;
};
......
......@@ -150,7 +150,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
case kArchDeoptimize: {
int deoptimization_id = MiscField::decode(instr->opcode());
BuildTranslation(instr, deoptimization_id);
BuildTranslation(instr, 0, deoptimization_id);
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
isolate(), deoptimization_id, Deoptimizer::LAZY);
......@@ -240,20 +240,15 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
if (instr->InputAt(0)->IsImmediate()) {
Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
__ Call(code, RelocInfo::CODE_TARGET);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
} else {
Register reg = i.InputRegister(0);
int entry = Code::kHeaderSize - kHeapObjectTag;
__ ldr(reg, MemOperand(reg, entry));
__ Call(reg);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
}
bool lazy_deopt = (MiscField::decode(instr->opcode()) == 1);
if (lazy_deopt) {
RecordLazyDeoptimizationEntry(instr);
}
AddSafepointAndDeopt(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
......@@ -265,9 +260,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Call(ip);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
RecordLazyDeoptimizationEntry(instr);
AddSafepointAndDeopt(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
......
......@@ -785,7 +785,14 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
BasicBlock* deoptimization) {
ArmOperandGenerator g(this);
CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call);
CallBuffer buffer(zone(), descriptor); // TODO(turbofan): temp zone here?
FrameStateDescriptor* frame_state_descriptor = NULL;
if (descriptor->NeedsFrameState()) {
frame_state_descriptor =
GetFrameStateDescriptor(call->InputAt(descriptor->InputCount()));
}
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
// TODO(turbofan): on ARM64 it's probably better to use the code object in a
......@@ -796,17 +803,16 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// TODO(dcarney): might be possible to use claim/poke instead
// Push any stack arguments.
for (int i = buffer.pushed_count - 1; i >= 0; --i) {
Node* input = buffer.pushed_nodes[i];
Emit(kArmPush, NULL, g.UseRegister(input));
for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
input != buffer.pushed_nodes.rend(); input++) {
Emit(kArmPush, NULL, g.UseRegister(*input));
}
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
bool lazy_deopt = descriptor->CanLazilyDeoptimize();
opcode = kArmCallCodeObject | MiscField::encode(lazy_deopt ? 1 : 0);
opcode = kArmCallCodeObject;
break;
}
case CallDescriptor::kCallAddress:
......@@ -819,11 +825,12 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->deoptimization_support());
// Emit the call instruction.
Instruction* call_instr =
Emit(opcode, buffer.output_count, buffer.outputs,
buffer.fixed_and_control_count(), buffer.fixed_and_control_args);
Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
buffer.instruction_args.size(), &buffer.instruction_args.front());
call_instr->MarkAsCall();
if (deoptimization != NULL) {
......@@ -833,9 +840,9 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// Caller clean up of stack for C-style calls.
if (descriptor->kind() == CallDescriptor::kCallAddress &&
buffer.pushed_count > 0) {
!buffer.pushed_nodes.empty()) {
DCHECK(deoptimization == NULL && continuation == NULL);
Emit(kArmDrop | MiscField::encode(buffer.pushed_count), NULL);
Emit(kArmDrop | MiscField::encode(buffer.pushed_nodes.size()), NULL);
}
}
......
......@@ -142,7 +142,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
case kArchDeoptimize: {
int deoptimization_id = MiscField::decode(instr->opcode());
BuildTranslation(instr, deoptimization_id);
BuildTranslation(instr, 0, deoptimization_id);
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
isolate(), deoptimization_id, Deoptimizer::LAZY);
......@@ -287,20 +287,14 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
if (instr->InputAt(0)->IsImmediate()) {
Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
__ Call(code, RelocInfo::CODE_TARGET);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
} else {
Register reg = i.InputRegister(0);
int entry = Code::kHeaderSize - kHeapObjectTag;
__ Ldr(reg, MemOperand(reg, entry));
__ Call(reg);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
}
bool lazy_deopt = (MiscField::decode(instr->opcode()) == 1);
if (lazy_deopt) {
RecordLazyDeoptimizationEntry(instr);
}
AddSafepointAndDeopt(instr);
// Meaningless instruction for ICs to overwrite.
AddNopForSmiCodeInlining();
break;
......@@ -313,9 +307,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Call(x10);
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
RecordLazyDeoptimizationEntry(instr);
AddSafepointAndDeopt(instr);
break;
}
case kArm64CallAddress: {
......
......@@ -596,7 +596,14 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
BasicBlock* deoptimization) {
Arm64OperandGenerator g(this);
CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call);
CallBuffer buffer(zone(), descriptor); // TODO(turbofan): temp zone here?
FrameStateDescriptor* frame_state_descriptor = NULL;
if (descriptor->NeedsFrameState()) {
frame_state_descriptor =
GetFrameStateDescriptor(call->InputAt(descriptor->InputCount()));
}
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
// TODO(turbofan): on ARM64 it's probably better to use the code object in a
......@@ -607,8 +614,8 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// Push the arguments to the stack.
bool is_c_frame = descriptor->kind() == CallDescriptor::kCallAddress;
bool pushed_count_uneven = buffer.pushed_count & 1;
int aligned_push_count = buffer.pushed_count;
bool pushed_count_uneven = buffer.pushed_nodes.size() & 1;
int aligned_push_count = buffer.pushed_nodes.size();
if (is_c_frame && pushed_count_uneven) {
aligned_push_count++;
}
......@@ -622,7 +629,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
}
// Move arguments to the stack.
{
int slot = buffer.pushed_count - 1;
int slot = buffer.pushed_nodes.size() - 1;
// Emit the uneven pushes.
if (pushed_count_uneven) {
Node* input = buffer.pushed_nodes[slot];
......@@ -642,8 +649,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
bool lazy_deopt = descriptor->CanLazilyDeoptimize();
opcode = kArm64CallCodeObject | MiscField::encode(lazy_deopt ? 1 : 0);
opcode = kArm64CallCodeObject;
break;
}
case CallDescriptor::kCallAddress:
......@@ -656,11 +662,12 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->deoptimization_support());
// Emit the call instruction.
Instruction* call_instr =
Emit(opcode, buffer.output_count, buffer.outputs,
buffer.fixed_and_control_count(), buffer.fixed_and_control_args);
Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
buffer.instruction_args.size(), &buffer.instruction_args.front());
call_instr->MarkAsCall();
if (deoptimization != NULL) {
......
[diff collapsed - contents not shown]
......@@ -171,8 +171,18 @@ class AstGraphBuilder : public StructuredGraphBuilder, public AstVisitor {
// Dispatched from VisitForInStatement.
void VisitForInAssignment(Expression* expr, Node* value);
void BuildLazyBailout(Node* node, BailoutId ast_id);
void BuildLazyBailoutWithPushedNode(Node* node, BailoutId ast_id);
// Flag that describes how to combine the current environment with
// the output of a node to obtain a framestate for lazy bailout.
enum OutputFrameStateCombine {
PUSH_OUTPUT, // Push the output on the expression stack.
IGNORE_OUTPUT // Use the frame state as-is.
};
// Builds deoptimization for a given node.
void PrepareFrameState(Node* node, BailoutId ast_id,
OutputFrameStateCombine combine = IGNORE_OUTPUT);
OutputFrameStateCombine StateCombineFromAstContext();
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
DISALLOW_COPY_AND_ASSIGN(AstGraphBuilder);
......@@ -206,11 +216,9 @@ class AstGraphBuilder::Environment
DCHECK(variable->IsStackAllocated());
if (variable->IsParameter()) {
values()->at(variable->index() + 1) = node;
parameters_dirty_ = true;
} else {
DCHECK(variable->IsStackLocal());
values()->at(variable->index() + parameters_count_) = node;
locals_dirty_ = true;
}
}
Node* Lookup(Variable* variable) {
......@@ -226,7 +234,6 @@ class AstGraphBuilder::Environment
// Operations on the operand stack.
void Push(Node* node) {
values()->push_back(node);
stack_dirty_ = true;
}
Node* Top() {
DCHECK(stack_height() > 0);
......@@ -236,7 +243,6 @@ class AstGraphBuilder::Environment
DCHECK(stack_height() > 0);
Node* back = values()->back();
values()->pop_back();
stack_dirty_ = true;
return back;
}
......@@ -245,7 +251,6 @@ class AstGraphBuilder::Environment
DCHECK(depth >= 0 && depth < stack_height());
int index = static_cast<int>(values()->size()) - depth - 1;
values()->at(index) = node;
stack_dirty_ = true;
}
Node* Peek(int depth) {
DCHECK(depth >= 0 && depth < stack_height());
......@@ -255,7 +260,6 @@ class AstGraphBuilder::Environment
void Drop(int depth) {
DCHECK(depth >= 0 && depth <= stack_height());
values()->erase(values()->end() - depth, values()->end());
stack_dirty_ = true;
}
// Preserve a checkpoint of the environment for the IR graph. Any
......@@ -263,14 +267,13 @@ class AstGraphBuilder::Environment
Node* Checkpoint(BailoutId ast_id);
private:
void UpdateStateValues(Node** state_values, int offset, int count);
int parameters_count_;
int locals_count_;
Node* parameters_node_;
Node* locals_node_;
Node* stack_node_;
bool parameters_dirty_;
bool locals_dirty_;
bool stack_dirty_;
};
......@@ -282,10 +285,15 @@ class AstGraphBuilder::AstContext BASE_EMBEDDED {
bool IsValue() const { return kind_ == Expression::kValue; }
bool IsTest() const { return kind_ == Expression::kTest; }
// Determines how to combine the frame state with the value
// that is about to be plugged into this AstContext.
AstGraphBuilder::OutputFrameStateCombine GetStateCombine() {
return IsEffect() ? IGNORE_OUTPUT : PUSH_OUTPUT;
}
// Plug a node into this expression context. Call this function in tail
// position in the Visit functions for expressions.
virtual void ProduceValue(Node* value) = 0;
virtual void ProduceValueWithLazyBailout(Node* value) = 0;
// Unplugs a node from this expression context. Call this to retrieve the
// result of another Visit function that already plugged the context.
......@@ -295,8 +303,7 @@ class AstGraphBuilder::AstContext BASE_EMBEDDED {
void ReplaceValue() { ProduceValue(ConsumeValue()); }
protected:
AstContext(AstGraphBuilder* owner, Expression::Context kind,
BailoutId bailout_id);
AstContext(AstGraphBuilder* owner, Expression::Context kind);
virtual ~AstContext();
AstGraphBuilder* owner() const { return owner_; }
......@@ -308,8 +315,6 @@ class AstGraphBuilder::AstContext BASE_EMBEDDED {
int original_height_;
#endif
BailoutId bailout_id_;
private:
Expression::Context kind_;
AstGraphBuilder* owner_;
......@@ -320,11 +325,10 @@ class AstGraphBuilder::AstContext BASE_EMBEDDED {
// Context to evaluate expression for its side effects only.
class AstGraphBuilder::AstEffectContext V8_FINAL : public AstContext {
public:
explicit AstEffectContext(AstGraphBuilder* owner, BailoutId bailout_id)
: AstContext(owner, Expression::kEffect, bailout_id) {}
explicit AstEffectContext(AstGraphBuilder* owner)
: AstContext(owner, Expression::kEffect) {}
virtual ~AstEffectContext();
virtual void ProduceValue(Node* value) V8_OVERRIDE;
virtual void ProduceValueWithLazyBailout(Node* value) V8_OVERRIDE;
virtual Node* ConsumeValue() V8_OVERRIDE;
};
......@@ -332,11 +336,10 @@ class AstGraphBuilder::AstEffectContext V8_FINAL : public AstContext {
// Context to evaluate expression for its value (and side effects).
class AstGraphBuilder::AstValueContext V8_FINAL : public AstContext {
public:
explicit AstValueContext(AstGraphBuilder* owner, BailoutId bailout_id)
: AstContext(owner, Expression::kValue, bailout_id) {}
explicit AstValueContext(AstGraphBuilder* owner)
: AstContext(owner, Expression::kValue) {}
virtual ~AstValueContext();
virtual void ProduceValue(Node* value) V8_OVERRIDE;
virtual void ProduceValueWithLazyBailout(Node* value) V8_OVERRIDE;
virtual Node* ConsumeValue() V8_OVERRIDE;
};
......@@ -344,11 +347,10 @@ class AstGraphBuilder::AstValueContext V8_FINAL : public AstContext {
// Context to evaluate expression for a condition value (and side effects).
class AstGraphBuilder::AstTestContext V8_FINAL : public AstContext {
public:
explicit AstTestContext(AstGraphBuilder* owner, BailoutId bailout_id)
: AstContext(owner, Expression::kTest, bailout_id) {}
explicit AstTestContext(AstGraphBuilder* owner)
: AstContext(owner, Expression::kTest) {}
virtual ~AstTestContext();
virtual void ProduceValue(Node* value) V8_OVERRIDE;
virtual void ProduceValueWithLazyBailout(Node* value) V8_OVERRIDE;
virtual Node* ConsumeValue() V8_OVERRIDE;
};
......
......@@ -233,6 +233,34 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
}
void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
CallDescriptor::DeoptimizationSupport deopt =
static_cast<CallDescriptor::DeoptimizationSupport>(
MiscField::decode(instr->opcode()));
if ((deopt & CallDescriptor::kLazyDeoptimization) != 0) {
RecordLazyDeoptimizationEntry(instr);
}
bool needs_frame_state = (deopt & CallDescriptor::kNeedsFrameState) != 0;
RecordSafepoint(
instr->pointer_map(), Safepoint::kSimple, 0,
needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
if ((deopt & CallDescriptor::kNeedsFrameState) != 0) {
// If the frame state is present, it starts at argument 1
// (just after the code address).
InstructionOperandConverter converter(this, instr);
// Argument 1 is deoptimization id.
int deoptimization_id = converter.ToConstant(instr->InputAt(1)).ToInt32();
// The actual frame state values start with argument 2.
BuildTranslation(instr, 2, deoptimization_id);
safepoints()->RecordLazyDeoptimizationIndex(deoptimization_id);
}
}
void CodeGenerator::RecordLazyDeoptimizationEntry(Instruction* instr) {
InstructionOperandConverter i(this, instr);
......@@ -264,6 +292,7 @@ int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
void CodeGenerator::BuildTranslation(Instruction* instr,
int first_argument_index,
int deoptimization_id) {
// We should build translation only once.
DCHECK_EQ(NULL, deoptimization_states_[deoptimization_id]);
......@@ -276,7 +305,8 @@ void CodeGenerator::BuildTranslation(Instruction* instr,
descriptor->size() - descriptor->parameters_count());
for (int i = 0; i < descriptor->size(); i++) {
AddTranslationForOperand(&translation, instr, instr->InputAt(i));
AddTranslationForOperand(&translation, instr,
instr->InputAt(i + first_argument_index));
}
deoptimization_states_[deoptimization_id] =
......
......@@ -81,10 +81,12 @@ class CodeGenerator V8_FINAL : public GapResolver::Assembler {
// ===========================================================================
// Deoptimization table construction
void AddSafepointAndDeopt(Instruction* instr);
void RecordLazyDeoptimizationEntry(Instruction* instr);
void PopulateDeoptimizationData(Handle<Code> code);
int DefineDeoptimizationLiteral(Handle<Object> literal);
void BuildTranslation(Instruction* instr, int deoptimization_id);
void BuildTranslation(Instruction* instr, int first_argument_index,
int deoptimization_id);
void AddTranslationForOperand(Translation* translation, Instruction* instr,
InstructionOperand* op);
void AddNopForSmiCodeInlining();
......
......@@ -35,7 +35,8 @@ class CallOperator : public Operator1<CallDescriptor*> {
public:
CallOperator(CallDescriptor* descriptor, const char* mnemonic)
: Operator1<CallDescriptor*>(
IrOpcode::kCall, descriptor->properties(), descriptor->InputCount(),
IrOpcode::kCall, descriptor->properties(),
descriptor->InputCount() + descriptor->FrameStateCount(),
descriptor->ReturnCount(), mnemonic, descriptor) {}
virtual OStream& PrintParameter(OStream& os) const { // NOLINT
......
......@@ -30,7 +30,10 @@ StructuredGraphBuilder::StructuredGraphBuilder(Graph* graph,
Node* StructuredGraphBuilder::MakeNode(Operator* op, int value_input_count,
Node** value_inputs) {
DCHECK(op->InputCount() == value_input_count);
bool has_context = OperatorProperties::HasContextInput(op);
bool has_framestate = OperatorProperties::HasFrameStateInput(op);
bool has_control = OperatorProperties::GetControlInputCount(op) == 1;
bool has_effect = OperatorProperties::GetEffectInputCount(op) == 1;
......@@ -43,6 +46,7 @@ Node* StructuredGraphBuilder::MakeNode(Operator* op, int value_input_count,
} else {
int input_count_with_deps = value_input_count;
if (has_context) ++input_count_with_deps;
if (has_framestate) ++input_count_with_deps;
if (has_control) ++input_count_with_deps;
if (has_effect) ++input_count_with_deps;
void* raw_buffer = alloca(kPointerSize * input_count_with_deps);
......@@ -52,6 +56,12 @@ Node* StructuredGraphBuilder::MakeNode(Operator* op, int value_input_count,
if (has_context) {
*current_input++ = current_context();
}
if (has_framestate) {
// The frame state will be inserted later. Here we misuse
// the dead_control node as a sentinel to be later overwritten
// with the real frame state.
*current_input++ = dead_control();
}
if (has_effect) {
*current_input++ = environment_->GetEffectDependency();
}
......
......@@ -166,6 +166,10 @@ void GraphVisualizer::AnnotateNode(Node* node) {
++i, j--) {
os_ << "|<I" << i.index() << ">X #" << (*i)->id();
}
for (int j = OperatorProperties::GetFrameStateInputCount(node->op()); j > 0;
++i, j--) {
os_ << "|<I" << i.index() << ">X #" << (*i)->id();
}
for (int j = OperatorProperties::GetEffectInputCount(node->op()); j > 0;
++i, j--) {
os_ << "|<I" << i.index() << ">E #" << (*i)->id();
......
......@@ -122,7 +122,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
case kArchDeoptimize: {
int deoptimization_id = MiscField::decode(instr->opcode());
BuildTranslation(instr, deoptimization_id);
BuildTranslation(instr, 0, deoptimization_id);
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
isolate(), deoptimization_id, Deoptimizer::LAZY);
......@@ -246,13 +246,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
int entry = Code::kHeaderSize - kHeapObjectTag;
__ call(Operand(reg, entry));
}
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
bool lazy_deopt = (MiscField::decode(instr->opcode()) == 1);
if (lazy_deopt) {
RecordLazyDeoptimizationEntry(instr);
}
AddSafepointAndDeopt(instr);
AddNopForSmiCodeInlining();
break;
}
......@@ -277,9 +273,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mov(esi, FieldOperand(func, JSFunction::kContextOffset));
__ call(FieldOperand(func, JSFunction::kCodeEntryOffset));
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
RecordLazyDeoptimizationEntry(instr);
AddSafepointAndDeopt(instr);
break;
}
case kSSEFloat64Cmp:
......
......@@ -512,25 +512,32 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
BasicBlock* deoptimization) {
IA32OperandGenerator g(this);
CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call);
CallBuffer buffer(zone(), descriptor);
FrameStateDescriptor* frame_state_descriptor = NULL;
if (descriptor->NeedsFrameState()) {
frame_state_descriptor =
GetFrameStateDescriptor(call->InputAt(descriptor->InputCount()));
}
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization);
// Push any stack arguments.
for (int i = buffer.pushed_count - 1; i >= 0; --i) {
Node* input = buffer.pushed_nodes[i];
for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
input != buffer.pushed_nodes.rend(); input++) {
// TODO(titzer): handle pushing double parameters.
Emit(kIA32Push, NULL,
g.CanBeImmediate(input) ? g.UseImmediate(input) : g.Use(input));
g.CanBeImmediate(*input) ? g.UseImmediate(*input) : g.Use(*input));
}
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
bool lazy_deopt = descriptor->CanLazilyDeoptimize();
opcode = kIA32CallCodeObject | MiscField::encode(lazy_deopt ? 1 : 0);
opcode = kIA32CallCodeObject;
break;
}
case CallDescriptor::kCallAddress:
......@@ -543,11 +550,12 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->deoptimization_support());
// Emit the call instruction.
Instruction* call_instr =
Emit(opcode, buffer.output_count, buffer.outputs,
buffer.fixed_and_control_count(), buffer.fixed_and_control_args);
Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
buffer.instruction_args.size(), &buffer.instruction_args.front());
call_instr->MarkAsCall();
if (deoptimization != NULL) {
......@@ -557,9 +565,9 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// Caller clean up of stack for C-style calls.
if (descriptor->kind() == CallDescriptor::kCallAddress &&
buffer.pushed_count > 0) {
buffer.pushed_nodes.size() > 0) {
DCHECK(deoptimization == NULL && continuation == NULL);
Emit(kPopStack | MiscField::encode(buffer.pushed_count), NULL);
Emit(kPopStack | MiscField::encode(buffer.pushed_nodes.size()), NULL);
}
}
......
......@@ -346,22 +346,29 @@ class FlagsContinuation V8_FINAL {
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
struct CallBuffer {
CallBuffer(Zone* zone, CallDescriptor* descriptor);
CallBuffer(Zone* zone, CallDescriptor* descriptor,
FrameStateDescriptor* frame_state);
int output_count;
CallDescriptor* descriptor;
Node** output_nodes;
InstructionOperand** outputs;
InstructionOperand** fixed_and_control_args;
int fixed_count;
Node** pushed_nodes;
int pushed_count;
FrameStateDescriptor* frame_state_descriptor;
NodeVector output_nodes;
InstructionOperandVector outputs;
InstructionOperandVector instruction_args;
NodeVector pushed_nodes;
int input_count() { return descriptor->InputCount(); }
int input_count() const { return descriptor->InputCount(); }
int control_count() { return descriptor->CanLazilyDeoptimize() ? 2 : 0; }
int frame_state_count() const { return descriptor->FrameStateCount(); }
int fixed_and_control_count() { return fixed_count + control_count(); }
int frame_state_value_count() const {
return (frame_state_descriptor == NULL)
? 0
: (frame_state_descriptor->size() + 1);
}
int control_count() const {
return descriptor->CanLazilyDeoptimize() ? 2 : 0;
}
};
} // namespace compiler
......
[diff collapsed - contents not shown]
......@@ -142,6 +142,10 @@ class InstructionSelector V8_FINAL {
bool call_address_immediate, BasicBlock* cont_node,
BasicBlock* deopt_node);
FrameStateDescriptor* GetFrameStateDescriptor(Node* node);
void AddFrameStateInputs(Node* state, InstructionOperandVector* inputs,
FrameStateDescriptor* descriptor);
// ===========================================================================
// ============= Architecture-specific graph covering methods. ===============
// ===========================================================================
......
......@@ -89,6 +89,9 @@ class InstructionOperand : public ZoneObject {
unsigned value_;
};
typedef std::vector<InstructionOperand*, zone_allocator<InstructionOperand*> >
InstructionOperandVector;
OStream& operator<<(OStream& os, const InstructionOperand& op);
class UnallocatedOperand : public InstructionOperand {
......
......@@ -288,9 +288,14 @@ REPLACE_UNIMPLEMENTED(JSDebugger)
static CallDescriptor::DeoptimizationSupport DeoptimizationSupportForNode(
Node* node) {
return OperatorProperties::CanLazilyDeoptimize(node->op())
? CallDescriptor::kCanDeoptimize
: CallDescriptor::kCannotDeoptimize;
int result = CallDescriptor::kNoDeoptimization;
if (OperatorProperties::CanLazilyDeoptimize(node->op())) {
result |= CallDescriptor::kLazyDeoptimization;
}
if (OperatorProperties::HasFrameStateInput(node->op())) {
result |= CallDescriptor::kNeedsFrameState;
}
return static_cast<CallDescriptor::DeoptimizationSupport>(result);
}
......
......@@ -64,7 +64,7 @@ class LinkageHelper {
locations, // locations
Operator::kNoProperties, // properties
kNoCalleeSaved, // callee-saved registers
CallDescriptor::kCanDeoptimize); // deoptimization
CallDescriptor::kLazyDeoptimization); // deoptimization
}
......@@ -196,7 +196,7 @@ class LinkageHelper {
return new (zone) CallDescriptor(
CallDescriptor::kCallAddress, 1, num_params, num_params + 1, locations,
Operator::kNoProperties, LinkageTraits::CCalleeSaveRegisters(),
CallDescriptor::kCannotDeoptimize); // TODO(jarin) should deoptimize!
CallDescriptor::kNoDeoptimization); // TODO(jarin) should deoptimize!
}
};
}
......
......@@ -34,8 +34,8 @@ OStream& operator<<(OStream& os, const CallDescriptor::Kind& k) {
OStream& operator<<(OStream& os, const CallDescriptor& d) {
// TODO(svenpanne) Output properties etc. and be less cryptic.
return os << d.kind() << ":" << d.debug_name() << ":r" << d.ReturnCount()
<< "p" << d.ParameterCount() << "i" << d.InputCount()
<< (d.CanLazilyDeoptimize() ? "deopt" : "");
<< "p" << d.ParameterCount() << "i" << d.InputCount() << "f"
<< d.FrameStateCount() << (d.CanLazilyDeoptimize() ? "deopt" : "");
}
......
......@@ -43,7 +43,12 @@ class CallDescriptor : public ZoneObject {
// or an address--all of which require different machine sequences to call.
enum Kind { kCallCodeObject, kCallJSFunction, kCallAddress };
enum DeoptimizationSupport { kCanDeoptimize, kCannotDeoptimize };
// TODO(jarin) kLazyDeoptimization and kNeedsFrameState should be unified.
enum DeoptimizationSupport {
kNoDeoptimization = 0,
kLazyDeoptimization = 1,
kNeedsFrameState = 2
};
CallDescriptor(Kind kind, int8_t return_count, int16_t parameter_count,
int16_t input_count, LinkageLocation* locations,
......@@ -74,8 +79,18 @@ class CallDescriptor : public ZoneObject {
int InputCount() const { return input_count_; }
int FrameStateCount() const { return NeedsFrameState() ? 1 : 0; }
bool CanLazilyDeoptimize() const {
return deoptimization_support_ == kCanDeoptimize;
return (deoptimization_support() & kLazyDeoptimization) != 0;
}
bool NeedsFrameState() const {
return (deoptimization_support() & kNeedsFrameState) != 0;
}
DeoptimizationSupport deoptimization_support() const {
return deoptimization_support_;
}
LinkageLocation GetReturnLocation(int index) {
......@@ -141,7 +156,7 @@ class Linkage : public ZoneObject {
Runtime::FunctionId function, int parameter_count,
Operator::Property properties,
CallDescriptor::DeoptimizationSupport can_deoptimize =
CallDescriptor::kCannotDeoptimize);
CallDescriptor::kNoDeoptimization);
static CallDescriptor* GetRuntimeCallDescriptor(
Runtime::FunctionId function, int parameter_count,
Operator::Property properties,
......@@ -150,7 +165,7 @@ class Linkage : public ZoneObject {
CallDescriptor* GetStubCallDescriptor(
CodeStubInterfaceDescriptor* descriptor, int stack_parameter_count = 0,
CallDescriptor::DeoptimizationSupport can_deoptimize =
CallDescriptor::kCannotDeoptimize);
CallDescriptor::kNoDeoptimization);
static CallDescriptor* GetStubCallDescriptor(
CodeStubInterfaceDescriptor* descriptor, int stack_parameter_count,
CallDescriptor::DeoptimizationSupport can_deoptimize, Zone* zone);
......
......@@ -29,10 +29,14 @@ inline int NodeProperties::FirstContextIndex(Node* node) {
return PastValueIndex(node);
}
inline int NodeProperties::FirstEffectIndex(Node* node) {
inline int NodeProperties::FirstFrameStateIndex(Node* node) {
return PastContextIndex(node);
}
inline int NodeProperties::FirstEffectIndex(Node* node) {
return PastFrameStateIndex(node);
}
inline int NodeProperties::FirstControlIndex(Node* node) {
return PastEffectIndex(node);
}
......@@ -48,6 +52,11 @@ inline int NodeProperties::PastContextIndex(Node* node) {
OperatorProperties::GetContextInputCount(node->op());
}
inline int NodeProperties::PastFrameStateIndex(Node* node) {
return FirstFrameStateIndex(node) +
OperatorProperties::GetFrameStateInputCount(node->op());
}
inline int NodeProperties::PastEffectIndex(Node* node) {
return FirstEffectIndex(node) +
OperatorProperties::GetEffectInputCount(node->op());
......@@ -73,6 +82,11 @@ inline Node* NodeProperties::GetContextInput(Node* node) {
return node->InputAt(FirstContextIndex(node));
}
inline Node* NodeProperties::GetFrameStateInput(Node* node) {
DCHECK(OperatorProperties::HasFrameStateInput(node->op()));
return node->InputAt(FirstFrameStateIndex(node));
}
inline Node* NodeProperties::GetEffectInput(Node* node, int index) {
DCHECK(0 <= index &&
index < OperatorProperties::GetEffectInputCount(node->op()));
......@@ -85,6 +99,10 @@ inline Node* NodeProperties::GetControlInput(Node* node, int index) {
return node->InputAt(FirstControlIndex(node) + index);
}
inline int NodeProperties::GetFrameStateIndex(Node* node) {
DCHECK(OperatorProperties::HasFrameStateInput(node->op()));
return FirstFrameStateIndex(node);
}
// -----------------------------------------------------------------------------
// Edge kinds.
......
......@@ -19,9 +19,12 @@ class NodeProperties {
public:
static inline Node* GetValueInput(Node* node, int index);
static inline Node* GetContextInput(Node* node);
static inline Node* GetFrameStateInput(Node* node);
static inline Node* GetEffectInput(Node* node, int index = 0);
static inline Node* GetControlInput(Node* node, int index = 0);
static inline int GetFrameStateIndex(Node* node);
static inline bool IsValueEdge(Node::Edge edge);
static inline bool IsContextEdge(Node::Edge edge);
static inline bool IsEffectEdge(Node::Edge edge);
......@@ -42,10 +45,12 @@ class NodeProperties {
private:
static inline int FirstValueIndex(Node* node);
static inline int FirstContextIndex(Node* node);
static inline int FirstFrameStateIndex(Node* node);
static inline int FirstEffectIndex(Node* node);
static inline int FirstControlIndex(Node* node);
static inline int PastValueIndex(Node* node);
static inline int PastContextIndex(Node* node);
static inline int PastFrameStateIndex(Node* node);
static inline int PastEffectIndex(Node* node);
static inline int PastControlIndex(Node* node);
......
......@@ -10,15 +10,11 @@ namespace v8 {
namespace internal {
namespace compiler {
void Node::CollectProjections(int projection_count, Node** projections) {
for (int i = 0; i < projection_count; ++i) projections[i] = NULL;
void Node::CollectProjections(NodeVector* projections) {
for (UseIter i = uses().begin(); i != uses().end(); ++i) {
if ((*i)->opcode() != IrOpcode::kProjection) continue;
int32_t index = OpParameter<int32_t>(*i);
DCHECK_GE(index, 0);
DCHECK_LT(index, projection_count);
DCHECK_EQ(NULL, projections[index]);
projections[index] = *i;
DCHECK_GE(OpParameter<int32_t>(*i), 0);
projections->push_back(*i);
}
}
......
......@@ -54,7 +54,8 @@ class Node : public GenericNode<NodeData, Node> {
void Initialize(Operator* op) { set_op(op); }
void CollectProjections(int projection_count, Node** projections);
void CollectProjections(
std::vector<Node*, zone_allocator<Node*> >* projections);
Node* FindProjection(int32_t projection_index);
};
......
......@@ -31,6 +31,33 @@ inline bool OperatorProperties::HasControlInput(Operator* op) {
return OperatorProperties::GetControlInputCount(op) > 0;
}
inline bool OperatorProperties::HasFrameStateInput(Operator* op) {
if (!FLAG_turbo_deoptimization) {
return false;
}
switch (op->opcode()) {
case IrOpcode::kJSCallFunction:
return true;
case IrOpcode::kJSCallRuntime: {
Runtime::FunctionId function =
reinterpret_cast<Operator1<Runtime::FunctionId>*>(op)->parameter();
// TODO(jarin) At the moment, we only add frame state for
// a few chosen runtime functions.
switch (function) {
case Runtime::kDebugBreak:
case Runtime::kDeoptimizeFunction:
return true;
default:
return false;
}
UNREACHABLE();
}
default:
return false;
}
}
inline int OperatorProperties::GetValueInputCount(Operator* op) {
return op->InputCount();
......@@ -40,6 +67,10 @@ inline int OperatorProperties::GetContextInputCount(Operator* op) {
return OperatorProperties::HasContextInput(op) ? 1 : 0;
}
inline int OperatorProperties::GetFrameStateInputCount(Operator* op) {
return OperatorProperties::HasFrameStateInput(op) ? 1 : 0;
}
inline int OperatorProperties::GetEffectInputCount(Operator* op) {
if (op->opcode() == IrOpcode::kEffectPhi ||
op->opcode() == IrOpcode::kFinish) {
......@@ -77,7 +108,8 @@ inline int OperatorProperties::GetControlInputCount(Operator* op) {
inline int OperatorProperties::GetTotalInputCount(Operator* op) {
return GetValueInputCount(op) + GetContextInputCount(op) +
GetEffectInputCount(op) + GetControlInputCount(op);
GetFrameStateInputCount(op) + GetEffectInputCount(op) +
GetControlInputCount(op);
}
// -----------------------------------------------------------------------------
......@@ -142,8 +174,15 @@ inline bool OperatorProperties::CanLazilyDeoptimize(Operator* op) {
Runtime::FunctionId function =
reinterpret_cast<Operator1<Runtime::FunctionId>*>(op)->parameter();
// TODO(jarin) At the moment, we only support lazy deoptimization for
// the %DeoptimizeFunction runtime function.
return function == Runtime::kDeoptimizeFunction;
// a few chosen runtime functions.
switch (function) {
case Runtime::kDebugBreak:
case Runtime::kDeoptimizeFunction:
return true;
default:
return false;
}
UNREACHABLE();
}
// JS function calls
......
......@@ -19,11 +19,13 @@ class OperatorProperties {
static inline bool HasContextInput(Operator* node);
static inline bool HasEffectInput(Operator* node);
static inline bool HasControlInput(Operator* node);
static inline bool HasFrameStateInput(Operator* node);
static inline int GetValueInputCount(Operator* op);
static inline int GetContextInputCount(Operator* op);
static inline int GetEffectInputCount(Operator* op);
static inline int GetControlInputCount(Operator* op);
static inline int GetFrameStateInputCount(Operator* op);
static inline int GetTotalInputCount(Operator* op);
static inline bool HasValueOutput(Operator* op);
......
......@@ -97,9 +97,9 @@ Node* RawMachineAssembler::CallJS0(Node* function, Node* receiver,
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
Node* arg0, Label* continuation,
Label* deoptimization) {
CallDescriptor* descriptor =
Linkage::GetRuntimeCallDescriptor(function, 1, Operator::kNoProperties,
CallDescriptor::kCanDeoptimize, zone());
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
function, 1, Operator::kNoProperties, CallDescriptor::kLazyDeoptimization,
zone());
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = NewNode(
......
......@@ -58,11 +58,14 @@ class Verifier::Visitor : public NullNodeVisitor {
GenericGraphVisit::Control Verifier::Visitor::Pre(Node* node) {
int value_count = OperatorProperties::GetValueInputCount(node->op());
int context_count = OperatorProperties::GetContextInputCount(node->op());
int frame_state_count =
OperatorProperties::GetFrameStateInputCount(node->op());
int effect_count = OperatorProperties::GetEffectInputCount(node->op());
int control_count = OperatorProperties::GetControlInputCount(node->op());
// Verify number of inputs matches up.
int input_count = value_count + context_count + effect_count + control_count;
int input_count = value_count + context_count + frame_state_count +
effect_count + control_count;
CHECK_EQ(input_count, node->InputCount());
// Verify all value inputs actually produce a value.
......
......@@ -215,7 +215,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
break;
case kArchDeoptimize: {
int deoptimization_id = MiscField::decode(instr->opcode());
BuildTranslation(instr, deoptimization_id);
BuildTranslation(instr, 0, deoptimization_id);
Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
isolate(), deoptimization_id, Deoptimizer::LAZY);
......@@ -418,12 +418,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
int entry = Code::kHeaderSize - kHeapObjectTag;
__ Call(Operand(reg, entry));
}
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
bool lazy_deopt = (MiscField::decode(instr->opcode()) == 1);
if (lazy_deopt) {
RecordLazyDeoptimizationEntry(instr);
}
AddSafepointAndDeopt(instr);
AddNopForSmiCodeInlining();
break;
}
......@@ -448,9 +445,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ movp(rsi, FieldOperand(func, JSFunction::kContextOffset));
__ Call(FieldOperand(func, JSFunction::kCodeEntryOffset));
RecordSafepoint(instr->pointer_map(), Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
RecordLazyDeoptimizationEntry(instr);
AddSafepointAndDeopt(instr);
break;
}
case kSSEFloat64Cmp: {
......
......@@ -676,7 +676,14 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
BasicBlock* deoptimization) {
X64OperandGenerator g(this);
CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call);
CallBuffer buffer(zone(), descriptor); // TODO(turbofan): temp zone here?
FrameStateDescriptor* frame_state_descriptor = NULL;
if (descriptor->NeedsFrameState()) {
frame_state_descriptor =
GetFrameStateDescriptor(call->InputAt(descriptor->InputCount()));
}
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
InitializeCallBuffer(call, &buffer, true, true, continuation, deoptimization);
......@@ -684,13 +691,13 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// TODO(dcarney): stack alignment for c calls.
// TODO(dcarney): shadow space on window for c calls.
// Push any stack arguments.
for (int i = buffer.pushed_count - 1; i >= 0; --i) {
Node* input = buffer.pushed_nodes[i];
for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
input != buffer.pushed_nodes.rend(); input++) {
// TODO(titzer): handle pushing double parameters.
if (g.CanBeImmediate(input)) {
Emit(kX64PushI, NULL, g.UseImmediate(input));
if (g.CanBeImmediate(*input)) {
Emit(kX64PushI, NULL, g.UseImmediate(*input));
} else {
Emit(kX64Push, NULL, g.Use(input));
Emit(kX64Push, NULL, g.Use(*input));
}
}
......@@ -698,8 +705,7 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject: {
bool lazy_deopt = descriptor->CanLazilyDeoptimize();
opcode = kX64CallCodeObject | MiscField::encode(lazy_deopt ? 1 : 0);
opcode = kX64CallCodeObject;
break;
}
case CallDescriptor::kCallAddress:
......@@ -712,11 +718,12 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->deoptimization_support());
// Emit the call instruction.
Instruction* call_instr =
Emit(opcode, buffer.output_count, buffer.outputs,
buffer.fixed_and_control_count(), buffer.fixed_and_control_args);
Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
buffer.instruction_args.size(), &buffer.instruction_args.front());
call_instr->MarkAsCall();
if (deoptimization != NULL) {
......@@ -726,9 +733,11 @@ void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
// Caller clean up of stack for C-style calls.
if (descriptor->kind() == CallDescriptor::kCallAddress &&
buffer.pushed_count > 0) {
!buffer.pushed_nodes.empty()) {
DCHECK(deoptimization == NULL && continuation == NULL);
Emit(kPopStack | MiscField::encode(buffer.pushed_count), NULL);
Emit(kPopStack |
MiscField::encode(static_cast<int>(buffer.pushed_nodes.size())),
NULL);
}
}
......
......@@ -1472,6 +1472,8 @@ void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
__ DebugBreak();
// Ignore the return value.
PrepareForBailoutForId(stmt->DebugBreakId(), NO_REGISTERS);
}
......
......@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
// Flags: --expose-debug-as debug --turbo-deoptimization
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
......
......@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
// Flags: --expose-debug-as debug --turbo-deoptimization
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug;
......
......@@ -96,7 +96,6 @@
# Support for %GetFrameDetails is missing and requires checkpoints.
'debug-backtrace-text': [PASS, NO_VARIANTS],
'debug-break-inline': [PASS, NO_VARIANTS],
'debug-evaluate-arguments': [PASS, NO_VARIANTS],
'debug-evaluate-bool-constructor': [PASS, NO_VARIANTS],
'debug-evaluate-closure': [PASS, NO_VARIANTS],
'debug-evaluate-const': [PASS, NO_VARIANTS],
......@@ -107,7 +106,6 @@
'debug-evaluate-with': [PASS, NO_VARIANTS],
'debug-liveedit-double-call': [PASS, NO_VARIANTS],
'debug-liveedit-restart-frame': [PASS, NO_VARIANTS],
'debug-receiver': [PASS, NO_VARIANTS],
'debug-return-value': [PASS, NO_VARIANTS],
'debug-scopes': [PASS, NO_VARIANTS],
'debug-set-variable-value': [PASS, NO_VARIANTS],
......