Commit 1d0f872e authored by erik.corry@gmail.com

Fix full code generator to not use --debug-code if it is in
mksnapshot or a VM that is booted from a snapshot.  --debug-code
can still have an effect on stub and optimized code and it still
works on the full code generator when running without snapshots.

The deoptimizer generates full-code-generator code and relies on it having
the same layout as last time.  This means that the code the full code
generator makes for the snapshot should be the same as the code it makes
later.  This change makes the full code generator create more consistent
code between mksnapshot time and run time.

This is a bug fix and a step towards making the snapshot code more robust.
Review URL: https://chromiumcodereview.appspot.com/10834085

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12239 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent cd95464d
...@@ -648,6 +648,9 @@ class Assembler : public AssemblerBase { ...@@ -648,6 +648,9 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code. // Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; } void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
// Dummy for cross platform compatibility.
void set_predictable_code_size(bool value) { }
// GetCode emits any pending (non-emitted) code and fills the descriptor // GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other // desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls. // Assembler functions are invoked in between GetCode() calls.
......
...@@ -785,7 +785,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { ...@@ -785,7 +785,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function // The variable in the declaration always resides in the current function
// context. // context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) { if (generate_debug_code_) {
// Check that we're not inside a with or catch context. // Check that we're not inside a with or catch context.
__ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ CompareRoot(r1, Heap::kWithContextMapRootIndex); __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
...@@ -2147,7 +2147,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, ...@@ -2147,7 +2147,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode. // in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) { if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, r1); MemOperand location = VarOperand(var, r1);
if (FLAG_debug_code && op == Token::INIT_LET) { if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding. // Check for an uninitialized let binding.
__ ldr(r2, location); __ ldr(r2, location);
__ CompareRoot(r2, Heap::kTheHoleValueRootIndex); __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
...@@ -2711,7 +2711,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( ...@@ -2711,7 +2711,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false, context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through); &if_true, &if_false, &fall_through);
if (FLAG_debug_code) __ AbortIfSmi(r0); if (generate_debug_code_) __ AbortIfSmi(r0);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
...@@ -3575,7 +3575,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { ...@@ -3575,7 +3575,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1); ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0)); VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) { if (generate_debug_code_) {
__ AbortIfNotString(r0); __ AbortIfNotString(r0);
} }
...@@ -3649,7 +3649,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { ...@@ -3649,7 +3649,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// string_length: Accumulated sum of string lengths (smi). // string_length: Accumulated sum of string lengths (smi).
// element: Current array element. // element: Current array element.
// elements_end: Array end. // elements_end: Array end.
if (FLAG_debug_code) { if (generate_debug_code_) {
__ cmp(array_length, Operand(0)); __ cmp(array_length, Operand(0));
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin"); __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
} }
......
...@@ -699,7 +699,7 @@ static bool InstallFullCode(CompilationInfo* info) { ...@@ -699,7 +699,7 @@ static bool InstallFullCode(CompilationInfo* info) {
shared->set_dont_inline(lit->flags()->Contains(kDontInline)); shared->set_dont_inline(lit->flags()->Contains(kDontInline));
shared->set_ast_node_count(lit->ast_node_count()); shared->set_ast_node_count(lit->ast_node_count());
if (V8::UseCrankshaft()&& if (V8::UseCrankshaft() &&
!function.is_null() && !function.is_null() &&
!shared->optimization_disabled()) { !shared->optimization_disabled()) {
// If we're asked to always optimize, we compile the optimized // If we're asked to always optimize, we compile the optimized
......
...@@ -36,6 +36,7 @@ ...@@ -36,6 +36,7 @@
#include "prettyprinter.h" #include "prettyprinter.h"
#include "scopes.h" #include "scopes.h"
#include "scopeinfo.h" #include "scopeinfo.h"
#include "snapshot.h"
#include "stub-cache.h" #include "stub-cache.h"
namespace v8 { namespace v8 {
...@@ -382,6 +383,20 @@ void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) { ...@@ -382,6 +383,20 @@ void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
} }
void FullCodeGenerator::Initialize() {
// The generation of debug code must match between the snapshot code and the
// code that is generated later. This is assumed by the debugger when it is
// calculating PC offsets after generating a debug version of code. Therefore
// we disable the production of debug code in the full compiler if we are
// either generating a snapshot or we booted from a snapshot.
generate_debug_code_ = FLAG_debug_code &&
!Serializer::enabled() &&
!Snapshot::HaveASnapshotToStartFrom();
masm_->set_emit_debug_code(generate_debug_code_);
masm_->set_predictable_code_size(true);
}
void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) { void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
if (type_feedback_cells_.is_empty()) return; if (type_feedback_cells_.is_empty()) return;
int length = type_feedback_cells_.length(); int length = type_feedback_cells_.length();
......
...@@ -93,7 +93,11 @@ class FullCodeGenerator: public AstVisitor { ...@@ -93,7 +93,11 @@ class FullCodeGenerator: public AstVisitor {
? info->function()->ast_node_count() : 0, ? info->function()->ast_node_count() : 0,
info->zone()), info->zone()),
ic_total_count_(0), ic_total_count_(0),
zone_(info->zone()) { } zone_(info->zone()) {
Initialize();
}
void Initialize();
static bool MakeCode(CompilationInfo* info); static bool MakeCode(CompilationInfo* info);
...@@ -806,6 +810,7 @@ class FullCodeGenerator: public AstVisitor { ...@@ -806,6 +810,7 @@ class FullCodeGenerator: public AstVisitor {
int ic_total_count_; int ic_total_count_;
Handle<FixedArray> handler_table_; Handle<FixedArray> handler_table_;
Handle<JSGlobalPropertyCell> profiling_counter_; Handle<JSGlobalPropertyCell> profiling_counter_;
bool generate_debug_code_;
Zone* zone_; Zone* zone_;
friend class NestedStatement; friend class NestedStatement;
......
...@@ -587,6 +587,11 @@ class Assembler : public AssemblerBase { ...@@ -587,6 +587,11 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code. // Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; } void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
// Avoids using instructions that vary in size in unpredictable ways between
// the snapshot and the running VM. This is needed by the full compiler so
// that it can recompile code with debug support and fix the PC.
void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
// GetCode emits any pending (non-emitted) code and fills the descriptor // GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other // desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls. // Assembler functions are invoked in between GetCode() calls.
...@@ -1111,6 +1116,7 @@ class Assembler : public AssemblerBase { ...@@ -1111,6 +1116,7 @@ class Assembler : public AssemblerBase {
protected: protected:
bool emit_debug_code() const { return emit_debug_code_; } bool emit_debug_code() const { return emit_debug_code_; }
bool predictable_code_size() const { return predictable_code_size_ ; }
void movsd(XMMRegister dst, const Operand& src); void movsd(XMMRegister dst, const Operand& src);
void movsd(const Operand& dst, XMMRegister src); void movsd(const Operand& dst, XMMRegister src);
...@@ -1186,6 +1192,7 @@ class Assembler : public AssemblerBase { ...@@ -1186,6 +1192,7 @@ class Assembler : public AssemblerBase {
PositionsRecorder positions_recorder_; PositionsRecorder positions_recorder_;
bool emit_debug_code_; bool emit_debug_code_;
bool predictable_code_size_;
friend class PositionsRecorder; friend class PositionsRecorder;
}; };
......
...@@ -754,7 +754,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { ...@@ -754,7 +754,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function // The variable in the declaration always resides in the current function
// context. // context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) { if (generate_debug_code_) {
// Check that we're not inside a with or catch context. // Check that we're not inside a with or catch context.
__ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
__ cmp(ebx, isolate()->factory()->with_context_map()); __ cmp(ebx, isolate()->factory()->with_context_map());
...@@ -2091,7 +2091,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, ...@@ -2091,7 +2091,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode. // in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) { if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, ecx); MemOperand location = VarOperand(var, ecx);
if (FLAG_debug_code && op == Token::INIT_LET) { if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding. // Check for an uninitialized let binding.
__ mov(edx, location); __ mov(edx, location);
__ cmp(edx, isolate()->factory()->the_hole_value()); __ cmp(edx, isolate()->factory()->the_hole_value());
...@@ -2640,7 +2640,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( ...@@ -2640,7 +2640,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false, context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through); &if_true, &if_false, &fall_through);
if (FLAG_debug_code) __ AbortIfSmi(eax); if (generate_debug_code_) __ AbortIfSmi(eax);
// Check whether this map has already been checked to be safe for default // Check whether this map has already been checked to be safe for default
// valueOf. // valueOf.
...@@ -2865,7 +2865,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { ...@@ -2865,7 +2865,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
__ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ bind(&exit); __ bind(&exit);
if (FLAG_debug_code) __ AbortIfNotSmi(eax); if (generate_debug_code_) __ AbortIfNotSmi(eax);
context()->Plug(eax); context()->Plug(eax);
} }
...@@ -3485,7 +3485,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { ...@@ -3485,7 +3485,7 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0)); VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) { if (generate_debug_code_) {
__ AbortIfNotString(eax); __ AbortIfNotString(eax);
} }
...@@ -3510,7 +3510,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { ...@@ -3510,7 +3510,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1); ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0)); VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) { if (generate_debug_code_) {
__ AbortIfNotString(eax); __ AbortIfNotString(eax);
} }
...@@ -3586,7 +3586,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { ...@@ -3586,7 +3586,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Loop condition: while (index < length). // Loop condition: while (index < length).
// Live loop registers: index, array_length, string, // Live loop registers: index, array_length, string,
// scratch, string_length, elements. // scratch, string_length, elements.
if (FLAG_debug_code) { if (generate_debug_code_) {
__ cmp(index, array_length); __ cmp(index, array_length);
__ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin"); __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
} }
......
...@@ -85,7 +85,7 @@ void MacroAssembler::RememberedSetHelper( ...@@ -85,7 +85,7 @@ void MacroAssembler::RememberedSetHelper(
SaveFPRegsMode save_fp, SaveFPRegsMode save_fp,
MacroAssembler::RememberedSetFinalAction and_then) { MacroAssembler::RememberedSetFinalAction and_then) {
Label done; Label done;
if (FLAG_debug_code) { if (emit_debug_code()) {
Label ok; Label ok;
JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear); JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
int3(); int3();
...@@ -317,7 +317,7 @@ void MacroAssembler::RecordWrite(Register object, ...@@ -317,7 +317,7 @@ void MacroAssembler::RecordWrite(Register object,
return; return;
} }
if (FLAG_debug_code) { if (emit_debug_code()) {
Label ok; Label ok;
cmp(value, Operand(address, 0)); cmp(value, Operand(address, 0));
j(equal, &ok, Label::kNear); j(equal, &ok, Label::kNear);
...@@ -2793,7 +2793,7 @@ void MacroAssembler::EnsureNotWhite( ...@@ -2793,7 +2793,7 @@ void MacroAssembler::EnsureNotWhite(
test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize)); test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
j(not_zero, &done, Label::kNear); j(not_zero, &done, Label::kNear);
if (FLAG_debug_code) { if (emit_debug_code()) {
// Check for impossible bit pattern. // Check for impossible bit pattern.
Label ok; Label ok;
push(mask_scratch); push(mask_scratch);
...@@ -2868,7 +2868,7 @@ void MacroAssembler::EnsureNotWhite( ...@@ -2868,7 +2868,7 @@ void MacroAssembler::EnsureNotWhite(
and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask)); and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
length); length);
if (FLAG_debug_code) { if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset)); cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
Check(less_equal, "Live Bytes Count overflow chunk size"); Check(less_equal, "Live Bytes Count overflow chunk size");
......
...@@ -525,6 +525,9 @@ class Assembler : public AssemblerBase { ...@@ -525,6 +525,9 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code. // Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; } void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
// Dummy for cross platform compatibility.
void set_predictable_code_size(bool value) { }
// GetCode emits any pending (non-emitted) code and fills the descriptor // GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other // desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls. // Assembler functions are invoked in between GetCode() calls.
......
...@@ -791,7 +791,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { ...@@ -791,7 +791,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function // The variable in the declaration always resides in the current function
// context. // context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) { if (generate_debug_code_) {
// Check that we're not inside a with or catch context. // Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex); __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
...@@ -2164,7 +2164,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, ...@@ -2164,7 +2164,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode. // in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) { if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, a1); MemOperand location = VarOperand(var, a1);
if (FLAG_debug_code && op == Token::INIT_LET) { if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding. // Check for an uninitialized let binding.
__ lw(a2, location); __ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex); __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
...@@ -2734,7 +2734,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( ...@@ -2734,7 +2734,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false, context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through); &if_true, &if_false, &fall_through);
if (FLAG_debug_code) __ AbortIfSmi(v0); if (generate_debug_code_) __ AbortIfSmi(v0);
__ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset)); __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
...@@ -3609,7 +3609,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { ...@@ -3609,7 +3609,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1); ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0)); VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) { if (generate_debug_code_) {
__ AbortIfNotString(v0); __ AbortIfNotString(v0);
} }
...@@ -3685,7 +3685,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { ...@@ -3685,7 +3685,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// string_length: Accumulated sum of string lengths (smi). // string_length: Accumulated sum of string lengths (smi).
// element: Current array element. // element: Current array element.
// elements_end: Array end. // elements_end: Array end.
if (FLAG_debug_code) { if (generate_debug_code_) {
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin", __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
array_length, Operand(zero_reg)); array_length, Operand(zero_reg));
} }
......
...@@ -511,6 +511,18 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { ...@@ -511,6 +511,18 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
UNCLASSIFIED, UNCLASSIFIED,
47, 47,
"date_cache_stamp"); "date_cache_stamp");
Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
UNCLASSIFIED,
48,
"address_of_pending_message_obj");
Add(ExternalReference::address_of_has_pending_message(isolate).address(),
UNCLASSIFIED,
49,
"address_of_has_pending_message");
Add(ExternalReference::address_of_pending_message_script(isolate).address(),
UNCLASSIFIED,
50,
"pending_message_script");
} }
......
...@@ -350,7 +350,8 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size) ...@@ -350,7 +350,8 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate), : AssemblerBase(arg_isolate),
code_targets_(100), code_targets_(100),
positions_recorder_(this), positions_recorder_(this),
emit_debug_code_(FLAG_debug_code) { emit_debug_code_(FLAG_debug_code),
predictable_code_size_(false) {
if (buffer == NULL) { if (buffer == NULL) {
// Do our own buffer management. // Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) { if (buffer_size <= kMinimalBufferSize) {
...@@ -1234,7 +1235,16 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) { ...@@ -1234,7 +1235,16 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
const int long_size = 6; const int long_size = 6;
int offs = L->pos() - pc_offset(); int offs = L->pos() - pc_offset();
ASSERT(offs <= 0); ASSERT(offs <= 0);
if (is_int8(offs - short_size)) { // Determine whether we can use 1-byte offsets for backwards branches,
// which have a max range of 128 bytes.
// We also need to check the predictable_code_size_ flag here, because
// on x64, when the full code generator recompiles code for debugging, some
// places need to be padded out to a certain size. The debugger is keeping
// track of how often it did this so that it can adjust return addresses on
// the stack, but if the size of jump instructions can also change, that's
// not enough and the calculated offsets would be incorrect.
if (is_int8(offs - short_size) && !predictable_code_size_) {
// 0111 tttn #8-bit disp. // 0111 tttn #8-bit disp.
emit(0x70 | cc); emit(0x70 | cc);
emit((offs - short_size) & 0xFF); emit((offs - short_size) & 0xFF);
...@@ -1291,7 +1301,7 @@ void Assembler::jmp(Label* L, Label::Distance distance) { ...@@ -1291,7 +1301,7 @@ void Assembler::jmp(Label* L, Label::Distance distance) {
if (L->is_bound()) { if (L->is_bound()) {
int offs = L->pos() - pc_offset() - 1; int offs = L->pos() - pc_offset() - 1;
ASSERT(offs <= 0); ASSERT(offs <= 0);
if (is_int8(offs - short_size)) { if (is_int8(offs - short_size) && !predictable_code_size_) {
// 1110 1011 #8-bit disp. // 1110 1011 #8-bit disp.
emit(0xEB); emit(0xEB);
emit((offs - short_size) & 0xFF); emit((offs - short_size) & 0xFF);
......
...@@ -561,6 +561,11 @@ class Assembler : public AssemblerBase { ...@@ -561,6 +561,11 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code. // Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; } void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
// Avoids using instructions that vary in size in unpredictable ways between
// the snapshot and the running VM. This is needed by the full compiler so
// that it can recompile code with debug support and fix the PC.
void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
// GetCode emits any pending (non-emitted) code and fills the descriptor // GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other // desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls. // Assembler functions are invoked in between GetCode() calls.
...@@ -1433,6 +1438,7 @@ class Assembler : public AssemblerBase { ...@@ -1433,6 +1438,7 @@ class Assembler : public AssemblerBase {
protected: protected:
bool emit_debug_code() const { return emit_debug_code_; } bool emit_debug_code() const { return emit_debug_code_; }
bool predictable_code_size() const { return predictable_code_size_; }
private: private:
byte* addr_at(int pos) { return buffer_ + pos; } byte* addr_at(int pos) { return buffer_ + pos; }
...@@ -1637,6 +1643,7 @@ class Assembler : public AssemblerBase { ...@@ -1637,6 +1643,7 @@ class Assembler : public AssemblerBase {
PositionsRecorder positions_recorder_; PositionsRecorder positions_recorder_;
bool emit_debug_code_; bool emit_debug_code_;
bool predictable_code_size_;
friend class PositionsRecorder; friend class PositionsRecorder;
}; };
......
...@@ -759,7 +759,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { ...@@ -759,7 +759,7 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// The variable in the declaration always resides in the current function // The variable in the declaration always resides in the current function
// context. // context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) { if (generate_debug_code_) {
// Check that we're not inside a with or catch context. // Check that we're not inside a with or catch context.
__ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
__ CompareRoot(rbx, Heap::kWithContextMapRootIndex); __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
...@@ -2072,7 +2072,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, ...@@ -2072,7 +2072,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// in harmony mode. // in harmony mode.
if (var->IsStackAllocated() || var->IsContextSlot()) { if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, rcx); MemOperand location = VarOperand(var, rcx);
if (FLAG_debug_code && op == Token::INIT_LET) { if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding. // Check for an uninitialized let binding.
__ movq(rdx, location); __ movq(rdx, location);
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
...@@ -2612,7 +2612,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( ...@@ -2612,7 +2612,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false, context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through); &if_true, &if_false, &fall_through);
if (FLAG_debug_code) __ AbortIfSmi(rax); if (generate_debug_code_) __ AbortIfSmi(rax);
// Check whether this map has already been checked to be safe for default // Check whether this map has already been checked to be safe for default
// valueOf. // valueOf.
...@@ -2836,7 +2836,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { ...@@ -2836,7 +2836,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
__ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ bind(&exit); __ bind(&exit);
if (FLAG_debug_code) __ AbortIfNotSmi(rax); if (generate_debug_code_) __ AbortIfNotSmi(rax);
context()->Plug(rax); context()->Plug(rax);
} }
...@@ -3480,7 +3480,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { ...@@ -3480,7 +3480,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1); ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0)); VisitForAccumulatorValue(args->at(0));
if (FLAG_debug_code) { if (generate_debug_code_) {
__ AbortIfNotString(rax); __ AbortIfNotString(rax);
} }
...@@ -3560,7 +3560,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { ...@@ -3560,7 +3560,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Loop condition: while (index < array_length). // Loop condition: while (index < array_length).
// Live loop registers: index(int32), array_length(int32), string(String*), // Live loop registers: index(int32), array_length(int32), string(String*),
// scratch, string_length(int32), elements(FixedArray*). // scratch, string_length(int32), elements(FixedArray*).
if (FLAG_debug_code) { if (generate_debug_code_) {
__ cmpq(index, array_length); __ cmpq(index, array_length);
__ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin"); __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
} }
......
...@@ -53,9 +53,17 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) ...@@ -53,9 +53,17 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
} }
static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) { static const int kInvalidRootRegisterDelta = -1;
intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
if (predictable_code_size() &&
(other.address() < reinterpret_cast<Address>(isolate()) ||
other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
return kInvalidRootRegisterDelta;
}
Address roots_register_value = kRootRegisterBias + Address roots_register_value = kRootRegisterBias +
reinterpret_cast<Address>(isolate->heap()->roots_array_start()); reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
intptr_t delta = other.address() - roots_register_value; intptr_t delta = other.address() - roots_register_value;
return delta; return delta;
} }
...@@ -64,8 +72,8 @@ static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) { ...@@ -64,8 +72,8 @@ static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
Operand MacroAssembler::ExternalOperand(ExternalReference target, Operand MacroAssembler::ExternalOperand(ExternalReference target,
Register scratch) { Register scratch) {
if (root_array_available_ && !Serializer::enabled()) { if (root_array_available_ && !Serializer::enabled()) {
intptr_t delta = RootRegisterDelta(target, isolate()); intptr_t delta = RootRegisterDelta(target);
if (is_int32(delta)) { if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow(); Serializer::TooLateToEnableNow();
return Operand(kRootRegister, static_cast<int32_t>(delta)); return Operand(kRootRegister, static_cast<int32_t>(delta));
} }
...@@ -77,8 +85,8 @@ Operand MacroAssembler::ExternalOperand(ExternalReference target, ...@@ -77,8 +85,8 @@ Operand MacroAssembler::ExternalOperand(ExternalReference target,
void MacroAssembler::Load(Register destination, ExternalReference source) { void MacroAssembler::Load(Register destination, ExternalReference source) {
if (root_array_available_ && !Serializer::enabled()) { if (root_array_available_ && !Serializer::enabled()) {
intptr_t delta = RootRegisterDelta(source, isolate()); intptr_t delta = RootRegisterDelta(source);
if (is_int32(delta)) { if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow(); Serializer::TooLateToEnableNow();
movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
return; return;
...@@ -96,8 +104,8 @@ void MacroAssembler::Load(Register destination, ExternalReference source) { ...@@ -96,8 +104,8 @@ void MacroAssembler::Load(Register destination, ExternalReference source) {
void MacroAssembler::Store(ExternalReference destination, Register source) { void MacroAssembler::Store(ExternalReference destination, Register source) {
if (root_array_available_ && !Serializer::enabled()) { if (root_array_available_ && !Serializer::enabled()) {
intptr_t delta = RootRegisterDelta(destination, isolate()); intptr_t delta = RootRegisterDelta(destination);
if (is_int32(delta)) { if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow(); Serializer::TooLateToEnableNow();
movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source); movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
return; return;
...@@ -116,8 +124,8 @@ void MacroAssembler::Store(ExternalReference destination, Register source) { ...@@ -116,8 +124,8 @@ void MacroAssembler::Store(ExternalReference destination, Register source) {
void MacroAssembler::LoadAddress(Register destination, void MacroAssembler::LoadAddress(Register destination,
ExternalReference source) { ExternalReference source) {
if (root_array_available_ && !Serializer::enabled()) { if (root_array_available_ && !Serializer::enabled()) {
intptr_t delta = RootRegisterDelta(source, isolate()); intptr_t delta = RootRegisterDelta(source);
if (is_int32(delta)) { if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow(); Serializer::TooLateToEnableNow();
lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta))); lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
return; return;
...@@ -133,8 +141,8 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) { ...@@ -133,8 +141,8 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
// This calculation depends on the internals of LoadAddress. // This calculation depends on the internals of LoadAddress.
// It's correctness is ensured by the asserts in the Call // It's correctness is ensured by the asserts in the Call
// instruction below. // instruction below.
intptr_t delta = RootRegisterDelta(source, isolate()); intptr_t delta = RootRegisterDelta(source);
if (is_int32(delta)) { if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
Serializer::TooLateToEnableNow(); Serializer::TooLateToEnableNow();
// Operand is lea(scratch, Operand(kRootRegister, delta)); // Operand is lea(scratch, Operand(kRootRegister, delta));
// Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7. // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
...@@ -216,7 +224,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests. ...@@ -216,7 +224,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
Register scratch, Register scratch,
SaveFPRegsMode save_fp, SaveFPRegsMode save_fp,
RememberedSetFinalAction and_then) { RememberedSetFinalAction and_then) {
if (FLAG_debug_code) { if (emit_debug_code()) {
Label ok; Label ok;
JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear); JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
int3(); int3();
...@@ -397,7 +405,7 @@ void MacroAssembler::RecordWrite(Register object, ...@@ -397,7 +405,7 @@ void MacroAssembler::RecordWrite(Register object,
return; return;
} }
if (FLAG_debug_code) { if (emit_debug_code()) {
Label ok; Label ok;
cmpq(value, Operand(address, 0)); cmpq(value, Operand(address, 0));
j(equal, &ok, Label::kNear); j(equal, &ok, Label::kNear);
...@@ -3992,7 +4000,7 @@ void MacroAssembler::CopyBytes(Register destination, ...@@ -3992,7 +4000,7 @@ void MacroAssembler::CopyBytes(Register destination,
int min_length, int min_length,
Register scratch) { Register scratch) {
ASSERT(min_length >= 0); ASSERT(min_length >= 0);
if (FLAG_debug_code) { if (emit_debug_code()) {
cmpl(length, Immediate(min_length)); cmpl(length, Immediate(min_length));
Assert(greater_equal, "Invalid min_length"); Assert(greater_equal, "Invalid min_length");
} }
...@@ -4369,7 +4377,7 @@ void MacroAssembler::EnsureNotWhite( ...@@ -4369,7 +4377,7 @@ void MacroAssembler::EnsureNotWhite(
testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
j(not_zero, &done, Label::kNear); j(not_zero, &done, Label::kNear);
if (FLAG_debug_code) { if (emit_debug_code()) {
// Check for impossible bit pattern. // Check for impossible bit pattern.
Label ok; Label ok;
push(mask_scratch); push(mask_scratch);
......
...@@ -1328,6 +1328,8 @@ class MacroAssembler: public Assembler { ...@@ -1328,6 +1328,8 @@ class MacroAssembler: public Assembler {
// modified. It may be the "smi 1 constant" register. // modified. It may be the "smi 1 constant" register.
Register GetSmiConstant(Smi* value); Register GetSmiConstant(Smi* value);
intptr_t RootRegisterDelta(ExternalReference other);
// Moves the smi value to the destination register. // Moves the smi value to the destination register.
void LoadSmiConstant(Register dst, Smi* value); void LoadSmiConstant(Register dst, Smi* value);
......
...@@ -1902,6 +1902,9 @@ void SimulateFullSpace(PagedSpace* space); ...@@ -1902,6 +1902,9 @@ void SimulateFullSpace(PagedSpace* space);
TEST(ReleaseOverReservedPages) { TEST(ReleaseOverReservedPages) {
i::FLAG_trace_gc = true; i::FLAG_trace_gc = true;
// The optimizer can allocate stuff, messing up the test.
i::FLAG_crankshaft = false;
i::FLAG_always_opt = false;
InitializeVM(); InitializeVM();
v8::HandleScope scope; v8::HandleScope scope;
static const int number_of_test_pages = 20; static const int number_of_test_pages = 20;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment