PredictableCodeSizeScope checks the expected size now.

We still have some problems on ARM, so the size check is currently
optional. Furthermore, we don't use PredictableCodeSizeScope at all
places where we should. Both issues are cleaned up in upcoming
CLs.

Review URL: https://codereview.chromium.org/11348195

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13037 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 72db2287
...@@ -7549,7 +7549,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) { ...@@ -7549,7 +7549,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (entry_hook_ != NULL) { if (entry_hook_ != NULL) {
PredictableCodeSizeScope predictable(masm); PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
ProfileEntryHookStub stub; ProfileEntryHookStub stub;
__ push(lr); __ push(lr);
__ CallStub(&stub); __ CallStub(&stub);
......
...@@ -290,7 +290,7 @@ void FullCodeGenerator::Generate() { ...@@ -290,7 +290,7 @@ void FullCodeGenerator::Generate() {
__ LoadRoot(ip, Heap::kStackLimitRootIndex); __ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip)); __ cmp(sp, Operand(ip));
__ b(hs, &ok); __ b(hs, &ok);
PredictableCodeSizeScope predictable(masm_); PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
StackCheckStub stub; StackCheckStub stub;
__ CallStub(&stub); __ CallStub(&stub);
__ bind(&ok); __ bind(&ok);
...@@ -368,7 +368,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, ...@@ -368,7 +368,7 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
__ LoadRoot(ip, Heap::kStackLimitRootIndex); __ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip)); __ cmp(sp, Operand(ip));
__ b(hs, &ok); __ b(hs, &ok);
PredictableCodeSizeScope predictable(masm_); PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
StackCheckStub stub; StackCheckStub stub;
__ CallStub(&stub); __ CallStub(&stub);
} }
...@@ -442,7 +442,8 @@ void FullCodeGenerator::EmitReturnSequence() { ...@@ -442,7 +442,8 @@ void FullCodeGenerator::EmitReturnSequence() {
// tool from instrumenting as we rely on the code size here. // tool from instrumenting as we rely on the code size here.
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
PredictableCodeSizeScope predictable(masm_); // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
PredictableCodeSizeScope predictable(masm_, -1);
__ RecordJSReturn(); __ RecordJSReturn();
masm_->mov(sp, fp); masm_->mov(sp, fp);
masm_->ldm(ia_w, sp, fp.bit() | lr.bit()); masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
......
...@@ -2547,7 +2547,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { ...@@ -2547,7 +2547,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// We use Factory::the_hole_value() on purpose instead of loading from the // We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with // root array to force relocation to be able to later patch with
// the cached map. // the cached map.
PredictableCodeSizeScope predictable(masm_); PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize);
Handle<JSGlobalPropertyCell> cell = Handle<JSGlobalPropertyCell> cell =
factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
__ mov(ip, Operand(Handle<Object>(cell))); __ mov(ip, Operand(Handle<Object>(cell)));
...@@ -2611,7 +2611,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, ...@@ -2611,7 +2611,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
static const int kAdditionalDelta = 5; static const int kAdditionalDelta = 5;
// Make sure that code size is predictable, since we use specific constant // Make sure that code size is predictable, since we use specific constant
// offsets in the code to find embedded values. // offsets in the code to find embedded values.
PredictableCodeSizeScope predictable(masm_); PredictableCodeSizeScope predictable(masm_, 6 * Assembler::kInstrSize);
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta; Label before_push_delta;
__ bind(&before_push_delta); __ bind(&before_push_delta);
...@@ -5640,7 +5640,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) { ...@@ -5640,7 +5640,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ cmp(sp, Operand(ip)); __ cmp(sp, Operand(ip));
__ b(hs, &done); __ b(hs, &done);
StackCheckStub stub; StackCheckStub stub;
PredictableCodeSizeScope predictable(masm_); PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
EnsureSpaceForLazyDeopt(); EnsureSpaceForLazyDeopt();
__ bind(&done); __ bind(&done);
......
...@@ -151,6 +151,28 @@ AssemblerBase::~AssemblerBase() { ...@@ -151,6 +151,28 @@ AssemblerBase::~AssemblerBase() {
} }
// -----------------------------------------------------------------------------
// Implementation of PredictableCodeSizeScope
// Turns on predictable code size for |assembler| for the lifetime of this
// scope, remembering the previous setting so the destructor can restore it.
// |expected_size| is the number of bytes the scoped code is expected to emit;
// a negative value disables the size check in the destructor.
// Note: the initializer list captures pc_offset() at scope entry so the
// destructor can compute how many bytes were actually emitted.
PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
int expected_size)
: assembler_(assembler),
expected_size_(expected_size),
start_offset_(assembler->pc_offset()),
old_value_(assembler->predictable_code_size()) {
assembler_->set_predictable_code_size(true);
}
// Restores the previous predictable-code-size setting and, when an expected
// size was supplied (expected_size_ >= 0), CHECKs that exactly that many
// bytes were emitted while the scope was active. A negative expected_size_
// skips the check entirely (used while some call sites still emit a
// variable number of instructions).
PredictableCodeSizeScope::~PredictableCodeSizeScope() {
// TODO(svenpanne) Remove the 'if' when everything works.
if (expected_size_ >= 0) {
CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
}
assembler_->set_predictable_code_size(old_value_);
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Implementation of Label // Implementation of Label
......
...@@ -98,18 +98,13 @@ class AssemblerBase: public Malloced { ...@@ -98,18 +98,13 @@ class AssemblerBase: public Malloced {
// snapshot and the running VM. // snapshot and the running VM.
class PredictableCodeSizeScope { class PredictableCodeSizeScope {
public: public:
explicit PredictableCodeSizeScope(AssemblerBase* assembler) PredictableCodeSizeScope(AssemblerBase* assembler, int expected_size);
: assembler_(assembler) { ~PredictableCodeSizeScope();
old_value_ = assembler_->predictable_code_size();
assembler_->set_predictable_code_size(true);
}
~PredictableCodeSizeScope() {
assembler_->set_predictable_code_size(old_value_);
}
private: private:
AssemblerBase* assembler_; AssemblerBase* assembler_;
int expected_size_;
int start_offset_;
bool old_value_; bool old_value_;
}; };
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment