Commit 45c63ffa authored by ricow@chromium.org

Add more generic version of reloc info padding to ensure enough space for reloc patching during deoptimization (fixes issue 1174).

The old version only added extra space when we did indirect calls, but
the problem remains the same for normal calls whose reloc info can be
represented in a single byte. When patching, each call site will take up
at least 2 bytes because we use RUNTIME_ENTRY as the reloc mode.
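For illustration only, here is a minimal standalone sketch (not part of the patch) of the size accounting described above. The struct and function names are invented for this sketch; the 2-versus-6-byte choice mirrors the EnsureRelocSpaceForDeoptimization logic in the diff below, assuming a pc delta that fits in 6 bits needs no pc-jump.

#include <cstdint>
#include <cstdio>

// Sketch only -- not V8 code. Tracks a conservative lower bound on the
// reloc info space needed so that every call site can later be patched
// with a RUNTIME_ENTRY entry during deoptimization.
struct DeoptRelocSize {
  int min_size = 0;        // bytes of reloc info we must be able to write
  int last_pc_offset = 0;  // pc offset of the previously recorded call
};

// Mirrors is_uintn(value, n): true if value fits in n unsigned bits.
static bool FitsInUnsignedBits(uint32_t value, int n) {
  return value < (1u << n);
}

static void ReserveForCall(DeoptRelocSize* sizes, int current_pc_offset) {
  uint32_t pc_delta = current_pc_offset - sizes->last_pc_offset;
  // A patched call needs at least 2 bytes of reloc info; if the pc delta
  // is too large for the small tagged encoding, a pc-jump is needed as
  // well, so reserve 6 bytes to stay conservative.
  sizes->min_size += FitsInUnsignedBits(pc_delta, 6) ? 2 : 6;
  sizes->last_pc_offset = current_pc_offset;
}

int main() {
  DeoptRelocSize sizes;
  ReserveForCall(&sizes, 10);   // small delta: reserve 2 bytes
  ReserveForCall(&sizes, 500);  // large delta: reserve 6 bytes
  std::printf("reserved reloc bytes: %d\n", sizes.min_size);  // prints 8
  return 0;
}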


Review URL: http://codereview.chromium.org/6541053

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6894 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 8162d902
@@ -228,6 +228,7 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
} else if (rmode == RelocInfo::CODE_TARGET) {
WriteTaggedPC(pc_delta, kCodeTargetTag);
ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
} else if (RelocInfo::IsPosition(rmode)) {
// Use signed delta-encoding for data.
intptr_t data_delta = rinfo->data() - last_data_;
@@ -251,6 +252,7 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
WriteExtraTaggedPC(pc_delta, kPCJumpTag);
WriteExtraTaggedData(rinfo->data() - last_data_, kCommentTag);
last_data_ = rinfo->data();
ASSERT(begin_pos - pos_ == RelocInfo::kRelocCommentSize);
} else {
// For all other modes we simply use the mode as the extra tag.
// None of these modes need a data component.
......
@@ -184,6 +184,14 @@ class RelocInfo BASE_EMBEDDED {
// we do not normally record relocation info.
static const char* kFillerCommentString;
// The size of a comment is equal to three bytes for the extra tagged pc +
// the tag for the data, and kPointerSize for the actual pointer to the
// comment.
static const int kRelocCommentSize = 3 + kPointerSize;
// The maximum size for a call instruction including pc-jump.
static const int kMaxCallSize = 6;
enum Mode {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode).
CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.
......
@@ -55,7 +55,7 @@ class SafepointGenerator : public PostCallGenerator {
// Ensure that we have enough space in the reloc info to patch
// this with calls when doing deoptimization.
if (ensure_reloc_space_) {
codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
codegen_->EnsureRelocSpaceForDeoptimization();
}
codegen_->RecordSafepoint(pointers_, deoptimization_index_);
}
@@ -78,6 +78,7 @@ bool LCodeGen::GenerateCode() {
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
GenerateRelocPadding() &&
GenerateSafepointTable();
}
@@ -122,6 +123,16 @@ void LCodeGen::Comment(const char* format, ...) {
}
bool LCodeGen::GenerateRelocPadding() {
int reloc_size = masm()->relocation_writer_size();
while (reloc_size < deoptimization_reloc_size.min_size) {
__ RecordComment(RelocInfo::kFillerCommentString, true);
reloc_size += RelocInfo::kRelocCommentSize;
}
return !is_aborted();
}
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
@@ -335,6 +346,22 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
}
void LCodeGen::EnsureRelocSpaceForDeoptimization() {
// Since we patch the reloc info with RUNTIME_ENTRY calls, every patch
// site will take up 2 bytes plus any pc-jumps.
// We are conservative and reserve 6 bytes in case a simple pc-jump is
// not enough.
uint32_t pc_delta =
masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset;
if (is_uintn(pc_delta, 6)) {
deoptimization_reloc_size.min_size += 2;
} else {
deoptimization_reloc_size.min_size += 6;
}
deoptimization_reloc_size.last_pc_offset = masm()->pc_offset();
}
void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
bool is_tagged) {
@@ -382,10 +409,13 @@ void LCodeGen::CallCode(Handle<Code> code,
ASSERT(instr != NULL);
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
if (!adjusted) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
__ call(code, mode);
EnsureRelocSpaceForDeoptimization();
RegisterLazyDeoptimization(instr);
// Signal that we don't inline smi code before these stubs in the
@@ -2300,11 +2330,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
if (*function == *graph()->info()->closure()) {
__ CallSelf();
} else {
// This is an indirect call and will not be recorded in the reloc info.
// Add a comment to the reloc info in case we need to patch this during
// deoptimization.
__ RecordComment(RelocInfo::kFillerCommentString, true);
__ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
EnsureRelocSpaceForDeoptimization();
}
// Setup deoptimization.
......
@@ -60,6 +60,7 @@ class LCodeGen BASE_EMBEDDED {
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
deoptimization_reloc_size(),
resolver_(this) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
}
@@ -102,6 +103,8 @@ class LCodeGen BASE_EMBEDDED {
// Emit frame translation commands for an environment.
void WriteTranslation(LEnvironment* environment, Translation* translation);
void EnsureRelocSpaceForDeoptimization();
// Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
@@ -151,6 +154,9 @@ class LCodeGen BASE_EMBEDDED {
bool GeneratePrologue();
bool GenerateBody();
bool GenerateDeferredCode();
// Pad the reloc info to ensure that we have enough space to patch during
// deoptimization.
bool GenerateRelocPadding();
bool GenerateSafepointTable();
void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr,
@@ -251,6 +257,13 @@ class LCodeGen BASE_EMBEDDED {
ZoneList<LDeferredCode*> deferred_;
int osr_pc_offset_;
struct DeoptimizationRelocSize {
int min_size;
int last_pc_offset;
};
DeoptimizationRelocSize deoptimization_reloc_size;
// Builder that keeps track of safepoints in the code. The table
// itself is emitted at the end of the generated code.
SafepointTableBuilder safepoints_;
......
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
// Test that we do not crash when doing deoptimization of a function that has
// reloc info that only takes up 1 byte per call (like KeyedStoreIC).
function Regular() {
this[0] >>= 0;
this[1] ^= 1;
}
function foo() {
var regular = new Regular();
%DeoptimizeFunction(Regular);
}
foo();