Commit 4d23256c authored by danno@chromium.org

MIPS: Add code again to allow reclaiming old unexecuted functions.

Port r12898 (69ff6e50)

Original commit message:
When code objects in the heap for FUNCTIONs and OPTIMIZED_FUNCTIONs are marked by the GC, their prologue is patched with a call to a stub that removes the patch. This allows the collector to quickly identify code objects that haven't been executed since the last full collection (they are the ones that still contain the patch). The functionality is currently disabled, but can be activated by specifying the "--age-code" flag.

BUG=
TEST=

Review URL: https://codereview.chromium.org/11358252
Patch from Akos Palfi <palfia@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12965 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent d6a31725
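
For orientation before the hunks: a minimal standalone C++ sketch of the young/aged classification that Code::IsYoungSequence() and Code::GetCodeAgeAndParity() perform below. The two constants match the diff; the instruction words in the arrays are placeholders, not real MIPS encodings.

#include <cstdint>
#include <cstring>
#include <cstdio>

static const int kNoCodeAgeSequenceLength = 7;  // instructions, as in the diff
static const uint32_t kCodeAgePatchFirstInstruction = 0x00010180;  // nop(6)

// A code object is young iff its prologue still matches the boilerplate
// sequence; any other prologue must begin with the marker nop written by
// Code::PatchPlatformCodeAge() (an ASSERT in V8 proper).
static bool IsYoungSequence(const uint32_t* prologue,
                            const uint32_t* young_sequence) {
  bool result = memcmp(prologue, young_sequence,
                       kNoCodeAgeSequenceLength * sizeof(uint32_t)) == 0;
  if (!result && prologue[0] != kCodeAgePatchFirstInstruction) {
    fprintf(stderr, "neither young nor a patched age sequence\n");
  }
  return result;
}

int main() {
  // Placeholder young prologue (really: Push(ra, fp, cp, a1); LoadRoot(at,
  // kUndefinedValueRootIndex); Addu(fp, sp, 2 * kPointerSize); ...).
  const uint32_t young[kNoCodeAgeSequenceLength] = {1, 2, 3, 4, 5, 6, 7};

  uint32_t prologue[kNoCodeAgeSequenceLength];
  memcpy(prologue, young, sizeof(young));
  printf("before patch: young = %d\n", IsYoungSequence(prologue, young));

  // Aging patch: marker nop first, stub address in the last word (dd()).
  prologue[0] = kCodeAgePatchFirstInstruction;
  prologue[kNoCodeAgeSequenceLength - 1] = 0xbadc0de0;  // fake stub address
  printf("after patch:  young = %d\n", IsYoungSequence(prologue, young));
  return 0;
}

Making code young again is the reverse: the stub's runtime call ends in a memcpy of the boilerplate back over the patch, exactly what Code::PatchPlatformCodeAge() does for age == kNoAge in the diff below.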
// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
@@ -231,6 +232,24 @@ void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
}
static const int kNoCodeAgeSequenceLength = 7;
Code* RelocInfo::code_age_stub() {
ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
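// The stub entry address is stored in the last word of the 7-instruction
// age sequence, where Code::PatchPlatformCodeAge() writes it with dd().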
return Code::GetCodeFromTargetAddress(
Memory::Address_at(pc_ + Assembler::kInstrSize *
(kNoCodeAgeSequenceLength - 1)));
}
void RelocInfo::set_code_age_stub(Code* stub) {
ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
Memory::Address_at(pc_ + Assembler::kInstrSize *
(kNoCodeAgeSequenceLength - 1)) =
stub->instruction_start();
}
Address RelocInfo::call_address() {
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
@@ -292,17 +311,6 @@ bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
}
Code* RelocInfo::code_age_stub() {
UNIMPLEMENTED();
return NULL;
}
void RelocInfo::set_code_age_stub(Code* stub) {
UNIMPLEMENTED();
}
void RelocInfo::Visit(ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
@@ -313,6 +321,8 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
visitor->VisitExternalReference(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -339,6 +349,8 @@ void RelocInfo::Visit(Heap* heap) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
@@ -663,7 +663,9 @@ class Assembler : public AssemblerBase {
PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE,
// Helper values.
LAST_CODE_MARKER,
FIRST_IC_MARKER = PROPERTY_ACCESS_INLINED
FIRST_IC_MARKER = PROPERTY_ACCESS_INLINED,
// Code aging
CODE_AGE_MARKER_NOP = 6
};
// Type == 0 is the default non-marking nop. For mips this is a
@@ -1255,12 +1255,35 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
}
// Just a dummy for now, so snapshot generation works.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
__ Ret();
// For now, we are relying on the fact that make_code_young doesn't do any
// garbage collection which allows us to save/restore the registers without
// worrying about which of them contain pointers. We also don't build an
// internal frame to make the code faster, since we shouldn't have to do stack
// crawls in MakeCodeYoung. This seems a bit fragile.
__ mov(a0, ra);
// Adjust a0 to point to the head of the PlatformCodeAge sequence
__ Subu(a0, a0,
Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
// Restore the original return address of the function
__ mov(ra, at);
// The following registers must be saved and restored when calling through to
// the runtime:
// a0 - contains return address (beginning of patch sequence)
// a1 - function object
RegList saved_regs =
(a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
FrameScope scope(masm, StackFrame::MANUAL);
__ MultiPush(saved_regs);
__ PrepareCallCFunction(1, 0, a1);
__ CallCFunction(
ExternalReference::get_make_code_young_function(masm->isolate()), 1);
__ MultiPop(saved_regs);
__ Jump(a0);
}
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
MacroAssembler* masm) { \
@@ -446,32 +446,102 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
__ bind(&done);
}
#undef __
// nop(CODE_AGE_MARKER_NOP)
static const uint32_t kCodeAgePatchFirstInstruction = 0x00010180;
static byte* GetNoCodeAgeSequence(uint32_t* length) {
// The sequence of instructions that is patched out for aging code is the
// following boilerplate stack-building prologue that is found in FUNCTIONS
static bool initialized = false;
static uint32_t sequence[kNoCodeAgeSequenceLength];
byte* byte_sequence = reinterpret_cast<byte*>(sequence);
*length = kNoCodeAgeSequenceLength * Assembler::kInstrSize;
if (!initialized) {
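// Emit the canonical young prologue once into a static buffer; callers
// compare code-object prologues against it with memcmp().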
CodePatcher patcher(byte_sequence, kNoCodeAgeSequenceLength);
patcher.masm()->Push(ra, fp, cp, a1);
patcher.masm()->LoadRoot(at, Heap::kUndefinedValueRootIndex);
patcher.masm()->Addu(fp, sp, Operand(2 * kPointerSize));
initialized = true;
}
return byte_sequence;
}
byte* Code::FindPlatformCodeAgeSequence() {
UNIMPLEMENTED();
return NULL;
byte* start = instruction_start();
uint32_t young_length;
byte* young_sequence = GetNoCodeAgeSequence(&young_length);
if (!memcmp(start, young_sequence, young_length) ||
Memory::uint32_at(start) == kCodeAgePatchFirstInstruction) {
return start;
} else {
byte* start_after_strict = NULL;
if (kind() == FUNCTION) {
start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
} else {
ASSERT(kind() == OPTIMIZED_FUNCTION);
start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
}
ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
Memory::uint32_at(start_after_strict) ==
kCodeAgePatchFirstInstruction);
return start_after_strict;
}
}
bool Code::IsYoungSequence(byte* sequence) {
UNIMPLEMENTED();
return false;
uint32_t young_length;
byte* young_sequence = GetNoCodeAgeSequence(&young_length);
bool result = !memcmp(sequence, young_sequence, young_length);
ASSERT(result ||
Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction);
return result;
}
void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
MarkingParity* parity) {
if (IsYoungSequence(sequence)) {
*age = kNoAge;
*parity = NO_MARKING_PARITY;
} else {
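// Aged: recover the stub address recorded in the last word of the
// sequence and map it back to an (age, parity) pair.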
Address target_address = Memory::Address_at(
sequence + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1));
Code* stub = GetCodeFromTargetAddress(target_address);
GetCodeAgeAndParity(stub, age, parity);
}
}
void Code::PatchPlatformCodeAge(byte* sequence,
Code::Age age,
MarkingParity parity) {
UNIMPLEMENTED();
uint32_t young_length;
byte* young_sequence = GetNoCodeAgeSequence(&young_length);
if (age == kNoAge) {
memcpy(sequence, young_sequence, young_length);
CPU::FlushICache(sequence, young_length);
} else {
Code* stub = GetCodeAgeStub(age, parity);
CodePatcher patcher(sequence, young_length / Assembler::kInstrSize);
// Mark this code sequence for FindPlatformCodeAgeSequence()
patcher.masm()->nop(Assembler::CODE_AGE_MARKER_NOP);
// Save the function's original return address
// (it will be clobbered by Call(t9))
patcher.masm()->mov(at, ra);
// Load the stub address to t9 and call it
patcher.masm()->li(t9,
Operand(reinterpret_cast<uint32_t>(stub->instruction_start())));
patcher.masm()->Call(t9);
// Record the stub address in the empty space for GetCodeAgeAndParity()
patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
}
}
#undef __
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_MIPS
@@ -36,6 +36,9 @@
namespace v8 {
namespace internal {
static const int kSizeOfFullCodegenStrictModePrologue = 16;
static const int kSizeOfOptimizedStrictModePrologue = 16;
// Forward declarations
class CompilationInfo;
@@ -158,11 +158,14 @@ void FullCodeGenerator::Generate() {
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
Label ok;
Label begin;
__ bind(&begin);
__ Branch(&ok, eq, t1, Operand(zero_reg));
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ sw(a2, MemOperand(sp, receiver_offset));
__ bind(&ok);
ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
}
// Open a frame scope to indicate that there is a frame on the stack. The
@@ -172,12 +175,12 @@
int locals_count = info->scope()->num_stack_slots();
// The following three instructions must remain together and unmodified for
// code aging to work properly.
__ Push(ra, fp, cp, a1);
if (locals_count > 0) {
// Load undefined value here, so the value is ready for the loop
// below.
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
}
// Load undefined value here, so the value is ready for the loop
// below.
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
// Adjust fp to point to caller's fp.
__ Addu(fp, sp, Operand(2 * kPointerSize));
@@ -136,15 +136,23 @@ bool LCodeGen::GeneratePrologue() {
// function calls.
if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
Label begin;
__ bind(&begin);
__ Branch(&ok, eq, t1, Operand(zero_reg));
int receiver_offset = scope()->num_parameters() * kPointerSize;
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
__ sw(a2, MemOperand(sp, receiver_offset));
__ bind(&ok);
ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
}
// The following three instructions must remain together and unmodified for
// code aging to work properly.
__ Push(ra, fp, cp, a1);
// Add unused load of at to ensure prologue sequence is identical for
// full-codegen and lithium-codegen.
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Addu(fp, sp, Operand(2 * kPointerSize)); // Adj. FP to point to saved FP.
// Reserve space for the stack slots needed by the code.