Enable optimizing JSFunctions that are in new-space.

This avoids a performance anomaly in which hot functions were never
optimized simply because no GC happened to run and promote them out of
new space; it does not appear to hurt performance otherwise.

Review URL: http://codereview.chromium.org/6594073

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7016 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 3b5256b3
......@@ -844,8 +844,6 @@ void Heap::MarkCompactPrologue(bool is_compacting) {
ContextSlotCache::Clear();
DescriptorLookupCache::Clear();
RuntimeProfiler::MarkCompactPrologue(is_compacting);
CompilationCache::MarkCompactPrologue();
CompletelyClearInstanceofCache();
......@@ -1056,20 +1054,13 @@ void Heap::Scavenge() {
// Scavenge object reachable from the global contexts list directly.
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
// Scavenge objects reachable from the runtime-profiler sampler
// window directly.
Object** sampler_window_address = RuntimeProfiler::SamplerWindowAddress();
int sampler_window_size = RuntimeProfiler::SamplerWindowSize();
scavenge_visitor.VisitPointers(
sampler_window_address,
sampler_window_address + sampler_window_size);
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
LiveObjectList::UpdateReferencesForScavengeGC();
RuntimeProfiler::UpdateSamplesAfterScavenge();
ASSERT(new_space_front == new_space_.top());
......
......@@ -1353,6 +1353,9 @@ void MarkCompactCollector::MarkLiveObjects() {
// Flush code from collected candidates.
FlushCode::ProcessCandidates();
// Clean up dead objects from the runtime profiler.
RuntimeProfiler::RemoveDeadSamples();
}
......@@ -1937,6 +1940,9 @@ static void SweepNewSpace(NewSpace* space) {
// All pointers were updated. Update auxiliary allocation info.
Heap::IncrementYoungSurvivorsCounter(survivors_size);
space->set_age_mark(space->top());
// Update JSFunction pointers from the runtime profiler.
RuntimeProfiler::UpdateSamplesAfterScavenge();
}
......@@ -2535,6 +2541,7 @@ void MarkCompactCollector::UpdatePointers() {
state_ = UPDATE_POINTERS;
#endif
UpdatingVisitor updating_visitor;
RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor);
Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
GlobalHandles::IterateWeakRoots(&updating_visitor);
......
......@@ -35,6 +35,7 @@
#include "deoptimizer.h"
#include "execution.h"
#include "global-handles.h"
#include "mark-compact.h"
#include "scopeinfo.h"
#include "top.h"
......@@ -100,11 +101,6 @@ static int sampler_ticks_until_threshold_adjustment =
// The ratio of ticks spent in JS code in percent.
static Atomic32 js_ratio;
// The sampler window holds raw JSFunction pointers, which are not GC
// safe by themselves; the collector keeps the window in sync
// explicitly: UpdateSamplesAfterScavenge follows forwarding addresses
// after a scavenge, RemoveDeadSamples drops entries that were not
// marked live during mark-sweep, and UpdateSamplesAfterCompact
// rewrites entries moved by compaction.
static Object* sampler_window[kSamplerWindowSize] = { NULL, };
// Next slot to write — presumably a circular-buffer cursor; TODO confirm
// against the sampling code (not visible here).
static int sampler_window_position = 0;
// Per-slot weight, cleared together with its sampler_window entry.
static int sampler_window_weight[kSamplerWindowSize] = { 0, };
......@@ -134,7 +130,6 @@ void PendingListNode::WeakCallback(v8::Persistent<v8::Value>, void* data) {
// A function is a candidate for optimization if it currently runs
// optimizable full-codegen code. New-space functions are deliberately
// NOT excluded: rejecting them meant a hot function could stay
// unoptimized until a GC happened to promote it (see commit message),
// and the sampler window now tolerates new-space pointers.
static bool IsOptimizable(JSFunction* function) {
  Code* code = function->code();
  return code->kind() == Code::FUNCTION && code->optimizable();
}
......@@ -208,16 +203,6 @@ static void ClearSampleBuffer() {
}
// Vacate every sampler window slot whose entry points into new space;
// the slot's weight is reset along with the pointer.
static void ClearSampleBufferNewSpaceEntries() {
  for (int slot = 0; slot < kSamplerWindowSize; slot++) {
    if (!Heap::InNewSpace(sampler_window[slot])) continue;
    sampler_window[slot] = NULL;
    sampler_window_weight[slot] = 0;
  }
}
static int LookupSample(JSFunction* function) {
int weight = 0;
for (int i = 0; i < kSamplerWindowSize; i++) {
......@@ -372,24 +357,6 @@ void RuntimeProfiler::NotifyTick() {
}
// Prepare the sampler window for a mark-compact collection.
// A compacting collection can move any function, so the whole window
// is dropped; a non-compacting mark-sweep invalidates only new-space
// entries, because old-space objects do not move.
void RuntimeProfiler::MarkCompactPrologue(bool is_compacting) {
  if (!is_compacting) {
    // Old-space functions stay put; only new-space entries can go stale.
    ClearSampleBufferNewSpaceEntries();
    return;
  }
  // Every function might move: forget all samples.
  ClearSampleBuffer();
}
// Pointer-identity predicate, usable as an equality callback.
bool IsEqual(void* first, void* second) {
  bool same = (first == second);
  return same;
}
void RuntimeProfiler::Setup() {
ClearSampleBuffer();
// If the ticker hasn't already started, make sure to do so to get
......@@ -411,13 +378,41 @@ void RuntimeProfiler::TearDown() {
}
Object** RuntimeProfiler::SamplerWindowAddress() {
return sampler_window;
int RuntimeProfiler::SamplerWindowSize() {
return kSamplerWindowSize;
}
int RuntimeProfiler::SamplerWindowSize() {
return kSamplerWindowSize;
// Update the pointers in the sampler window after a GC.
void RuntimeProfiler::UpdateSamplesAfterScavenge() {
for (int i = 0; i < kSamplerWindowSize; i++) {
Object* function = sampler_window[i];
if (function != NULL && Heap::InNewSpace(function)) {
MapWord map_word = HeapObject::cast(function)->map_word();
if (map_word.IsForwardingAddress()) {
sampler_window[i] = map_word.ToForwardingAddress();
} else {
sampler_window[i] = NULL;
}
}
}
}
void RuntimeProfiler::RemoveDeadSamples() {
for (int i = 0; i < kSamplerWindowSize; i++) {
Object* function = sampler_window[i];
if (function != NULL && !HeapObject::cast(function)->IsMarked()) {
sampler_window[i] = NULL;
}
}
}
// Hand every sampler window slot to the compactor's updating visitor
// so the samples follow objects that moved during compaction.
void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
  for (int slot = 0; slot < kSamplerWindowSize; slot++) {
    visitor->VisitPointer(&sampler_window[slot]);
  }
}
......
......@@ -47,9 +47,10 @@ class RuntimeProfiler : public AllStatic {
static void Reset();
static void TearDown();
// GC prologue: clears the whole sampler window before a compacting
// collection, only new-space entries before a plain mark-sweep.
static void MarkCompactPrologue(bool is_compacting);
// Base address of the sampler window, exposed so the GC can visit the
// JSFunction pointers held there; size in slots follows.
static Object** SamplerWindowAddress();
static int SamplerWindowSize();
// Redirect window entries to their forwarding addresses after a
// scavenge; entries whose object died are dropped.
static void UpdateSamplesAfterScavenge();
// Drop window entries whose function was not marked live.
static void RemoveDeadSamples();
// Rewrite window entries via the compactor's updating visitor.
static void UpdateSamplesAfterCompact(ObjectVisitor* visitor);
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.