Enable incremental code flushing.

This enables code flushing even with incremental marking enabled and
fully shares the function link field in JSFunctions between candidates
for code flushing and the optimized functions list. If a candidate for
code flushing gets optimized, it will be evicted from the candidates
list.

R=ulan@chromium.org
BUG=v8:1609

Review URL: https://codereview.chromium.org/11140025

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12796 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 362e8cb0
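
The mechanism is easiest to see in isolation: one pointer slot per function
is shared between the list of code flushing candidates and the optimized
functions list, so enqueueing must be idempotent and optimizing a function
must first evict it. The following is a minimal, hypothetical C++ sketch of
that scheme; the names (Function, CodeFlusher, next_link) are illustrative
stand-ins, not V8's actual API:

    // Sketch of the shared-link-field scheme (hypothetical names, not V8's
    // API). A nullptr link means "not enqueued on any list"; a sentinel node
    // terminates the candidate list so the tail stays distinguishable.
    #include <cassert>
    #include <cstdio>

    struct Function {
      const char* name;
      Function* next_link;  // The single shared link slot.
    };

    struct CodeFlusher {
      Function tail{"<tail>", nullptr};  // Sentinel list terminator.
      Function* head = &tail;

      bool IsEnqueued(const Function* f) const {
        return f->next_link != nullptr;
      }

      // Idempotent: a function whose link slot is in use is left alone.
      void AddCandidate(Function* f) {
        if (!IsEnqueued(f)) {
          f->next_link = head;
          head = f;
        }
      }

      // Unlink |f| so its link slot can be reused by the optimized list.
      void EvictCandidate(Function* f) {
        if (!IsEnqueued(f)) return;
        if (head == f) {
          head = f->next_link;
        } else {
          for (Function* c = head; c != &tail; c = c->next_link) {
            if (c->next_link == f) c->next_link = f->next_link;
          }
        }
        f->next_link = nullptr;
      }
    };

    int main() {
      CodeFlusher flusher;
      Function foo{"foo", nullptr};
      flusher.AddCandidate(&foo);    // foo becomes a flushing candidate.
      flusher.EvictCandidate(&foo);  // foo got optimized: free the slot.
      assert(!flusher.IsEnqueued(&foo));
      std::printf("link slot of %s is free again\n", foo.name);
      return 0;
    }
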
@@ -251,8 +251,6 @@ void Context::AddOptimizedFunction(JSFunction* function) {
     }
   }
 
-  CHECK(function->next_function_link()->IsUndefined());
-
   // Check that the context belongs to the weak native contexts list.
   bool found = false;
   Object* context = GetHeap()->native_contexts_list();
@@ -265,6 +263,16 @@ void Context::AddOptimizedFunction(JSFunction* function) {
   }
   CHECK(found);
 #endif
+
+  // If the function link field is already used then the function was
+  // enqueued as a code flushing candidate and we remove it now.
+  if (!function->next_function_link()->IsUndefined()) {
+    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
+    flusher->EvictCandidate(function);
+  }
+
+  ASSERT(function->next_function_link()->IsUndefined());
+
   function->set_next_function_link(get(OPTIMIZED_FUNCTIONS_LIST));
   set(OPTIMIZED_FUNCTIONS_LIST, function);
 }
......
@@ -420,6 +420,14 @@ void Heap::GarbageCollectionPrologue() {
   gc_count_++;
   unflattened_strings_length_ = 0;
 
+  bool should_enable_code_flushing = FLAG_flush_code;
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  if (isolate_->debug()->IsLoaded() || isolate_->debug()->has_break_points()) {
+    should_enable_code_flushing = false;
+  }
+#endif
+  mark_compact_collector()->EnableCodeFlushing(should_enable_code_flushing);
+
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
......
@@ -926,6 +926,32 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
 }
 
 
+void CodeFlusher::EvictCandidate(JSFunction* function) {
+  ASSERT(!function->next_function_link()->IsUndefined());
+  Object* undefined = isolate_->heap()->undefined_value();
+
+  JSFunction* candidate = jsfunction_candidates_head_;
+  JSFunction* next_candidate;
+  if (candidate == function) {
+    next_candidate = GetNextCandidate(function);
+    jsfunction_candidates_head_ = next_candidate;
+    ClearNextCandidate(function, undefined);
+  } else {
+    while (candidate != NULL) {
+      next_candidate = GetNextCandidate(candidate);
+
+      if (next_candidate == function) {
+        next_candidate = GetNextCandidate(function);
+        SetNextCandidate(candidate, next_candidate);
+        ClearNextCandidate(function, undefined);
+      }
+
+      candidate = next_candidate;
+    }
+  }
+}
+
+
 MarkCompactCollector::~MarkCompactCollector() {
   if (code_flusher_ != NULL) {
     delete code_flusher_;
@@ -1430,21 +1456,8 @@ void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
 void MarkCompactCollector::PrepareForCodeFlushing() {
   ASSERT(heap() == Isolate::Current()->heap());
 
-  // TODO(1609) Currently incremental marker does not support code flushing.
-  if (!FLAG_flush_code || was_marked_incrementally_) {
-    EnableCodeFlushing(false);
-    return;
-  }
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  if (heap()->isolate()->debug()->IsLoaded() ||
-      heap()->isolate()->debug()->has_break_points()) {
-    EnableCodeFlushing(false);
-    return;
-  }
-#endif
-
-  EnableCodeFlushing(true);
+  // If code flushing is disabled, there is no need to prepare for it.
+  if (!is_code_flushing_enabled()) return;
 
   // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
   // relies on it being marked before any other descriptor array.
@@ -2005,9 +2018,6 @@ void MarkCompactCollector::AfterMarking() {
   // Flush code from collected candidates.
   if (is_code_flushing_enabled()) {
     code_flusher_->ProcessCandidates();
-    // TODO(1609) Currently incremental marker does not support code flushing,
-    // we need to disable it before incremental marking steps for next cycle.
-    EnableCodeFlushing(false);
   }
 
   if (!FLAG_watch_ic_patching) {
......
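
EvictCandidate above must handle two cases: the function at the head of the
candidate list, and a function in the interior whose predecessor has to be
relinked. Continuing the hypothetical sketch from the commit message (same
illustrative types, not V8's API), both cases can be exercised directly:

    // Continuing the sketch above (hypothetical types, not V8's API).
    CodeFlusher flusher;
    Function a{"a", nullptr};
    Function b{"b", nullptr};
    flusher.AddCandidate(&a);
    flusher.AddCandidate(&b);    // Candidate list: b -> a -> <tail>.
    flusher.EvictCandidate(&a);  // Interior evict: b is relinked to <tail>.
    assert(b.next_link == &flusher.tail && !flusher.IsEnqueued(&a));
    flusher.EvictCandidate(&b);  // Head evict: the list becomes empty.
    assert(flusher.head == &flusher.tail);
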
@@ -420,17 +420,22 @@ class CodeFlusher {
         shared_function_info_candidates_head_(NULL) {}
 
   void AddCandidate(SharedFunctionInfo* shared_info) {
-    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
-    shared_function_info_candidates_head_ = shared_info;
+    if (GetNextCandidate(shared_info) == NULL) {
+      SetNextCandidate(shared_info, shared_function_info_candidates_head_);
+      shared_function_info_candidates_head_ = shared_info;
+    }
   }
 
   void AddCandidate(JSFunction* function) {
     ASSERT(function->code() == function->shared()->code());
-    ASSERT(function->next_function_link()->IsUndefined());
-    SetNextCandidate(function, jsfunction_candidates_head_);
-    jsfunction_candidates_head_ = function;
+    if (GetNextCandidate(function)->IsUndefined()) {
+      SetNextCandidate(function, jsfunction_candidates_head_);
+      jsfunction_candidates_head_ = function;
+    }
   }
 
+  void EvictCandidate(JSFunction* function);
+
   void ProcessCandidates() {
     ProcessSharedFunctionInfoCandidates();
     ProcessJSFunctionCandidates();
......
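
The guards added to both AddCandidate overloads make enqueueing idempotent,
which matters once marking is incremental: a visitor can encounter the same
function in more than one marking step, and a function whose link slot is
already in use (as a candidate or on the optimized list) must not be linked a
second time. In terms of the hypothetical sketch from the commit message:

    // Continuing the sketch above: a second AddCandidate is a no-op.
    CodeFlusher flusher;
    Function foo{"foo", nullptr};
    flusher.AddCandidate(&foo);
    flusher.AddCandidate(&foo);  // Seen again by a later marking step.
    assert(flusher.head == &foo && foo.next_link == &flusher.tail);
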
@@ -23,6 +23,19 @@ static void InitializeVM() {
 }
 
 
+// Go through all incremental marking steps in one swoop.
+static void SimulateIncrementalMarking() {
+  IncrementalMarking* marking = HEAP->incremental_marking();
+  CHECK(marking->IsStopped());
+  marking->Start();
+  CHECK(marking->IsMarking());
+  while (!marking->IsComplete()) {
+    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+  }
+  CHECK(marking->IsComplete());
+}
+
+
 static void CheckMap(Map* map, int type, int instance_size) {
   CHECK(map->IsHeapObject());
 #ifdef DEBUG
@@ -942,9 +955,9 @@ TEST(Regression39128) {
 TEST(TestCodeFlushing) {
-  i::FLAG_allow_natives_syntax = true;
   // If we do not flush code this test is invalid.
   if (!FLAG_flush_code) return;
+  i::FLAG_allow_natives_syntax = true;
   InitializeVM();
   v8::HandleScope scope;
   const char* source = "function foo() {"
@@ -967,18 +980,16 @@
   Handle<JSFunction> function(JSFunction::cast(func_value));
   CHECK(function->shared()->is_compiled());
 
-  // TODO(1609) Currently incremental marker does not support code flushing.
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  // The code will survive at least two GCs.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
   CHECK(function->shared()->is_compiled());
 
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  // Simulate several GCs that use full marking.
+  const int kAgingThreshold = 6;
+  for (int i = 0; i < kAgingThreshold; i++) {
+    HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+  }
 
   // foo should no longer be in the compilation cache
   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -990,6 +1001,74 @@
 }
 
 
+TEST(TestCodeFlushingIncremental) {
+  // If we do not flush code this test is invalid.
+  if (!FLAG_flush_code) return;
+  i::FLAG_allow_natives_syntax = true;
+  InitializeVM();
+  v8::HandleScope scope;
+  const char* source = "function foo() {"
+                       "  var x = 42;"
+                       "  var y = 42;"
+                       "  var z = x + y;"
+                       "};"
+                       "foo()";
+  Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+  // This compile will add the code to the compilation cache.
+  { v8::HandleScope scope;
+    CompileRun(source);
+  }
+
+  // Check function is compiled.
+  Object* func_value = Isolate::Current()->context()->global_object()->
+      GetProperty(*foo_name)->ToObjectChecked();
+  CHECK(func_value->IsJSFunction());
+  Handle<JSFunction> function(JSFunction::cast(func_value));
+  CHECK(function->shared()->is_compiled());
+
+  // The code will survive at least two GCs.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK(function->shared()->is_compiled());
+
+  // Simulate several GCs that use incremental marking.
+  const int kAgingThreshold = 6;
+  for (int i = 0; i < kAgingThreshold; i++) {
+    HEAP->incremental_marking()->Abort();
+    SimulateIncrementalMarking();
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  }
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+  CHECK(!function->is_compiled() || function->IsOptimized());
+
+  // This compile will compile the function again.
+  { v8::HandleScope scope;
+    CompileRun("foo();");
+  }
+
+  // Simulate several GCs that use incremental marking but make sure
+  // the loop breaks once the function is enqueued as a candidate.
+  for (int i = 0; i < kAgingThreshold; i++) {
+    HEAP->incremental_marking()->Abort();
+    SimulateIncrementalMarking();
+    if (!function->next_function_link()->IsUndefined()) break;
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  }
+
+  // Force optimization while incremental marking is active and while
+  // the function is enqueued as a candidate.
+  { v8::HandleScope scope;
+    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+  }
+
+  // Simulate one final GC to make sure the candidate queue is sane.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+  CHECK(function->is_compiled() || !function->IsOptimized());
+}
+
+
 // Count the number of native contexts in the weak list of native contexts.
 int CountNativeContexts() {
   int count = 0;
@@ -1767,19 +1846,6 @@ static int CountMapTransitions(Map* map) {
 }
 
 
-// Go through all incremental marking steps in one swoop.
-static void SimulateIncrementalMarking() {
-  IncrementalMarking* marking = HEAP->incremental_marking();
-  CHECK(marking->IsStopped());
-  marking->Start();
-  CHECK(marking->IsMarking());
-  while (!marking->IsComplete()) {
-    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
-  }
-  CHECK(marking->IsComplete());
-}
-
-
 // Test that map transitions are cleared and maps are collected with
 // incremental marking as well.
 TEST(Regress1465) {
......