Commit bfd04817 authored by vegorov@chromium.org's avatar vegorov@chromium.org

Notify collector about lazily deoptimized code objects.

All slots that were recorded on these objects during incremental marking should be ignored as they are no longer valid.

To filter such invalidated slots out during slots buffers iteration we set all markbits under the invalidated code object to 1 after the code space was swept and before slots buffers are processed.

R=erik.corry@gmail.com
BUG=v8:1713
TEST=test/mjsunit/regress/regress-1713.js

Review URL: http://codereview.chromium.org/7983045

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9402 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 7ab81a14
......@@ -118,6 +118,11 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
// Set the code for the function to non-optimized version.
function->ReplaceCode(function->shared()->code());
......
......@@ -206,6 +206,11 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
// Set the code for the function to non-optimized version.
function->ReplaceCode(function->shared()->code());
......
......@@ -453,6 +453,19 @@ void IncrementalMarking::StartMarking() {
MarkObjectGreyDoNotEnqueue(heap_->polymorphic_code_cache());
}
if (is_compacting_) {
// It's difficult to filter out slots recorded for large objects.
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
if (obj->IsFixedArray() || obj->IsCode()) {
Page* p = Page::FromAddress(obj->address());
if (p->size() > static_cast<size_t>(Page::kPageSize)) {
p->SetFlag(Page::RESCAN_ON_EVACUATION);
}
}
}
}
// Mark strong roots grey.
IncrementalMarkingRootMarkingVisitor visitor(heap_, this);
heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
......@@ -605,6 +618,16 @@ void IncrementalMarking::Abort() {
PatchIncrementalMarkingRecordWriteStubs(heap_,
RecordWriteStub::STORE_BUFFER_ONLY);
DeactivateIncrementalWriteBarrier();
if (is_compacting_) {
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
Page* p = Page::FromAddress(obj->address());
if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
p->ClearFlag(Page::RESCAN_ON_EVACUATION);
}
}
}
}
heap_->isolate()->stack_guard()->Continue(GC_REQUEST);
state_ = STOPPED;
......
This diff is collapsed.
......@@ -324,6 +324,8 @@ class SlotsBuffer {
void UpdateSlots(Heap* heap);
void UpdateSlotsWithFilter(Heap* heap);
SlotsBuffer* next() { return next_; }
static int SizeOfChain(SlotsBuffer* buffer) {
......@@ -340,9 +342,15 @@ class SlotsBuffer {
return idx_ < kNumberOfElements - 1;
}
static void UpdateSlotsRecordedIn(Heap* heap, SlotsBuffer* buffer) {
static void UpdateSlotsRecordedIn(Heap* heap,
SlotsBuffer* buffer,
bool code_slots_filtering_required) {
while (buffer != NULL) {
buffer->UpdateSlots(heap);
if (code_slots_filtering_required) {
buffer->UpdateSlotsWithFilter(heap);
} else {
buffer->UpdateSlots(heap);
}
buffer = buffer->next();
}
}
......@@ -546,10 +554,17 @@ class MarkCompactCollector {
encountered_weak_maps_ = weak_map;
}
void InvalidateCode(Code* code);
private:
MarkCompactCollector();
~MarkCompactCollector();
bool MarkInvalidatedCode();
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
#ifdef DEBUG
enum CollectorState {
IDLE,
......@@ -573,6 +588,8 @@ class MarkCompactCollector {
// candidates.
bool compacting_;
bool was_marked_incrementally_;
bool collect_maps_;
// A pointer to the current stack-allocated GC tracer object during a full
......@@ -762,6 +779,7 @@ class MarkCompactCollector {
Object* encountered_weak_maps_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
friend class Heap;
};
......
......@@ -1930,8 +1930,10 @@ void PagedSpace::PrepareForMarkCompact() {
Page* last = last_unswept_page_->next_page();
Page* p = first_unswept_page_;
do {
if (ShouldBeSweptLazily(p)) {
ASSERT(!p->WasSwept());
// Do not use ShouldBeSweptLazily predicate here.
// New evacuation candidates were selected but they still have
// to be swept before collection starts.
if (!p->WasSwept()) {
Bitmap::Clear(p);
if (FLAG_gc_verbose) {
PrintF("Sweeping 0x%" V8PRIxPTR " lazily abandoned.\n",
......
......@@ -187,7 +187,7 @@ class Bitmap {
}
static int SizeFor(int cells_count) {
return sizeof(MarkBit::CellType)*cells_count;
return sizeof(MarkBit::CellType) * cells_count;
}
INLINE(static uint32_t IndexToCell(uint32_t index)) {
......@@ -1585,7 +1585,8 @@ class PagedSpace : public Space {
(ratio > ratio_threshold) ? "[fragmented]" : "");
}
return (ratio > ratio_threshold) || FLAG_always_compact;
return (ratio > ratio_threshold) ||
(FLAG_always_compact && sizes[3] != Page::kObjectAreaSize);
}
void EvictEvacuationCandidatesFromFreeLists();
......
......@@ -204,6 +204,11 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
node->set_next(data->deoptimizing_code_list_);
data->deoptimizing_code_list_ = node;
// We might be in the middle of incremental marking with compaction.
// Tell collector to treat this code object in a special way and
// ignore all slots that might have been recorded on it.
isolate->heap()->mark_compact_collector()->InvalidateCode(code);
// Set the code for the function to non-optimized version.
function->ReplaceCode(function->shared()->code());
......
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --always-compact --expose-gc
// O exposes an accessor property `f`; reads of O.f in the hot loops below
// presumably compile to the optimized code that later gets deoptimized and
// invalidated (see bar() and the commit description) — confirm against the
// collector-side change.
var O = { get f() { return 0; } };
// CODE keeps every dynamically created function alive for the whole test
// (populated by CreateCode inside Incremental).
var CODE = [];
// NOTE(review): R is declared but never referenced in this file; kept as-is.
var R = [];
// Allocates N arrays of 1024 elements (~4Kb each) and returns them so the
// caller decides their lifetime. Note: the do-while allocates at least one
// chunk even when N <= 0, matching the original behavior.
function Allocate4Kb(N) {
  var chunks = [];
  do {
    chunks.push(new Array(1024));
    N--;
  } while (N > 0);
  return chunks;
}
// Allocates roughly X megabytes by delegating to Allocate4Kb
// (X Mb == 1024*X Kb == (1024*X)/4 four-kilobyte chunks).
function AllocateXMb(X) {
  var fourKbChunks = (1024 * X) / 4;
  return Allocate4Kb(fourKbChunks);
}
function Node(v, next) { this.v = v; this.next = next; }
// Walks to the tail of the list and invokes the function stored there,
// passing O. (Only the tail node's `v` is expected to be callable.)
Node.prototype.execute = function (O) {
  var cursor = this;
  while (cursor.next !== null) {
    cursor = cursor.next;
  }
  cursor.v(O);
};
// Builds a list of N payload nodes (each carrying a fresh 1024-element
// array) ending in a tail node that carries x. Iterative construction from
// the tail up yields exactly the same structure as the original recursion:
// LongList(0, x) is just the tail Node(x, null).
function LongList(N, x) {
  var node = new Node(x, null);
  while (N > 0) {
    node = new Node(new Array(1024), node);
    N--;
  }
  return node;
}
// A 1024-node list whose tail holds a closure that reads O.f a few times.
// The list body is ~4Mb of arrays; executing it touches the whole chain and
// the closure keeps the O.f access site alive until L is dropped.
var L = LongList(1024, function (O) {
  for (var i = 0; i < 5; i++) O.f;
});
// When x is true, churns the heap to (presumably) get incremental marking
// with compaction going before the deopt in bar(): creates 1e4 small code
// objects, runs full GCs, walks the long list L, drops it, then allocates
// 8Mb. Exact allocation order matters for the regression — do not reorder.
function Incremental(O, x) {
  if (!x) {
    return;
  }
  // Creates a fresh function reading a distinct property name, keeps it
  // alive in CODE, and calls it repeatedly so it gets compiled.
  function CreateCode(i) {
    var f = new Function("return O.f_" + i);
    CODE.push(f);
    f(); // compile
    f(); // compile
    f(); // compile
  }
  for (var i = 0; i < 1e4; i++) CreateCode(i);
  gc();
  gc();
  gc();
  print(">>> 1 <<<");
  L.execute(O);
  // Empty try/catch: likely present to perturb compilation of this
  // function — TODO confirm; harmless either way.
  try {} catch (e) {}
  L = null;
  print(">>> 2 <<<");
  AllocateXMb(8);
  // Disabled extra churn, kept for reference (first characters were
  // overwritten by the original "//" comment markers):
  // print("1");
  // AllocateXMb(8);
  // print("1");
  // AllocateXMb(8);
}
// The function under test: gets optimized, then deoptimized from inside
// bar() while x is true. The O.f reads are the polymorphic access site.
function foo(O, x) {
  Incremental(O, x);
  print('f');
  for (var k = 0; k < 5; k++) {
    O.f;
  }
  print('g');
  bar(x);
}
function bar(x) {
if (!x) return;
%DeoptimizeFunction(foo);
AllocateXMb(8);
AllocateXMb(8);
}
// Receivers with different shapes; O4 has a plain data property f so the
// final call takes a different path through the O.f access site.
var O1 = {};
var O2 = {};
var O3 = {};
var O4 = {f:0};
// Warm foo up with x == false (no deopt), force optimization, then run the
// deopt-triggering call with x == true.
foo(O1, false);
foo(O2, false);
foo(O3, false);
%OptimizeFunctionOnNextCall(foo);
foo(O4, true);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment