Commit 598de609 authored by ager@chromium.org

Revert r5455 from bleeding_edge: attempt to collect more garbage
before panicking with out of memory.

TBR=antonm@chromium.org
Review URL: http://codereview.chromium.org/4034002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5681 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 9bcdac5f
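For context, the reverted r5455 made the last-resort out-of-memory path collect aggressively: after a major GC it re-ran mark-compact for as long as some weak-handle callback had been invoked, since such callbacks can release objects that only the next major GC can reclaim, with the retries capped because callbacks may execute arbitrary code. Below is a minimal standalone sketch of that loop; every name in it is a simplified stand-in for the V8 internals visible in the diff, not the real API.

// Sketch of the reverted AGGRESSIVE collection loop; the stand-in
// functions below are hypothetical simplifications, not the V8 API.
#include <cstdio>

// Stand-in: runs pending weak-handle callbacks and reports whether any
// fired (mirrors the bool return that this commit removes).
static bool PostGarbageCollectionProcessing() {
  static int pending_callbacks = 2;  // pretend two callbacks are queued
  if (pending_callbacks == 0) return false;
  --pending_callbacks;
  return true;
}

// Stand-in for a full mark-compact collection.
static void MarkCompact() { std::puts("mark-compact"); }

// Shape of the reverted policy: weak callbacks may free objects that only
// the *next* major GC can reclaim, so keep re-collecting until no callback
// fires, capped because callbacks can execute arbitrary code.
static void CollectAllAvailableGarbage() {
  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!PostGarbageCollectionProcessing()) break;
    MarkCompact();
  }
}

int main() {
  CollectAllAvailableGarbage();  // prints "mark-compact" twice
  return 0;
}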
src/global-handles.cc
@@ -372,14 +372,13 @@ void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
 int post_gc_processing_count = 0;
 
-bool GlobalHandles::PostGarbageCollectionProcessing() {
+void GlobalHandles::PostGarbageCollectionProcessing() {
   // Process weak global handle callbacks. This must be done after the
   // GC is completely done, because the callbacks may invoke arbitrary
   // API functions.
   // At the same time deallocate all DESTROYED nodes.
   ASSERT(Heap::gc_state() == Heap::NOT_IN_GC);
   const int initial_post_gc_processing_count = ++post_gc_processing_count;
-  bool weak_callback_invoked = false;
   Node** p = &head_;
   while (*p != NULL) {
     if ((*p)->PostGarbageCollectionProcessing()) {
@@ -390,7 +389,6 @@ bool GlobalHandles::PostGarbageCollectionProcessing() {
         // restart the processing).
         break;
       }
-      weak_callback_invoked = true;
     }
     if ((*p)->state_ == Node::DESTROYED) {
       // Delete the link.
@@ -409,7 +407,6 @@ bool GlobalHandles::PostGarbageCollectionProcessing() {
   if (first_deallocated()) {
     first_deallocated()->set_next(head());
   }
-  return weak_callback_invoked;
 }
src/global-handles.h
@@ -95,9 +95,8 @@ class GlobalHandles : public AllStatic {
   // Tells whether global handle is weak.
   static bool IsWeak(Object** location);
 
-  // Process pending weak handles. Returns true if any weak handle
-  // callback has been invoked.
-  static bool PostGarbageCollectionProcessing();
+  // Process pending weak handles.
+  static void PostGarbageCollectionProcessing();
 
   // Iterates over all strong handles.
   static void IterateStrongRoots(ObjectVisitor* v);
src/heap-inl.h
@@ -35,16 +35,6 @@
 namespace v8 {
 namespace internal {
 
-void Heap::UpdateOldSpaceLimits() {
-  intptr_t old_gen_size = PromotedSpaceSize();
-  old_gen_promotion_limit_ =
-      old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
-  old_gen_allocation_limit_ =
-      old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
-  old_gen_exhausted_ = false;
-}
-
-
 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }
@@ -421,7 +411,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
     } \
     if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
     Counters::gc_last_resort_from_handles.Increment(); \
-    Heap::CollectAllAvailableGarbage(); \
+    Heap::CollectAllGarbage(false); \
     { \
       AlwaysAllocateScope __scope__; \
       __object__ = FUNCTION_CALL; \
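The macro hunk above switches the allocation-retry path's last resort back from CollectAllAvailableGarbage() to a single CollectAllGarbage(false). A hedged sketch of that retry control flow, with hypothetical stand-ins for the allocator and collector (not the real V8 heap API):

#include <cstdio>
#include <cstdlib>

static bool last_resort_gc_done = false;

// Hypothetical stand-in allocator: fails until the last-resort GC "ran".
static void* TryAllocate(std::size_t bytes) {
  return last_resort_gc_done ? std::malloc(bytes) : nullptr;
}

// Hypothetical stand-in for Heap::CollectAllGarbage(false).
static void CollectAllGarbage(bool force_compaction) {
  (void)force_compaction;
  last_resort_gc_done = true;  // pretend the full GC freed memory
}

// Mirrors the macro's control flow: on allocation failure, run one full
// GC as a last resort and retry once before giving up.
static void* AllocateWithRetry(std::size_t bytes) {
  if (void* result = TryAllocate(bytes)) return result;
  CollectAllGarbage(false);   // was CollectAllAvailableGarbage() pre-revert
  return TryAllocate(bytes);  // caller reports OOM if this is still null
}

int main() {
  void* p = AllocateWithRetry(64);
  std::printf("allocation %s\n", p != nullptr ? "succeeded" : "failed");
  std::free(p);
  return 0;
}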
src/heap.cc
@@ -56,6 +56,7 @@ String* Heap::hidden_symbol_;
 Object* Heap::roots_[Heap::kRootListLength];
 Object* Heap::global_contexts_list_;
 
+
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
@@ -64,6 +65,9 @@ MapSpace* Heap::map_space_ = NULL;
 CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;
 
+static const intptr_t kMinimumPromotionLimit = 2 * MB;
+static const intptr_t kMinimumAllocationLimit = 8 * MB;
+
 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
@@ -415,25 +419,17 @@ void Heap::GarbageCollectionEpilogue() {
 }
 
-void Heap::CollectAllGarbage(bool force_compaction,
-                             CollectionPolicy collectionPolicy) {
+void Heap::CollectAllGarbage(bool force_compaction) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
-  CollectGarbage(OLD_POINTER_SPACE, collectionPolicy);
+  CollectGarbage(OLD_POINTER_SPACE);
   MarkCompactCollector::SetForceCompaction(false);
 }
 
-void Heap::CollectAllAvailableGarbage() {
-  CompilationCache::Clear();
-  CollectAllGarbage(true, AGGRESSIVE);
-}
-
-void Heap::CollectGarbage(AllocationSpace space,
-                          CollectionPolicy collectionPolicy) {
+void Heap::CollectGarbage(AllocationSpace space) {
   // The VM is in the GC state until exiting this function.
   VMState state(GC);
@@ -460,7 +456,7 @@ void Heap::CollectGarbage(AllocationSpace space,
       ? &Counters::gc_scavenger
       : &Counters::gc_compactor;
   rate->Start();
-  PerformGarbageCollection(collector, &tracer, collectionPolicy);
+  PerformGarbageCollection(collector, &tracer);
   rate->Stop();
 
   GarbageCollectionEpilogue();
@@ -476,7 +472,7 @@ void Heap::CollectGarbage(AllocationSpace space,
 void Heap::PerformScavenge() {
   GCTracer tracer;
-  PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
+  PerformGarbageCollection(SCAVENGER, &tracer);
 }
@@ -661,8 +657,7 @@ void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
 }
 
 void Heap::PerformGarbageCollection(GarbageCollector collector,
-                                    GCTracer* tracer,
-                                    CollectionPolicy collectionPolicy) {
+                                    GCTracer* tracer) {
   if (collector != SCAVENGER) {
     PROFILE(CodeMovingGCEvent());
   }
@@ -696,28 +691,12 @@ void Heap::PerformGarbageCollection(GarbageCollector collector,
     UpdateSurvivalRateTrend(start_new_space_size);
 
-    UpdateOldSpaceLimits();
-
-    // Major GC would invoke weak handle callbacks on weakly reachable
-    // handles, but won't collect weakly reachable objects until next
-    // major GC. Therefore if we collect aggressively and weak handle callback
-    // has been invoked, we rerun major GC to release objects which become
-    // garbage.
-    if (collectionPolicy == AGGRESSIVE) {
-      // Note: as weak callbacks can execute arbitrary code, we cannot
-      // hope that eventually there will be no weak callbacks invocations.
-      // Therefore stop recollecting after several attempts.
-      const int kMaxNumberOfAttempts = 7;
-      for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
-        { DisableAssertNoAllocation allow_allocation;
-          GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-          if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
-        }
-        MarkCompact(tracer);
-        // Weak handle callbacks can allocate data, so keep limits correct.
-        UpdateOldSpaceLimits();
-      }
-    } else {
-      if (high_survival_rate_during_scavenges &&
-          IsStableOrIncreasingSurvivalTrend()) {
-        // Stable high survival rates of young objects both during partial and
+    intptr_t old_gen_size = PromotedSpaceSize();
+    old_gen_promotion_limit_ =
+        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
+    old_gen_allocation_limit_ =
+        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+
+    if (high_survival_rate_during_scavenges &&
+        IsStableOrIncreasingSurvivalTrend()) {
+      // Stable high survival rates of young objects both during partial and
@@ -729,12 +708,8 @@ void Heap::PerformGarbageCollection(GarbageCollector collector,
-        old_gen_promotion_limit_ *= 2;
-        old_gen_allocation_limit_ *= 2;
-      }
-    }
+      old_gen_promotion_limit_ *= 2;
+      old_gen_allocation_limit_ *= 2;
+    }
 
-    { DisableAssertNoAllocation allow_allocation;
-      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-      GlobalHandles::PostGarbageCollectionProcessing();
-    }
+    old_gen_exhausted_ = false;
   } else {
     tracer_ = tracer;
     Scavenge();
@@ -745,6 +720,12 @@ void Heap::PerformGarbageCollection(GarbageCollector collector,
   Counters::objs_since_last_young.Set(0);
 
+  if (collector == MARK_COMPACTOR) {
+    DisableAssertNoAllocation allow_allocation;
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+    GlobalHandles::PostGarbageCollectionProcessing();
+  }
+
   // Update relocatables.
   Relocatable::PostGarbageCollectionProcessing();
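The inlined limit computation restored above sets each threshold to the promoted old-generation size plus a floor (2 MB for promotion, 8 MB for allocation) or a fraction of that size, whichever is larger. A worked example with an assumed 30 MB old generation (illustrative values only, not from the source):

#include <algorithm>
#include <cstdio>

int main() {
  const long MB = 1024 * 1024;
  const long kMinimumPromotionLimit = 2 * MB;   // floor from heap.cc
  const long kMinimumAllocationLimit = 8 * MB;  // floor from heap.cc
  long old_gen_size = 30 * MB;  // assumed promoted-space size

  // promotion limit = size + max(2 MB, size / 3); here 30 + 10 = 40 MB.
  long promotion_limit =
      old_gen_size + std::max(kMinimumPromotionLimit, old_gen_size / 3);
  // allocation limit = size + max(8 MB, size / 2); here 30 + 15 = 45 MB.
  long allocation_limit =
      old_gen_size + std::max(kMinimumAllocationLimit, old_gen_size / 2);

  std::printf("promotion limit:  %ld MB\n", promotion_limit / MB);
  std::printf("allocation limit: %ld MB\n", allocation_limit / MB);
  return 0;
}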
src/heap.h
@@ -693,20 +693,13 @@ class Heap : public AllStatic {
   static void GarbageCollectionPrologue();
   static void GarbageCollectionEpilogue();
 
-  enum CollectionPolicy { NORMAL, AGGRESSIVE };
-
   // Performs garbage collection operation.
   // Returns whether required_space bytes are available after the collection.
-  static void CollectGarbage(AllocationSpace space,
-                             CollectionPolicy collectionPolicy = NORMAL);
+  static void CollectGarbage(AllocationSpace space);
 
   // Performs a full garbage collection. Force compaction if the
   // parameter is true.
-  static void CollectAllGarbage(bool force_compaction,
-                                CollectionPolicy collectionPolicy = NORMAL);
-
-  // Last hope GC, should try to squeeze as much as possible.
-  static void CollectAllAvailableGarbage();
+  static void CollectAllGarbage(bool force_compaction);
 
   // Notify the heap that a context has been disposed.
   static int NotifyContextDisposed() { return ++contexts_disposed_; }
@@ -1242,13 +1235,7 @@ class Heap : public AllStatic {
   // Performs garbage collection
   static void PerformGarbageCollection(GarbageCollector collector,
-                                       GCTracer* tracer,
-                                       CollectionPolicy collectionPolicy);
-
-  static const intptr_t kMinimumPromotionLimit = 2 * MB;
-  static const intptr_t kMinimumAllocationLimit = 8 * MB;
-
-  inline static void UpdateOldSpaceLimits();
+                                       GCTracer* tracer);
 
   // Allocate an uninitialized object in map space. The behavior is identical
   // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't