Commit 6d3cdbbf authored by vegorov@chromium.org

Merging scavenge into the sweeping phase of the mark-sweep(-compact) collector.

Should reduce full GC pauses. 
Review URL: http://codereview.chromium.org/1217011

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4318 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent dde48831
......@@ -240,6 +240,27 @@ void Heap::CopyBlock(Object** dst, Object** src, int byte_size) {
 }
+void Heap::MoveBlock(Object** dst, Object** src, size_t byte_size) {
+  ASSERT(IsAligned<size_t>(byte_size, kPointerSize));
+  int size_in_words = byte_size / kPointerSize;
+  if ((dst < src) || (dst >= (src + size_in_words))) {
+    ASSERT((dst >= (src + size_in_words)) ||
+           ((OffsetFrom(reinterpret_cast<Address>(src)) -
+             OffsetFrom(reinterpret_cast<Address>(dst))) >= kPointerSize));
+    Object** end = src + size_in_words;
+    while (src != end) {
+      *dst++ = *src++;
+    }
+  } else {
+    memmove(dst, src, byte_size);
+  }
+}
 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
   ASSERT(InFromSpace(object));
......
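The new Heap::MoveBlock above is a pointer-size-aligned block move: when the destination does not start inside the source range, a plain forward word-by-word copy is safe (any remaining overlap has dst below src, so each source word is read before it can be overwritten); otherwise it falls back to memmove. The standalone analogue below is illustrative only, not the V8 code; the MoveWords name, the plain void** element type, and the toy main() are assumptions made for the example.

// Standalone analogue of the MoveBlock idea (illustrative only, not V8 code).
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <cstring>

static const size_t kPointerSize = sizeof(void*);

void MoveWords(void** dst, void** src, size_t byte_size) {
  assert(byte_size % kPointerSize == 0);  // block size must be pointer aligned
  size_t size_in_words = byte_size / kPointerSize;
  if (dst < src || dst >= src + size_in_words) {
    // dst is below src or past the end of it: a forward word copy never
    // overwrites a source word before it has been read.
    void** end = src + size_in_words;
    while (src != end) *dst++ = *src++;
  } else {
    // dst starts inside the source range: defer to memmove.
    std::memmove(dst, src, byte_size);
  }
}

int main() {
  void* block[8];
  for (int i = 0; i < 8; ++i) block[i] = &block[i];
  // Overlapping move: shift the first four words up by one slot.
  MoveWords(block + 1, block, 4 * kPointerSize);
  std::printf("block[1] now holds the old block[0]? %s\n",
              block[1] == static_cast<void*>(&block[0]) ? "yes" : "no");
  return 0;
}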
......@@ -562,23 +562,18 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
   EnsureFromSpaceIsCommitted();
-  // Perform mark-sweep with optional compaction.
   if (collector == MARK_COMPACTOR) {
+    // Perform mark-sweep with optional compaction.
     MarkCompact(tracer);
-  }
-  // Always perform a scavenge to make room in new space.
-  Scavenge();
-  // Update the old space promotion limits after the scavenge due to
-  // promotions during scavenge.
-  if (collector == MARK_COMPACTOR) {
     int old_gen_size = PromotedSpaceSize();
     old_gen_promotion_limit_ =
         old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
     old_gen_allocation_limit_ =
         old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
     old_gen_exhausted_ = false;
+  } else {
+    Scavenge();
   }
   Counters::objs_since_last_young.Set(0);
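The hunk above restructures Heap::PerformGarbageCollection(): a full mark-sweep(-compact) collection no longer triggers a separate scavenge, the old-generation limits are recomputed directly after MarkCompact(), and only non-MARK_COMPACTOR collections call Scavenge(). The toy program below models that policy for illustration; it is not code from this commit, and the ToyHeap type and the two kMinimum* constant values are assumptions.

// Toy model of the new collection policy (illustrative only, not V8 code).
#include <algorithm>
#include <cstdio>

enum Collector { SCAVENGER, MARK_COMPACTOR };

static const int kMinimumPromotionLimit = 2 * 1024 * 1024;   // assumed value
static const int kMinimumAllocationLimit = 8 * 1024 * 1024;  // assumed value

struct ToyHeap {
  int promoted_space_size;       // stand-in for PromotedSpaceSize()
  int old_gen_promotion_limit;
  int old_gen_allocation_limit;
  bool old_gen_exhausted;

  void MarkCompact() { /* full GC; new space is evacuated during sweeping */ }
  void Scavenge()    { /* minor GC of new space only */ }

  void PerformGarbageCollection(Collector collector) {
    if (collector == MARK_COMPACTOR) {
      // A full GC now covers new space too, so no Scavenge() follows it.
      MarkCompact();
      // Recompute old-generation limits right after the full GC, using the
      // same formulas as the hunk above.
      int old_gen_size = promoted_space_size;
      old_gen_promotion_limit =
          old_gen_size + std::max(kMinimumPromotionLimit, old_gen_size / 3);
      old_gen_allocation_limit =
          old_gen_size + std::max(kMinimumAllocationLimit, old_gen_size / 2);
      old_gen_exhausted = false;
    } else {
      // Minor collections still go through the scavenger.
      Scavenge();
    }
  }
};

int main() {
  ToyHeap heap = {12 * 1024 * 1024, 0, 0, false};
  heap.PerformGarbageCollection(MARK_COMPACTOR);
  std::printf("promotion limit after full GC: %d bytes\n",
              heap.old_gen_promotion_limit);
  return 0;
}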
......@@ -764,6 +759,17 @@ static void VerifyNonPointerSpacePointers() {
 #endif
+void Heap::CheckNewSpaceExpansionCriteria() {
+  if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
+      survived_since_last_expansion_ > new_space_.Capacity()) {
+    // Grow the size of new space if there is room to grow and enough
+    // data has survived scavenge since the last expansion.
+    new_space_.Grow();
+    survived_since_last_expansion_ = 0;
+  }
+}
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
......@@ -780,13 +786,7 @@ void Heap::Scavenge() {
   // Used for updating survived_since_last_expansion_ at function end.
   int survived_watermark = PromotedSpaceSize();
-  if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
-      survived_since_last_expansion_ > new_space_.Capacity()) {
-    // Grow the size of new space if there is room to grow and enough
-    // data has survived scavenge since the last expansion.
-    new_space_.Grow();
-    survived_since_last_expansion_ = 0;
-  }
+  CheckNewSpaceExpansionCriteria();
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
......@@ -837,15 +837,17 @@ void Heap::Scavenge() {
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
-  ScavengeExternalStringTable();
+  UpdateNewSpaceReferencesInExternalStringTable(
+      &UpdateNewSpaceReferenceInExternalStringTableEntry);
   ASSERT(new_space_front == new_space_.top());
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
   // Update how much has survived scavenge.
-  survived_since_last_expansion_ +=
-      (PromotedSpaceSize() - survived_watermark) + new_space_.Size();
+  IncrementYoungSurvivorsCounter(
+      (PromotedSpaceSize() - survived_watermark) + new_space_.Size());
   LOG(ResourceEvent("scavenge", "end"));
......@@ -853,7 +855,22 @@ void Heap::Scavenge() {
 }
-void Heap::ScavengeExternalStringTable() {
+String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
+  MapWord first_word = HeapObject::cast(*p)->map_word();
+  if (!first_word.IsForwardingAddress()) {
+    // Unreachable external string can be finalized.
+    FinalizeExternalString(String::cast(*p));
+    return NULL;
+  }
+  // String is still reachable.
+  return String::cast(first_word.ToForwardingAddress());
+}
+void Heap::UpdateNewSpaceReferencesInExternalStringTable(
+    ExternalStringTableUpdaterCallback updater_func) {
   ExternalStringTable::Verify();
   if (ExternalStringTable::new_space_strings_.is_empty()) return;
......@@ -864,16 +881,10 @@ void Heap::ScavengeExternalStringTable() {
   for (Object** p = start; p < end; ++p) {
     ASSERT(Heap::InFromSpace(*p));
-    MapWord first_word = HeapObject::cast(*p)->map_word();
+    String* target = updater_func(p);
-    if (!first_word.IsForwardingAddress()) {
-      // Unreachable external string can be finalized.
-      FinalizeExternalString(String::cast(*p));
-      continue;
-    }
+    if (target == NULL) continue;
-    // String is still reachable.
-    String* target = String::cast(first_word.ToForwardingAddress());
     ASSERT(target->IsExternalString());
     if (Heap::InNewSpace(target)) {
......
......@@ -200,6 +200,9 @@ class GCTracer;
 class HeapStats;
+typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
 // The all static Heap captures the interface to the global object heap.
 // All JavaScript contexts by this process share the same object heap.
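The ExternalStringTableUpdaterCallback typedef added above captures the contract that heap.cc now relies on: the updater receives a pointer to a table slot and returns either the string's (possibly moved) location or NULL when the entry should be dropped, as the default UpdateNewSpaceReferenceInExternalStringTableEntry does after finalizing an unreachable string. The toy program below models that callback-driven table rewrite for illustration only; its String struct, UpdaterCallback signature, and vector-based table are stand-ins, not the V8 heap classes.

// Toy model of the callback-driven external string table update (not V8 code).
#include <cstdio>
#include <vector>

struct String { bool live; String* forwarded; };  // stand-in for heap strings

// Same shape as ExternalStringTableUpdaterCallback: return the new location,
// or NULL (here nullptr) to drop the entry from the table.
typedef String* (*UpdaterCallback)(String** slot);

String* DefaultUpdater(String** slot) {
  String* s = *slot;
  if (!s->live) return nullptr;             // unreachable: caller drops entry
  return s->forwarded ? s->forwarded : s;   // reachable: possibly moved
}

void UpdateTable(std::vector<String*>* table, UpdaterCallback updater) {
  size_t kept = 0;
  for (size_t i = 0; i < table->size(); ++i) {
    String* target = updater(&(*table)[i]);
    if (target == nullptr) continue;  // entry dropped (string finalized)
    (*table)[kept++] = target;        // keep entry, pointing at new location
  }
  table->resize(kept);
}

int main() {
  String a = {true, nullptr}, b = {false, nullptr}, moved = {true, nullptr};
  String c = {true, &moved};
  std::vector<String*> table = {&a, &b, &c};
  UpdateTable(&table, DefaultUpdater);
  std::printf("entries kept: %zu\n", table.size());  // prints 2
  return 0;
}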
......@@ -943,6 +946,30 @@ class Heap : public AllStatic {
   static void RecordStats(HeapStats* stats);
+  // Copy block of memory from src to dst. Size of block should be aligned
+  // by pointer size.
+  static inline void CopyBlock(Object** dst, Object** src, int byte_size);
+  // Optimized version of memmove for blocks with pointer size aligned sizes and
+  // pointer size aligned addresses.
+  static inline void MoveBlock(Object** dst, Object** src, size_t byte_size);
+  // Check new space expansion criteria and expand semispaces if it was hit.
+  static void CheckNewSpaceExpansionCriteria();
+  static inline void IncrementYoungSurvivorsCounter(int survived) {
+    survived_since_last_expansion_ += survived;
+  }
+  static void UpdateNewSpaceReferencesInExternalStringTable(
+      ExternalStringTableUpdaterCallback updater_func);
+  // Helper function that governs the promotion policy from new space to
+  // old. If the object's old address lies below the new space's age
+  // mark or if we've already filled the bottom 1/16th of the to space,
+  // we try to promote this object.
+  static inline bool ShouldBePromoted(Address old_address, int object_size);
+  static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
  private:
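The comment on ShouldBePromoted above (now exposed in the public interface) describes the promotion policy: promote an object that already survived a scavenge (its address lies below the age mark), or promote everything once the bottom 1/16th of to space has been filled. The sketch below illustrates that predicate only; the NewSpaceModel struct is a stand-in for the real new space, and the declared object_size parameter is ignored here.

// Illustrative promotion predicate based on the comment above (not V8 code).
#include <cstdint>
#include <cstdio>

typedef std::uintptr_t Address;

struct NewSpaceModel {            // stand-in for the real new space
  Address bottom;                 // start of to space
  Address top;                    // current allocation top in to space
  Address age_mark;               // objects below this survived a scavenge
  std::uintptr_t capacity;        // to-space capacity in bytes
};

bool ShouldBePromoted(const NewSpaceModel& space, Address old_address) {
  // Promote survivors of an earlier scavenge...
  if (old_address < space.age_mark) return true;
  // ...or everything once the bottom 1/16th of to space is already filled.
  return (space.top - space.bottom) >= space.capacity / 16;
}

int main() {
  NewSpaceModel space = {0x10000, 0x10400, 0x10200, 0x8000};
  std::printf("%d %d\n",
              ShouldBePromoted(space, 0x10100),   // below age mark: promote
              ShouldBePromoted(space, 0x10300));  // young, not 1/16 full yet
  return 0;
}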
......@@ -1139,7 +1166,10 @@ class Heap : public AllStatic {
   // Performs a minor collection in new generation.
   static void Scavenge();
-  static void ScavengeExternalStringTable();
+  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
+      Object** pointer);
   static Address DoScavenge(ObjectVisitor* scavenge_visitor,
                             Address new_space_front);
......@@ -1157,11 +1187,6 @@ class Heap : public AllStatic {
                                  HeapObject* target,
                                  int size);
-  // Helper function that governs the promotion policy from new space to
-  // old. If the object's old address lies below the new space's age
-  // mark or if we've already filled the bottom 1/16th of the to space,
-  // we try to promote this object.
-  static inline bool ShouldBePromoted(Address old_address, int object_size);
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   // Record the copy of an object in the NewSpace's statistics.
   static void RecordCopiedObject(HeapObject* obj);
......@@ -1180,9 +1205,6 @@ class Heap : public AllStatic {
   // Slow part of scavenge object.
   static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
-  // Copy memory from src to dst.
-  static inline void CopyBlock(Object** dst, Object** src, int byte_size);
   // Initializes a function with a shared part and prototype.
   // Returns the function.
   // Note: this code was factored out of AllocateFunction such that
......
This diff is collapsed.
......@@ -407,26 +407,26 @@ class MarkCompactCollector: public AllStatic {
// Counters used for debugging the marking phase of mark-compact or
// mark-sweep collection.
// Number of live objects in Heap::to_space_.
static int live_young_objects_;
// Size of live objects in Heap::to_space_.
static int live_young_objects_size_;
// Number of live objects in Heap::old_pointer_space_.
static int live_old_pointer_objects_;
// Size of live objects in Heap::old_pointer_space_.
static int live_old_pointer_objects_size_;
// Number of live objects in Heap::old_data_space_.
static int live_old_data_objects_;
// Size of live objects in Heap::old_data_space_.
static int live_old_data_objects_size_;
// Number of live objects in Heap::code_space_.
static int live_code_objects_;
// Size of live objects in Heap::code_space_.
static int live_code_objects_size_;
// Number of live objects in Heap::map_space_.
static int live_map_objects_;
// Size of live objects in Heap::map_space_.
static int live_map_objects_size_;
// Number of live objects in Heap::cell_space_.
static int live_cell_objects_;
// Size of live objects in Heap::cell_space_.
static int live_cell_objects_size_;
// Number of live objects in Heap::lo_space_.
static int live_lo_objects_;
// Size of live objects in Heap::lo_space_.
static int live_lo_objects_size_;
// Number of live bytes in this collection.
static int live_bytes_;
......
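The MarkCompactCollector block above declares one count/size pair of debug counters per heap space, plus a running total of live bytes. Purely as an illustration of how such bookkeeping is typically maintained during marking (the collector's actual update sites are not part of this diff; the AllocationSpace identifiers and the UpdateLiveObjectCount helper below are assumptions, not V8 code):

// Illustrative per-space live-object counters (not code from this commit).
#include <cstdio>

enum AllocationSpace { NEW_SPACE, OLD_POINTER_SPACE, OLD_DATA_SPACE,
                       CODE_SPACE, MAP_SPACE, CELL_SPACE, LO_SPACE,
                       SPACE_COUNT };

static int live_objects_[SPACE_COUNT];       // live object count per space
static int live_objects_size_[SPACE_COUNT];  // live bytes per space
static int live_bytes_;                      // total live bytes

void UpdateLiveObjectCount(AllocationSpace space, int object_size) {
  live_objects_[space]++;
  live_objects_size_[space] += object_size;
  live_bytes_ += object_size;
}

int main() {
  UpdateLiveObjectCount(NEW_SPACE, 32);
  UpdateLiveObjectCount(CODE_SPACE, 128);
  std::printf("live bytes: %d\n", live_bytes_);  // prints 160
  return 0;
}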
......@@ -657,7 +657,7 @@ inline Dest BitCast(const Source& source) {
return dest;
}
} } // namespace v8::internal
#endif // V8_UTILS_H_