Commit cd5d72fd authored by Hannes Payer's avatar Hannes Payer Committed by Commit Bot

Reland "[heap] Clear from space after garbage collection."

Bug: chromium:829771
Change-Id: I829b4d40bdbe1474eb7f087059be3e58b154768c
Reviewed-on: https://chromium-review.googlesource.com/1106657
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54082}
parent c51bcd17
......@@ -785,6 +785,12 @@ DEFINE_BOOL(optimize_ephemerons, true,
DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_marking)
DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_visiting)
DEFINE_BOOL(young_generation_large_objects, false,
"allocates large objects by default in the young generation large "
"object space")
DEFINE_BOOL(clear_free_memory, true, "initialize free memory with 0")
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_BOOL(debug_code, DEBUG_BOOL,
"generate extra code (assertions) for debugging")
......
......@@ -933,7 +933,6 @@ void Heap::DeoptMarkedAllocationSites() {
void Heap::GarbageCollectionEpilogue() {
TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE);
// In release mode, we only zap the from space under heap verification.
if (Heap::ShouldZapGarbage()) {
ZapFromSpace();
}
......@@ -3895,10 +3894,9 @@ void Heap::VerifyCountersBeforeConcurrentSweeping() {
// Overwrites ("zaps") the used portion of every page of new space's
// from-space with the current zap value, so that stale pointers into
// from-space are easy to recognize. No-op if from-space is not committed.
void Heap::ZapFromSpace() {
  if (!new_space_->IsFromSpaceCommitted()) return;
  for (Page* page : PageRange(new_space_->from_space().first_page(), nullptr)) {
    // Only zap up to the high water mark: memory beyond it was never
    // allocated into, so there is nothing stale to overwrite there.
    memory_allocator()->ZapBlock(page->area_start(),
                                 page->HighWaterMark() - page->area_start(),
                                 ZapValue());
  }
}
......
......@@ -710,11 +710,15 @@ class Heap {
#ifdef VERIFY_HEAP
return FLAG_verify_heap;
#else
return false;
return FLAG_clear_free_memory;
#endif
#endif
}
// Returns the bit pattern used to overwrite freed ("zapped") memory:
// the cleared-memory value when --clear-free-memory is set, otherwise
// the recognizable debug zap pattern.
static uintptr_t ZapValue() {
  if (FLAG_clear_free_memory) {
    return kClearedFreeMemoryValue;
  }
  return kZapValue;
}
// Returns true iff |collector| operates only on the young generation.
static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
  switch (collector) {
    case SCAVENGER:
    case MINOR_MARK_COMPACTOR:
      return true;
    default:
      return false;
  }
}
......
......@@ -854,8 +854,8 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
}
if (Heap::ShouldZapGarbage()) {
ZapBlock(base, CodePageGuardStartOffset());
ZapBlock(base + CodePageAreaStartOffset(), commit_area_size);
ZapBlock(base, CodePageGuardStartOffset(), kZapValue);
ZapBlock(base + CodePageAreaStartOffset(), commit_area_size, kZapValue);
}
area_start = base + CodePageAreaStartOffset();
......@@ -873,7 +873,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
if (base == kNullAddress) return nullptr;
if (Heap::ShouldZapGarbage()) {
ZapBlock(base, Page::kObjectStartOffset + commit_area_size);
ZapBlock(base, Page::kObjectStartOffset + commit_area_size, kZapValue);
}
area_start = base + Page::kObjectStartOffset;
......@@ -1173,7 +1173,7 @@ bool MemoryAllocator::CommitBlock(Address start, size_t size) {
if (!CommitMemory(start, size)) return false;
if (Heap::ShouldZapGarbage()) {
ZapBlock(start, size);
ZapBlock(start, size, kZapValue);
}
isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
......@@ -1187,10 +1187,12 @@ bool MemoryAllocator::UncommitBlock(Address start, size_t size) {
return true;
}
void MemoryAllocator::ZapBlock(Address start, size_t size) {
void MemoryAllocator::ZapBlock(Address start, size_t size,
uintptr_t zap_value) {
DCHECK_EQ(start % kPointerSize, 0);
DCHECK_EQ(size % kPointerSize, 0);
for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) {
Memory::Address_at(start + s) = static_cast<Address>(kZapValue);
Memory::Address_at(start + s) = static_cast<Address>(zap_value);
}
}
......
......@@ -1414,9 +1414,9 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
// and false otherwise.
bool UncommitBlock(Address start, size_t size);
// Zaps a contiguous block of memory [start..(start+size)[ thus
// filling it up with a recognizable non-nullptr bit pattern.
void ZapBlock(Address start, size_t size);
// Zaps a contiguous block of memory [start..(start+size)[ with
// a given zap value.
void ZapBlock(Address start, size_t size, uintptr_t zap_value);
V8_WARN_UNUSED_RESULT bool CommitExecutableMemory(VirtualMemory* vm,
Address start,
......
......@@ -166,7 +166,8 @@ TEST(MemoryChunk) {
size_t initial_commit_area_size;
for (int i = 0; i < 100; i++) {
initial_commit_area_size = PseudorandomAreaSize();
initial_commit_area_size =
RoundUp(PseudorandomAreaSize(), CommitPageSize());
// With CodeRange.
const size_t code_range_size = 32 * MB;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment