Commit 8bef2652 authored by Hannes Payer, committed by Commit Bot

[heap] Add a CodePageCollectionMemoryModificationScope to collect unprotected code pages on the fly.

Bug: chromium:774108
Change-Id: I95bfe672df13a6b8f40ba258c9ea42c6b18f6138
Reviewed-on: https://chromium-review.googlesource.com/949482
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51765}
parent 903c631f
@@ -300,7 +300,14 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
    // NEW_SPACE is not allowed here.
    UNREACHABLE();
  }
  if (allocation.To(&object)) {
    if (space == CODE_SPACE) {
      // Unprotect the memory chunk of the object if it was not unprotected
      // already.
      UnprotectAndRegisterMemoryChunk(object);
      ZapCodeObject(object->address(), size_in_bytes);
    }
    OnAllocationEvent(object, size_in_bytes);
  }
@@ -644,6 +651,7 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
    : heap_(heap) {
  DCHECK(!heap_->unprotected_memory_chunks_registry_enabled());
  if (heap_->write_protect_code_memory()) {
    heap_->increment_code_space_memory_modification_scope_depth();
    heap_->code_space()->SetReadAndWritable();
@@ -659,6 +667,7 @@ CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
}

CodeSpaceMemoryModificationScope::~CodeSpaceMemoryModificationScope() {
  DCHECK(!heap_->unprotected_memory_chunks_registry_enabled());
  if (heap_->write_protect_code_memory()) {
    heap_->decrement_code_space_memory_modification_scope_depth();
    heap_->code_space()->SetReadAndExecutable();
@@ -673,6 +682,24 @@ CodeSpaceMemoryModificationScope::~CodeSpaceMemoryModificationScope() {
  }
}

CodePageCollectionMemoryModificationScope::
    CodePageCollectionMemoryModificationScope(Heap* heap)
    : heap_(heap) {
  if (heap_->write_protect_code_memory() &&
      !heap_->code_space_memory_modification_scope_depth()) {
    heap_->EnableUnprotectedMemoryChunksRegistry();
  }
}

CodePageCollectionMemoryModificationScope::
    ~CodePageCollectionMemoryModificationScope() {
  if (heap_->write_protect_code_memory() &&
      !heap_->code_space_memory_modification_scope_depth()) {
    heap_->ProtectUnprotectedMemoryChunks();
    heap_->DisableUnprotectedMemoryChunksRegistry();
  }
}
CodePageMemoryModificationScope::CodePageMemoryModificationScope(
    MemoryChunk* chunk)
    : chunk_(chunk),
...
@@ -233,7 +233,8 @@ Heap::Heap()
      use_tasks_(true),
      force_oom_(false),
      delay_sweeper_tasks_for_testing_(false),
      pending_layout_change_object_(nullptr),
      unprotected_memory_chunks_registry_enabled_(false)
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
      ,
      allocation_timeout_(0)
@@ -2162,6 +2163,27 @@ void Heap::ComputeFastPromotionMode(double survival_rate) {
  }
}
void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) {
  if (unprotected_memory_chunks_registry_enabled_ &&
      unprotected_memory_chunks_.insert(chunk).second) {
    chunk->SetReadAndWritable();
  }
}

void Heap::UnprotectAndRegisterMemoryChunk(HeapObject* object) {
  UnprotectAndRegisterMemoryChunk(MemoryChunk::FromAddress(object->address()));
}

void Heap::ProtectUnprotectedMemoryChunks() {
  DCHECK(unprotected_memory_chunks_registry_enabled_);
  for (auto chunk = unprotected_memory_chunks_.begin();
       chunk != unprotected_memory_chunks_.end(); chunk++) {
    CHECK(memory_allocator()->IsMemoryChunkExecutable(*chunk));
    (*chunk)->SetReadAndExecutable();
  }
  unprotected_memory_chunks_.clear();
}
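Note on the dedup above: std::unordered_set::insert returns a pair<iterator, bool> whose bool member is true only when the element was newly inserted, so UnprotectAndRegisterMemoryChunk flips a chunk to writable at most once per collection cycle. A tiny self-contained demonstration of that contract (illustrative only, not V8 code):

#include <cassert>
#include <unordered_set>

int main() {
  std::unordered_set<int> chunks;
  // .second is true only on the first insertion of a given key;
  // this is what gates the call to SetReadAndWritable() above.
  assert(chunks.insert(42).second);   // Newly inserted.
  assert(!chunks.insert(42).second);  // Already present: no second unprotect.
  return 0;
}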
String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
                                                                Object** p) {
  MapWord first_word = HeapObject::cast(*p)->map_word();
@@ -3187,6 +3209,7 @@ AllocationResult Heap::AllocateCode(int object_size, Movability movability) {
  HeapObject* result = nullptr;
  if (!allocation.To(&result)) return allocation;

  if (movability == kImmovable) {
    Address address = result->address();
    MemoryChunk* chunk = MemoryChunk::FromAddress(address);
@@ -3205,6 +3228,9 @@ AllocationResult Heap::AllocateCode(int object_size, Movability movability) {
      allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
      if (!allocation.To(&result)) return allocation;
      OnAllocationEvent(result, object_size);
      // The old allocation was discarded. We have to unprotect the new
      // allocation target.
      UnprotectAndRegisterMemoryChunk(result);
    }
  }
}
@@ -3239,7 +3265,8 @@ AllocationResult Heap::AllocateCode(
  int object_size = Code::SizeFor(RoundUp(body_size, kObjectAlignment));

  Code* code = nullptr;
  CodePageCollectionMemoryModificationScope code_allocation(this);
  AllocationResult allocation = AllocateCode(object_size, movability);
  if (!allocation.To(&code)) return allocation;
@@ -4976,6 +5003,14 @@ void Heap::ZapFromSpace() {
  }
}

void Heap::ZapCodeObject(Address start_address, int size_in_bytes) {
#ifdef DEBUG
  for (int i = 0; i < size_in_bytes / kPointerSize; i++) {
    reinterpret_cast<Object**>(start_address)[i] = Smi::FromInt(kCodeZapValue);
  }
#endif
}
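ZapCodeObject fills each pointer-sized slot of a new code object with a recognizable pattern in debug builds, so reads of not-yet-initialized code memory stand out. A standalone sketch of the same technique (ZapRegion and kZapValue are illustrative names, not the V8 identifiers):

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative zap pattern; V8 writes Smi-encoded kCodeZapValue instead.
constexpr std::intptr_t kZapValue = 0x0debdebd;

void ZapRegion(void* start, std::size_t size_in_bytes) {
  // Stamp every pointer-sized slot so uninitialized reads are easy to
  // recognize in a debugger. (V8 compiles its loop only under #ifdef DEBUG.)
  auto* slots = static_cast<std::intptr_t*>(start);
  for (std::size_t i = 0; i < size_in_bytes / sizeof(std::intptr_t); i++) {
    slots[i] = kZapValue;
  }
}

int main() {
  std::vector<std::intptr_t> buffer(8);
  ZapRegion(buffer.data(), buffer.size() * sizeof(std::intptr_t));
  return 0;
}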
void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
  IterateStrongRoots(v, mode);
  IterateWeakRoots(v, mode);
...
@@ -8,6 +8,7 @@
#include <cmath>
#include <map>
#include <unordered_map>
#include <unordered_set>
#include <vector>
// Clients of this interface shouldn't depend on lots of heap internals.
@@ -818,6 +819,23 @@ class Heap {
    code_space_memory_modification_scope_depth_--;
  }

  void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
  void UnprotectAndRegisterMemoryChunk(HeapObject* object);
  void ProtectUnprotectedMemoryChunks();

  void EnableUnprotectedMemoryChunksRegistry() {
    unprotected_memory_chunks_registry_enabled_ = true;
  }

  void DisableUnprotectedMemoryChunksRegistry() {
    unprotected_memory_chunks_registry_enabled_ = false;
  }

  bool unprotected_memory_chunks_registry_enabled() {
    return unprotected_memory_chunks_registry_enabled_;
  }

  inline HeapState gc_state() { return gc_state_; }
  void SetGCState(HeapState state);
@@ -1861,6 +1879,9 @@ class Heap {
  // Fill in bogus values in from space
  void ZapFromSpace();

  // Zaps the memory of a code object.
  void ZapCodeObject(Address start_address, int size_in_bytes);
  // Deopts all code that contains allocation instructions that are tenured or
  // not tenured. Moreover, it clears the pretenuring allocation site
  // statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
@@ -2601,6 +2622,9 @@ class Heap {
  HeapObject* pending_layout_change_object_;

  std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
  bool unprotected_memory_chunks_registry_enabled_;

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
@@ -2707,6 +2731,18 @@ class CodeSpaceMemoryModificationScope {
  Heap* heap_;
};

// The CodePageCollectionMemoryModificationScope can only be used by the main
// thread. It will not be enabled if a CodeSpaceMemoryModificationScope is
// already active.
class CodePageCollectionMemoryModificationScope {
 public:
  explicit inline CodePageCollectionMemoryModificationScope(Heap* heap);
  inline ~CodePageCollectionMemoryModificationScope();

 private:
  Heap* heap_;
};
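A compilable model of how this RAII pair behaves end to end (all names below are simplified stand-ins for the V8 API; the real scope additionally checks write_protect_code_memory() and stays inert while a CodeSpaceMemoryModificationScope is active):

#include <unordered_set>

struct Chunk {
  void SetReadAndWritable() {}   // Stand-ins for the real page-protection
  void SetReadAndExecutable() {} // transitions.
};

struct Heap {
  std::unordered_set<Chunk*> unprotected_chunks;
  bool registry_enabled = false;

  void UnprotectAndRegister(Chunk* chunk) {
    // Collect only while a scope is active; unprotect each chunk once.
    if (registry_enabled && unprotected_chunks.insert(chunk).second) {
      chunk->SetReadAndWritable();
    }
  }

  void ProtectAll() {
    for (Chunk* chunk : unprotected_chunks) chunk->SetReadAndExecutable();
    unprotected_chunks.clear();
  }
};

// RAII: enable the registry for the scope's lifetime, then re-protect
// everything that was unprotected while it was alive.
class CodePageCollectionScope {
 public:
  explicit CodePageCollectionScope(Heap* heap) : heap_(heap) {
    heap_->registry_enabled = true;
  }
  ~CodePageCollectionScope() {
    heap_->ProtectAll();
    heap_->registry_enabled = false;
  }

 private:
  Heap* heap_;
};

int main() {
  Heap heap;
  Chunk page;
  {
    CodePageCollectionScope scope(&heap);
    heap.UnprotectAndRegister(&page);  // Page becomes writable on the fly.
  }  // Scope exit: page is read+execute again and the registry is empty.
  return 0;
}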
// The CodePageMemoryModificationScope does not check if transitions to
// writeable and back to executable are actually allowed, i.e. the MemoryChunk
// was registered to be executable. It can be used by concurrent threads.
...
@@ -1774,6 +1774,13 @@ void PagedSpace::FreeLinearAllocationArea() {
  InlineAllocationStep(current_top, nullptr, nullptr, 0);
  SetTopAndLimit(nullptr, nullptr);
  DCHECK_GE(current_limit, current_top);

  // The code page of the linear allocation area needs to be unprotected
  // because we are going to write a filler into that memory area below.
  if (identity() == CODE_SPACE) {
    heap_->UnprotectAndRegisterMemoryChunk(
        MemoryChunk::FromAddress(current_top));
  }
  Free(current_top, current_limit - current_top,
       SpaceAccountingMode::kSpaceAccounted);
}
@@ -1849,13 +1856,6 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
  DCHECK_GE(new_node_size, size_in_bytes);

#ifdef DEBUG
  for (size_t i = 0; i < size_in_bytes / kPointerSize; i++) {
    reinterpret_cast<Object**>(new_node->address())[i] =
        Smi::FromInt(kCodeZapValue);
  }
#endif

  // The old-space-step might have finished sweeping and restarted marking.
  // Verify that it did not turn the page of the new node into an evacuation
  // candidate.
@@ -1863,7 +1863,8 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
  // Memory in the linear allocation area is counted as allocated. We may free
  // a little of this again immediately - see below.
  Page* page = Page::FromAddress(new_node->address());
  IncreaseAllocatedBytes(new_node_size, page);

  Address start = new_node->address();
  Address end = new_node->address() + new_node_size;
@@ -1871,6 +1872,9 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
  DCHECK_LE(limit, end);
  DCHECK_LE(size_in_bytes, limit - start);
  if (limit != end) {
    if (identity() == CODE_SPACE) {
      heap_->UnprotectAndRegisterMemoryChunk(page);
    }
    Free(limit, end - limit, SpaceAccountingMode::kSpaceAccounted);
  }
  SetLinearAllocationArea(start, limit);
...