Commit 0e15a7a4 authored by Michael Starzinger, committed by Commit Bot

Annotate all sites that still mutate {Code} objects.

This adds code-space modification scopes to all sites that still rely on
mutation of {Code} objects after allocation. Some of these scopes currently
sit in performance-critical places and might cause regressions if the
protection were enabled in its current form.

R=clemensh@chromium.org
BUG=v8:6792

Change-Id: I8d511e0e452324dae027e50a9da8e6f77224b86f
Reviewed-on: https://chromium-review.googlesource.com/751521
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49147}
parent 0dbc8130
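
The pattern applied at every annotated site in the hunks below is an RAII guard: a CodeSpaceMemoryModificationScope is constructed on the stack before the {Code} object is mutated, which lifts the write protection on code-space pages, and its destructor restores read+execute when the scope ends. The sketch below only models that idiom; the Heap stand-in and its UnprotectCodeSpace/ProtectCodeSpace methods are illustrative assumptions, not the actual V8 implementation (which toggles protection per MemoryChunk, as the later hunks show).

```cpp
#include <cstdio>

// Stand-in heap type for illustration only; the real V8 Heap toggles
// protection per code-space MemoryChunk rather than globally.
struct Heap {
  void UnprotectCodeSpace() { std::puts("code space -> read+write"); }
  void ProtectCodeSpace() { std::puts("code space -> read+execute"); }
};

// Sketch of the RAII idiom: code pages become writable for the lifetime of
// the scope and are re-protected when the scope is destroyed.
class CodeSpaceMemoryModificationScope {
 public:
  explicit CodeSpaceMemoryModificationScope(Heap* heap) : heap_(heap) {
    heap_->UnprotectCodeSpace();
  }
  ~CodeSpaceMemoryModificationScope() { heap_->ProtectCodeSpace(); }

 private:
  Heap* heap_;
};

int main() {
  Heap heap;
  {
    CodeSpaceMemoryModificationScope modification_scope(&heap);
    // ... mutate a Code object here, e.g. code->set_builtin_index(index) ...
  }  // Write protection is restored here.
  return 0;
}
```
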
......@@ -143,6 +143,9 @@ Code* BuildWithCodeStubAssemblerCS(Isolate* isolate,
void SetupIsolateDelegate::AddBuiltin(Builtins* builtins, int index,
Code* code) {
// TODO(mstarzinger,6792): This code-space modification section should be
// moved into {Heap} eventually and a safe wrapper be provided.
CodeSpaceMemoryModificationScope modification_scope(code->GetHeap());
builtins->builtins_[index] = code;
code->set_builtin_index(index);
}
......@@ -161,6 +164,9 @@ void SetupIsolateDelegate::PopulateWithPlaceholders(Isolate* isolate) {
}
void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
// TODO(mstarzinger,6792): This code-space modification section should be
// moved into {Heap} eventually and a safe wrapper be provided.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// Replace references from all code objects to placeholders.
Builtins* builtins = isolate->builtins();
DisallowHeapAllocation no_gc;
......@@ -273,6 +279,10 @@ void SetupIsolateDelegate::SetupBuiltinsInternal(Isolate* isolate) {
BUILTIN_EXCEPTION_CAUGHT_PREDICTION_LIST(SET_EXCEPTION_CAUGHT_PREDICTION)
#undef SET_EXCEPTION_CAUGHT_PREDICTION
// TODO(mstarzinger,6792): This code-space modification section should be
// moved into {Heap} eventually and a safe wrapper be provided.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
#define SET_CODE_NON_TAGGED_PARAMS(Name) \
Code::cast(builtins->builtins_[Builtins::k##Name]) \
->set_has_tagged_params(false);
......
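
The TODO repeated in the three hunks above asks for this section to move into {Heap} behind a safe wrapper. One hypothetical shape for such a wrapper, purely as a sketch (WithCodeSpaceWritable and the stub types are invented names, not V8 API): the heap opens and closes the modification scope itself, so call sites like AddBuiltin cannot forget to re-protect code space.

```cpp
#include <functional>
#include <iostream>

// Stub type for illustration; the real Builtins/Code classes are richer.
struct Code {
  int builtin_index = -1;
};

class Heap {
 public:
  // Hypothetical "safe wrapper": the heap owns the unprotect/re-protect
  // bracket, so callers never open a modification scope themselves.
  void WithCodeSpaceWritable(const std::function<void()>& mutate) {
    std::cout << "unprotect code space\n";
    mutate();
    std::cout << "re-protect code space\n";
  }
};

// AddBuiltin from the hunk above could then shrink to something like this.
void AddBuiltin(Heap* heap, Code* builtins[], int index, Code* code) {
  heap->WithCodeSpaceWritable([&] {
    builtins[index] = code;
    code->builtin_index = index;
  });
}

int main() {
  Heap heap;
  Code code;
  Code* builtins[8] = {};
  AddBuiltin(&heap, builtins, 0, &code);
  return 0;
}
```
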
......@@ -4389,6 +4389,8 @@ Handle<Code> CompileWasmToJSWrapper(
Handle<Object> index_handle = isolate->factory()->NewNumberFromInt(
OffsetForImportData(index, WasmGraphBuilder::kFunction));
deopt_data->set(1, *index_handle);
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
code->set_deoptimization_data(*deopt_data);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_opt_code && !code.is_null()) {
......@@ -4536,6 +4538,9 @@ Handle<Code> CompileWasmInterpreterEntry(Isolate* isolate, uint32_t func_index,
}
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
Handle<FixedArray> deopt_data = isolate->factory()->NewFixedArray(1, TENURED);
Handle<WeakCell> weak_instance = isolate->factory()->NewWeakCell(instance);
deopt_data->set(0, *weak_instance);
......@@ -4892,6 +4897,8 @@ void WasmCompilationUnit::PackProtectedInstructions(Handle<Code> code) const {
fn_protected->set(Code::kTrapDataSize * i + Code::kTrapLandingOffset,
Smi::FromInt(instruction.landing_offset));
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
code->set_protected_instructions(*fn_protected);
}
......
......@@ -237,6 +237,10 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
}
#endif
// TODO(mstarzinger,6792): This code-space modification section should be
// moved into {Heap} eventually and a safe wrapper be provided.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// We will use this set to mark those Code objects that are marked for
// deoptimization and have not been found in stack frames.
std::set<Code*> codes;
......
......@@ -5361,6 +5361,7 @@ void Heap::DisableInlineAllocation() {
// Update inline allocation limit for old spaces.
PagedSpaces spaces(this);
CodeSpaceMemoryModificationScope modification_scope(this);
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
space->EmptyAllocationInfo();
......
......@@ -542,7 +542,7 @@ void MemoryChunk::SetReadAndExecutable() {
return;
}
write_unprotect_counter_--;
DCHECK_LE(write_unprotect_counter_, 1);
DCHECK_LE(write_unprotect_counter_, 2);
if (write_unprotect_counter_ == 0) {
Address protect_start =
address() + MemoryAllocator::CodePageAreaStartOffset();
......@@ -563,7 +563,7 @@ void MemoryChunk::SetReadAndWritable() {
// protection mode has to be atomic.
base::LockGuard<base::Mutex> guard(page_protection_change_mutex_);
write_unprotect_counter_++;
DCHECK_LE(write_unprotect_counter_, 2);
DCHECK_LE(write_unprotect_counter_, 3);
if (write_unprotect_counter_ == 1) {
Address unprotect_start =
address() + MemoryAllocator::CodePageAreaStartOffset();
......@@ -584,7 +584,7 @@ void MemoryChunk::SetReadWriteAndExecutable() {
// protection mode has to be atomic.
base::LockGuard<base::Mutex> guard(page_protection_change_mutex_);
write_unprotect_counter_++;
DCHECK_LE(write_unprotect_counter_, 2);
DCHECK_LE(write_unprotect_counter_, 3);
Address unprotect_start =
address() + MemoryAllocator::CodePageAreaStartOffset();
size_t unprotect_size = size() - MemoryAllocator::CodePageAreaStartOffset();
......
......@@ -699,8 +699,8 @@ class MemoryChunk {
// times a component requested this page to be read+writeable. The
// counter is decremented when a component resets to read+executable.
// If Value() == 0 => The memory is read and executable.
// If Value() >= 1 => The Memory is read and writable.
// The maximum value can right now only be 2.
// If Value() >= 1 => The Memory is read and writable (and maybe executable).
// The maximum value can right now only be 3.
uintptr_t write_unprotect_counter_;
// Byte allocated on the page, which includes all objects on the page
......
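
The MemoryChunk hunks above relax the DCHECK bounds because, with the scopes added by this commit, up to three unprotection requests can now be active on the same page at once. As a stand-alone model of the counter (not the MemoryChunk code, which also takes a mutex and calls the OS page-protection APIs): the page flips to read+write on the 0 -> 1 transition and back to read+execute on the 1 -> 0 transition.

```cpp
#include <cassert>

// Stand-alone model of write_unprotect_counter_ as documented in the header
// comment above; protection changes are indicated by comments only.
class PageProtection {
 public:
  void SetReadAndWritable() {
    ++write_unprotect_counter_;
    assert(write_unprotect_counter_ <= 3);
    if (write_unprotect_counter_ == 1) {
      // The 0 -> 1 transition would flip the page to read+write here.
    }
  }
  void SetReadAndExecutable() {
    assert(write_unprotect_counter_ > 0);
    --write_unprotect_counter_;
    if (write_unprotect_counter_ == 0) {
      // The 1 -> 0 transition would flip the page back to read+execute here.
    }
  }

 private:
  unsigned write_unprotect_counter_ = 0;
};

int main() {
  PageProtection page;
  page.SetReadAndWritable();    // outer unprotection request
  page.SetReadAndWritable();    // nested CodeSpaceMemoryModificationScope
  page.SetReadAndExecutable();  // inner scope ends, page stays writable
  page.SetReadAndExecutable();  // outer request ends, page is re-protected
  return 0;
}
```
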
......@@ -242,6 +242,9 @@ MaybeHandle<FixedArray> WasmCompiledModuleSerializer::DeserializeWasmModule(
return nothing;
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
MaybeHandle<WasmCompiledModule> maybe_result =
ObjectDeserializer::DeserializeWasmCompiledModule(isolate, &scd,
wire_bytes);
......@@ -260,6 +263,8 @@ void WasmCompiledModuleSerializer::SerializeCodeObject(
switch (kind) {
case Code::WASM_FUNCTION:
case Code::JS_TO_WASM_FUNCTION: {
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate()->heap());
// Because the trap handler index is not meaningful across copies and
// serializations, we need to serialize it as kInvalidIndex. We do this by
// saving the old value, setting the index to kInvalidIndex and then
......
......@@ -234,6 +234,8 @@ class JSToWasmWrapperCache {
int cached_idx = sig_map_.Find(func->sig);
if (cached_idx >= 0) {
Handle<Code> code = isolate->factory()->CopyCode(code_cache_[cached_idx]);
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// Now patch the call to wasm code.
for (RelocIterator it(*code, RelocInfo::kCodeTargetMask);; it.next()) {
DCHECK(!it.done());
......@@ -402,6 +404,8 @@ static void InstanceFinalizer(const v8::WeakCallbackInfo<void>& data) {
WeakCell* weak_wasm_module = compiled_module->ptr_to_weak_wasm_module();
if (trap_handler::UseTrapHandler()) {
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
Handle<FixedArray> code_table = compiled_module->code_table();
for (int i = 0; i < code_table->length(); ++i) {
Handle<Code> code = code_table->GetValueChecked<Code>(isolate, i);
......@@ -658,6 +662,8 @@ Handle<Code> CompileLazy(Isolate* isolate) {
DCHECK(exp_table->get(exp_index) == *lazy_compile_code);
exp_table->set(exp_index, *compiled_code);
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// After processing, remove the list of exported entries, such that we don't
// do the patching redundantly.
Handle<FixedArray> new_deopt_data =
......@@ -741,6 +747,9 @@ void LazyCompilationOrchestrator::CompileFunction(
CHECK(!thrower.error());
Handle<Code> code = maybe_code.ToHandleChecked();
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
Handle<FixedArray> deopt_data = isolate->factory()->NewFixedArray(2, TENURED);
Handle<WeakCell> weak_instance = isolate->factory()->NewWeakCell(instance);
// TODO(wasm): Introduce constants for the indexes in wasm deopt data.
......@@ -836,6 +845,8 @@ Handle<Code> LazyCompilationOrchestrator::CompileLazy(
if (is_js_to_wasm || patch_caller) {
DisallowHeapAllocation no_gc;
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// Now patch the code object with all functions which are now compiled.
int idx = 0;
for (RelocIterator it(*caller, RelocInfo::kCodeTargetMask); !it.done();
......@@ -1236,6 +1247,10 @@ Handle<Code> EnsureExportedLazyDeoptData(Isolate* isolate,
code->builtin_index() == Builtins::kIllegal);
return code;
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// deopt_data:
// #0: weak instance
// #1: func_index
......@@ -1275,6 +1290,9 @@ Handle<Code> EnsureTableExportLazyDeoptData(
EnsureExportedLazyDeoptData(isolate, instance, code_table, func_index);
if (code->builtin_index() != Builtins::kWasmCompileLazy) return code;
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// deopt_data:
// #0: weak instance
// #1: func_index
......@@ -1334,6 +1352,8 @@ Handle<Code> MakeWasmToWasmWrapper(
Handle<Code> wrapper_code = compiler::CompileWasmToWasmWrapper(
isolate, wasm_code, *sig, imported_function->function_index(),
new_wasm_context_address);
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// Set the deoptimization data for the WasmToWasm wrapper. This is
// needed by the interpreter to find the imported instance for
// a cross-instance call.
......@@ -1665,6 +1685,9 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
// If this code object has deoptimization data, then we need a
// unique copy to attach updated deoptimization data.
if (orig_code->deoptimization_data()->length() > 0) {
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(
isolate_->heap());
Handle<Code> code = factory->CopyCode(orig_code);
Handle<FixedArray> deopt_data =
factory->NewFixedArray(2, TENURED);
......@@ -1847,6 +1870,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
i < num_functions; ++i) {
Handle<Code> code = handle(Code::cast(code_table->get(i)), isolate_);
if (code->kind() == Code::WASM_FUNCTION) {
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
Handle<FixedArray> deopt_data = factory->NewFixedArray(2, TENURED);
deopt_data->set(0, *weak_link);
deopt_data->set(1, Smi::FromInt(i));
......
......@@ -111,6 +111,9 @@ bool CodeSpecialization::ApplyToWholeInstance(
bool changed = false;
int func_index = module->num_imported_functions;
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(instance->GetHeap());
// Patch all wasm functions.
for (int num_wasm_functions = static_cast<int>(wasm_functions->size());
func_index < num_wasm_functions; ++func_index) {
......@@ -187,6 +190,9 @@ bool CodeSpecialization::ApplyToWasmCode(Code* code,
std::unique_ptr<PatchDirectCallsHelper> patch_direct_calls_helper;
bool changed = false;
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(code->GetHeap());
for (RelocIterator it(code, reloc_mode); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
switch (mode) {
......
......@@ -688,6 +688,8 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
DCHECK_NULL(code_to_relocate.Find(old_code));
code_to_relocate.Set(old_code, new_code);
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
RedirectCallsitesInInstance(isolate, *instance, code_to_relocate);
}
......
......@@ -99,6 +99,9 @@ void UnpackAndRegisterProtectedInstructions(Isolate* isolate,
unpacked.clear();
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// TODO(eholk): if index is negative, fail.
DCHECK_LE(0, index);
code->set_trap_handler_index(Smi::FromInt(index));
......
......@@ -171,6 +171,7 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
}
void SimulateFullSpace(v8::internal::PagedSpace* space) {
CodeSpaceMemoryModificationScope modification_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
......
......@@ -39,19 +39,24 @@ WASM_COMPILED_EXEC_TEST(RunPatchWasmContext) {
WasmContext new_wasm_context = {0, 0,
reinterpret_cast<byte*>(new_global_data)};
// Patch in a new WasmContext that points to the new global data.
int filter = 1 << RelocInfo::WASM_CONTEXT_REFERENCE;
bool patched = false;
Handle<Code> code = r.GetWrapperCode();
for (RelocIterator it(*code, filter); !it.done(); it.next()) {
CHECK_EQ(old_wasm_context_address, it.rinfo()->wasm_context_reference());
it.rinfo()->set_wasm_context_reference(
isolate, reinterpret_cast<Address>(&new_wasm_context));
patched = true;
{
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// Patch in a new WasmContext that points to the new global data.
int filter = 1 << RelocInfo::WASM_CONTEXT_REFERENCE;
bool patched = false;
Handle<Code> code = r.GetWrapperCode();
for (RelocIterator it(*code, filter); !it.done(); it.next()) {
CHECK_EQ(old_wasm_context_address, it.rinfo()->wasm_context_reference());
it.rinfo()->set_wasm_context_reference(
isolate, reinterpret_cast<Address>(&new_wasm_context));
patched = true;
}
CHECK(patched);
Assembler::FlushICache(isolate, code->instruction_start(),
code->instruction_size());
}
CHECK(patched);
Assembler::FlushICache(isolate, code->instruction_start(),
code->instruction_size());
// Run with the new global data.
CHECK_EQ(115, r.Call(115));
......
......@@ -421,6 +421,9 @@ void WasmFunctionCompiler::Build(const byte* start, const byte* end) {
CHECK(!thrower.error());
Handle<Code> code = maybe_code.ToHandleChecked();
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate()->heap());
// Manually add the deoptimization info that would otherwise be added
// during instantiation. Deopt data holds <WeakCell<wasm_instance>,
// func_index>.
......