Commit ff64dfa0 authored by Ben L. Titzer, committed by Commit Bot

[wasm] Improve patching behavior for lazy compilation

This CL fixes the pathological O(n^2) patching behavior that
was introduced when simplifying the wasm instance/context data
structures. It introduces a per-instance reverse mapping from
function indexes to the import and indirect function table
entries in which they appear. The mapping is created lazily and
rebuilt in response to too many failed lookups, which makes it
robust to future table mutations.
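
For orientation, a minimal standalone sketch of that reverse-mapping idea
follows; the types (Entry, Tables, ReverseMapping) are simplified stand-ins
for illustration only, not the actual V8 classes; the real implementation is
the IndirectPatcher class added in the diff below.

// Sketch only: simplified, hypothetical types standing in for the V8 ones.
#include <cstddef>
#include <cstdint>
#include <unordered_map>
#include <vector>

struct Entry {
  int func_index;    // which wasm function this entry refers to
  uintptr_t target;  // current call target (e.g. a lazy stub address)
};

struct Tables {
  std::vector<Entry> imports;   // imported function entries
  std::vector<Entry> indirect;  // indirect function table entries
};

class ReverseMapping {
 public:
  // Replace {old_target} with {new_target} in every entry recorded for
  // {func_index}. The mapping is built lazily and rebuilt after too many
  // calls that patched nothing (e.g. because the tables were mutated).
  void Patch(Tables& tables, int func_index, uintptr_t old_target,
             uintptr_t new_target) {
    if (mapping_.empty() || misses_ >= kMaxMisses) Build(tables);
    int patched = 0;
    for (int slot : mapping_[func_index]) {
      // Negative slots encode import entries as -1 - i; non-negative slots
      // are indirect function table indices.
      Entry& entry =
          slot < 0 ? tables.imports[-1 - slot] : tables.indirect[slot];
      if (entry.target == old_target) {
        entry.target = new_target;
        patched++;
      }
    }
    if (patched == 0) misses_++;
  }

 private:
  void Build(const Tables& tables) {
    mapping_.clear();
    misses_ = 0;
    for (size_t i = 0; i < tables.imports.size(); i++) {
      mapping_[tables.imports[i].func_index].push_back(-1 -
                                                       static_cast<int>(i));
    }
    for (size_t i = 0; i < tables.indirect.size(); i++) {
      mapping_[tables.indirect[i].func_index].push_back(static_cast<int>(i));
    }
  }

  static constexpr int kMaxMisses = 5;  // rebuild threshold, as in the CL
  std::unordered_map<int, std::vector<int>> mapping_;
  int misses_ = 0;
};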

This CL also fixes a bug where the anonymous lazy compile stub
was not being used for direct calls, confusing the indirect
call patching mechanism.
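
The cloning side of this change is centralized in
NativeModule::GetIndirectlyCallableCode in the diff below. A minimal sketch
of that on-demand, per-function stub cloning idea, again with simplified
stand-in types (Code, Module) rather than the real V8 classes:

// Sketch only: simplified, hypothetical types standing in for the V8 ones.
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

struct Code {
  bool is_lazy_stub = false;
  bool is_anonymous = false;  // anonymous = shared, not tied to an index
  int index = -1;
};

class Module {
 public:
  Module(size_t function_count, Code* shared_lazy_stub)
      : code_table_(function_count, shared_lazy_stub) {}

  Code* GetCode(uint32_t func_index) { return code_table_[func_index]; }

  // Return code that is safe to place in import/indirect function tables:
  // if the slot still holds the shared anonymous lazy stub, hand out a
  // per-function clone so patching can later tell which function the stub
  // stands for.
  Code* GetIndirectlyCallableCode(uint32_t func_index) {
    Code* code = GetCode(func_index);
    if (code == nullptr || !code->is_lazy_stub || !code->is_anonymous) {
      return code;  // already compiled, or already a per-function stub
    }
    if (!lazy_compile_stubs_) {
      lazy_compile_stubs_ =
          std::make_unique<std::vector<std::unique_ptr<Code>>>(
              code_table_.size());
    }
    std::unique_ptr<Code>& slot = (*lazy_compile_stubs_)[func_index];
    if (!slot) {
      // Clone once per function index and reuse the clone afterwards.
      slot = std::make_unique<Code>(*code);
      slot->is_anonymous = false;
      slot->index = static_cast<int>(func_index);
    }
    return slot.get();
  }

 private:
  std::vector<Code*> code_table_;
  std::unique_ptr<std::vector<std::unique_ptr<Code>>> lazy_compile_stubs_;
};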

R=clemensh@chromium.org,mstarzinger@chromium.org

Bug: v8:7424, chromium:830558
Change-Id: Ice0212593b31eb64687a3d52bd238020682a857f
Reviewed-on: https://chromium-review.googlesource.com/1004294
Commit-Queue: Ben Titzer <titzer@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52526}
parent 3c2a6933
@@ -255,6 +255,7 @@ class InstanceBuilder {
const std::shared_ptr<Counters>& async_counters() const {
return async_counters_;
}
Counters* counters() const { return async_counters().get(); }
bool use_trap_handler() const { return compiled_module_->use_trap_handler(); }
@@ -341,6 +342,115 @@ MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject(
return {};
}
// A helper class to prevent pathological patching behavior for indirect
// references to code which must be updated after lazy compiles.
// Utilizes a reverse mapping to prevent O(n^2) behavior.
class IndirectPatcher {
public:
void Patch(Handle<WasmInstanceObject> caller_instance,
Handle<WasmInstanceObject> target_instance, int func_index,
Address old_target, const WasmCode* new_code) {
TRACE_LAZY(
"IndirectPatcher::Patch(caller=%p, target=%p, func_index=%i, "
"old_target=%p, "
"new_code=%p)\n",
*caller_instance, *target_instance, func_index, old_target, new_code);
if (mapping_.size() == 0 || misses_ >= kMaxMisses) {
BuildMapping(caller_instance);
}
// Patch entries for the given function index.
WasmCodeManager* code_manager =
caller_instance->GetIsolate()->wasm_engine()->code_manager();
USE(code_manager);
auto& entries = mapping_[func_index];
int patched = 0;
for (auto index : entries) {
if (index < 0) {
// Imported function entry.
int i = -1 - index;
auto entry = caller_instance->imported_function_entry_at(i);
if (entry.target() == old_target) {
DCHECK_EQ(
func_index,
code_manager->GetCodeFromStartAddress(entry.target())->index());
entry.set(target_instance, new_code);
patched++;
}
} else {
// Indirect function table entry.
int i = index;
auto entry = caller_instance->indirect_function_table_entry_at(i);
if (entry.target() == old_target) {
DCHECK_EQ(
func_index,
code_manager->GetCodeFromStartAddress(entry.target())->index());
entry.set(entry.sig_id(), target_instance, new_code);
patched++;
}
}
}
if (patched == 0) misses_++;
}
private:
void BuildMapping(Handle<WasmInstanceObject> caller_instance) {
mapping_.clear();
misses_ = 0;
TRACE_LAZY("BuildMapping for (caller=%p)...\n", *caller_instance);
Isolate* isolate = caller_instance->GetIsolate();
WasmCodeManager* code_manager = isolate->wasm_engine()->code_manager();
uint32_t num_imported_functions =
caller_instance->module()->num_imported_functions;
// Process the imported function entries.
for (unsigned i = 0; i < num_imported_functions; i++) {
auto entry = caller_instance->imported_function_entry_at(i);
WasmCode* code = code_manager->GetCodeFromStartAddress(entry.target());
if (code->kind() != WasmCode::kLazyStub) continue;
TRACE_LAZY(" +import[%u] -> #%d (%p)\n", i, code->index(),
code->instructions().start());
DCHECK(!entry.is_js_receiver_entry());
Handle<WasmInstanceObject> target_instance(entry.instance(), isolate);
WasmCode* new_code =
target_instance->compiled_module()->GetNativeModule()->GetCode(
code->index());
if (new_code->kind() != WasmCode::kLazyStub) {
// Patch an imported function entry which is already compiled.
entry.set(target_instance, new_code);
} else {
int key = code->index();
int index = -1 - i;
mapping_[key].push_back(index);
}
}
// Process the indirect function table entries.
size_t ift_size = caller_instance->indirect_function_table_size();
for (unsigned i = 0; i < ift_size; i++) {
auto entry = caller_instance->indirect_function_table_entry_at(i);
if (entry.target() == nullptr) continue; // null IFT entry
WasmCode* code = code_manager->GetCodeFromStartAddress(entry.target());
if (code->kind() != WasmCode::kLazyStub) continue;
TRACE_LAZY(" +indirect[%u] -> #%d (lazy:%p)\n", i, code->index(),
code->instructions().start());
Handle<WasmInstanceObject> target_instance(entry.instance(), isolate);
WasmCode* new_code =
target_instance->compiled_module()->GetNativeModule()->GetCode(
code->index());
if (new_code->kind() != WasmCode::kLazyStub) {
// Patch an indirect function table entry which is already compiled.
entry.set(entry.sig_id(), target_instance, new_code);
} else {
int key = code->index();
int index = i;
mapping_[key].push_back(index);
}
}
}
static constexpr int kMaxMisses = 5; // maximum misses before rebuilding
std::unordered_map<int, std::vector<int>> mapping_;
int misses_ = 0;
};
compiler::ModuleEnv CreateModuleEnvFromCompiledModule(
Isolate* isolate, Handle<WasmCompiledModule> compiled_module) {
DisallowHeapAllocation no_gc;
@@ -709,23 +819,16 @@ Address CompileLazy(Isolate* isolate,
DCHECK_NOT_NULL(wasm_caller_code);
Handle<WasmInstanceObject> caller_instance(
WasmInstanceObject::GetOwningInstance(wasm_caller_code), isolate);
WasmModule* module = caller_instance->compiled_module()->shared()->module();
Address old_target = lazy_stub->instructions().start();
// TODO(wasm): this is O(n^2), since we scan the entire IFT and imports
// for every lazy compile. Introduce limited scanning.
for (unsigned i = 0; i < module->num_imported_functions; i++) {
auto entry = caller_instance->imported_function_entry_at(i);
if (entry.target() == old_target) {
entry.set(target_instance, result);
}
}
for (unsigned i = 0; i < caller_instance->indirect_function_table_size();
i++) {
auto entry = caller_instance->indirect_function_table_entry_at(i);
if (entry.target() == old_target) {
entry.set(entry.sig_id(), target_instance, result);
}
if (!caller_instance->has_managed_indirect_patcher()) {
auto patcher = Managed<IndirectPatcher>::Allocate(isolate);
caller_instance->set_managed_indirect_patcher(*patcher);
}
IndirectPatcher* patcher = Managed<IndirectPatcher>::cast(
caller_instance->managed_indirect_patcher())
->get();
Address old_target = lazy_stub->instructions().start();
patcher->Patch(caller_instance, target_instance, target_func_index,
old_target, result);
}
return result->instructions().start();
@@ -786,23 +889,6 @@ void RecordStats(const wasm::NativeModule* native_module, Counters* counters) {
}
}
// Get the code for the given {func_index} in the given native module.
// If the code at that location is the (shared) lazy compile builtin,
// clone it, specializing it to the {func_index}.
wasm::WasmCode* CloneLazyCompileStubIfNeeded(Isolate* isolate,
wasm::NativeModule* native_module,
uint32_t func_index) {
wasm::WasmCode* code = native_module->GetCode(func_index);
// {code} will be nullptr when exporting imports.
if (code == nullptr || code->kind() != wasm::WasmCode::kLazyStub ||
!code->IsAnonymous()) {
return code;
}
// Clone the lazy builtin into the native module.
return native_module->CloneLazyBuiltinInto(code, func_index,
WasmCode::kFlushICache);
}
bool in_bounds(uint32_t offset, size_t size, size_t upper) {
return offset + size <= upper && offset + size >= offset;
}
@@ -1603,7 +1689,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
if (module_->start_function_index >= 0) {
int start_index = module_->start_function_index;
wasm::WasmCode* start_code =
CloneLazyCompileStubIfNeeded(isolate_, native_module, start_index);
native_module->GetIndirectlyCallableCode(start_index);
FunctionSig* sig = module_->functions[start_index].sig;
Handle<Code> wrapper_code = js_to_wasm_cache_.CloneOrCompileJSToWasmWrapper(
isolate_, module_, start_code, start_index,
@@ -2310,7 +2396,7 @@ void InstanceBuilder::LoadTableSegments(Handle<WasmInstanceObject> instance) {
// Update the local dispatch table first.
uint32_t sig_id = module_->signature_ids[function->sig_index];
wasm::WasmCode* wasm_code =
CloneLazyCompileStubIfNeeded(isolate_, native_module, func_index);
native_module->GetIndirectlyCallableCode(func_index);
auto entry = instance->indirect_function_table_entry_at(table_index);
entry.set(sig_id, instance, wasm_code);
@@ -3229,7 +3315,7 @@ void CompileJsToWasmWrappers(Isolate* isolate,
for (auto exp : compiled_module->shared()->module()->export_table) {
if (exp.kind != kExternalFunction) continue;
wasm::WasmCode* wasm_code =
CloneLazyCompileStubIfNeeded(isolate, native_module, exp.index);
native_module->GetIndirectlyCallableCode(exp.index);
Handle<Code> wrapper_code = js_to_wasm_cache.CloneOrCompileJSToWasmWrapper(
isolate, compiled_module->shared()->module(), wasm_code, exp.index,
compiled_module->use_trap_handler());
@@ -372,8 +372,7 @@ void NativeModule::CloneCodeHelper::CloneAndPatchCode(
case WasmCode::kLazyStub: {
// Use the first anonymous lazy compile stub hit in this loop as the
// canonical copy for all further ones by remembering it locally via
// the {anonymous_lazy_builtin} variable. All non-anonymous such stubs
// are just cloned directly via {CloneLazyBuiltinInto} below.
// the {anonymous_lazy_builtin} variable.
if (!original_code->IsAnonymous()) {
WasmCode* new_code = cloning_native_module_->CloneCode(
original_code, WasmCode::kNoFlushICache);
@@ -854,15 +853,28 @@ WasmCode* NativeModule::Lookup(Address pc) {
return nullptr;
}
WasmCode* NativeModule::CloneLazyBuiltinInto(
const WasmCode* code, uint32_t index, WasmCode::FlushICache flush_icache) {
DCHECK_EQ(wasm::WasmCode::kLazyStub, code->kind());
DCHECK(code->IsAnonymous());
WasmCode* ret = CloneCode(code, WasmCode::kNoFlushICache);
RelocateCode(ret, code, flush_icache);
code_table_[index] = ret;
ret->index_ = Just(index);
return ret;
WasmCode* NativeModule::GetIndirectlyCallableCode(uint32_t func_index) {
WasmCode* code = GetCode(func_index);
if (!code || code->kind() != WasmCode::kLazyStub) {
return code;
}
if (!code->IsAnonymous()) {
DCHECK_EQ(func_index, code->index());
return code;
}
if (!lazy_compile_stubs_.get()) {
lazy_compile_stubs_ =
base::make_unique<std::vector<WasmCode*>>(FunctionCount());
}
WasmCode* cloned_code = lazy_compile_stubs_.get()->at(func_index);
if (cloned_code == nullptr) {
cloned_code = CloneCode(code, WasmCode::kNoFlushICache);
RelocateCode(cloned_code, code, WasmCode::kFlushICache);
cloned_code->index_ = Just(func_index);
lazy_compile_stubs_.get()->at(func_index) = cloned_code;
}
DCHECK_EQ(func_index, cloned_code->index());
return cloned_code;
}
void NativeModule::CloneTrampolinesAndStubs(
@@ -244,16 +244,10 @@ class V8_EXPORT_PRIVATE NativeModule final {
void UnpackAndRegisterProtectedInstructions();
void ReleaseProtectedInstructions();
// We special-case lazy cloning because we currently rely on making copies
// of the lazy builtin, to be able to identify, in the runtime, which function
// the lazy builtin is a placeholder of. If we used trampolines, we would call
// the runtime function from a common pc. We could, then, figure who the
// caller was if the trampolines called rather than jumped to the common
// builtin. The logic for seeking though frames would change, though.
// TODO(mtrofin): perhaps we can do exactly that - either before or after
// this change.
WasmCode* CloneLazyBuiltinInto(const WasmCode* code, uint32_t index,
WasmCode::FlushICache);
// Gets code suitable for indirect or import calls for the given function
// index. If the code at the given index is the lazy compile stub, it will
// clone a non-anonymous lazy compile stub for the purpose.
WasmCode* GetIndirectlyCallableCode(uint32_t func_index);
bool SetExecutable(bool executable);
@@ -326,6 +320,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
std::vector<std::unique_ptr<WasmCode>> owned_code_;
std::vector<WasmCode*> code_table_;
std::unique_ptr<std::vector<WasmCode*>> lazy_compile_stubs_;
uint32_t num_imported_functions_;
// Maps from instruction start of an immovable code object to instruction
@@ -130,7 +130,8 @@ bool CodeSpecialization::ApplyToWholeModule(NativeModule* native_module,
switch (mode) {
case RelocInfo::JS_TO_WASM_CALL: {
changed = true;
const WasmCode* new_code = native_module->GetCode(exp.index);
const WasmCode* new_code =
native_module->GetIndirectlyCallableCode(exp.index);
it.rinfo()->set_js_to_wasm_address(new_code->instructions().start(),
icache_flush_mode);
} break;
@@ -136,6 +136,8 @@ OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_table_instances,
FixedArray, kIndirectFunctionTableInstancesOffset)
ACCESSORS(WasmInstanceObject, managed_native_allocations, Foreign,
kManagedNativeAllocationsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, managed_indirect_patcher, Foreign,
kManagedIndirectPatcherOffset)
inline bool WasmInstanceObject::has_indirect_function_table() {
return indirect_function_table_sig_ids() != nullptr;
@@ -267,6 +267,7 @@ class WasmInstanceObject : public JSObject {
DECL_ACCESSORS(imported_function_callables, FixedArray)
DECL_OPTIONAL_ACCESSORS(indirect_function_table_instances, FixedArray)
DECL_OPTIONAL_ACCESSORS(managed_native_allocations, Foreign)
DECL_OPTIONAL_ACCESSORS(managed_indirect_patcher, Foreign)
DECL_PRIMITIVE_ACCESSORS(memory_start, byte*)
DECL_PRIMITIVE_ACCESSORS(memory_size, uintptr_t)
DECL_PRIMITIVE_ACCESSORS(memory_mask, uintptr_t)
@@ -289,6 +290,7 @@ class WasmInstanceObject : public JSObject {
V(kImportedFunctionCallablesOffset, kPointerSize) \
V(kIndirectFunctionTableInstancesOffset, kPointerSize) \
V(kManagedNativeAllocationsOffset, kPointerSize) \
V(kManagedIndirectPatcherOffset, kPointerSize) \
V(kFirstUntaggedOffset, 0) /* marker */ \
V(kMemoryStartOffset, kPointerSize) /* untagged */ \
V(kMemorySizeOffset, kPointerSize) /* untagged */ \