Commit e11c57fe authored by Michael Starzinger, committed by Commit Bot

[wasm] Remove {NativeModule::lazy_builtin} field.

This removes the field in question to make it simpler to serialize and
deserialize modules without having to worry about the state of lazy
compilation. It is always possible to clone a non-anonymous builtin,
even without having this module-wide field.

R=clemensh@chromium.org
TEST=mjsunit/regress/wasm/regress-803427
BUG=chromium:803427

Change-Id: I72041e314eb6ee92859d45f1db0ed8500003edc4
Reviewed-on: https://chromium-review.googlesource.com/878581
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50771}
parent bf19e60c
...@@ -1698,7 +1698,8 @@ WasmCodeWrapper EnsureExportedLazyDeoptData(Isolate* isolate, ...@@ -1698,7 +1698,8 @@ WasmCodeWrapper EnsureExportedLazyDeoptData(Isolate* isolate,
return WasmCodeWrapper(code); return WasmCodeWrapper(code);
} }
// Clone the lazy builtin into the native module. // Clone the lazy builtin into the native module.
return WasmCodeWrapper(native_module->CloneLazyBuiltinInto(func_index)); return WasmCodeWrapper(
native_module->CloneLazyBuiltinInto(code, func_index));
} }
} }
......
...@@ -364,15 +364,11 @@ WasmCode* NativeModule::AddInterpreterWrapper(Handle<Code> code, ...@@ -364,15 +364,11 @@ WasmCode* NativeModule::AddInterpreterWrapper(Handle<Code> code,
return ret; return ret;
} }
WasmCode* NativeModule::SetLazyBuiltin(Handle<Code> code) { void NativeModule::SetLazyBuiltin(Handle<Code> code) {
DCHECK_NULL(lazy_builtin_); WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
lazy_builtin_ = AddAnonymousCode(code, WasmCode::kLazyStub);
for (uint32_t i = num_imported_functions(), e = FunctionCount(); i < e; ++i) { for (uint32_t i = num_imported_functions(), e = FunctionCount(); i < e; ++i) {
SetCodeTable(i, lazy_builtin_); SetCodeTable(i, lazy_builtin);
} }
return lazy_builtin_;
} }
WasmCompiledModule* NativeModule::compiled_module() const { WasmCompiledModule* NativeModule::compiled_module() const {
...@@ -655,9 +651,10 @@ WasmCode* NativeModule::Lookup(Address pc) { ...@@ -655,9 +651,10 @@ WasmCode* NativeModule::Lookup(Address pc) {
return nullptr; return nullptr;
} }
WasmCode* NativeModule::CloneLazyBuiltinInto(uint32_t index) { WasmCode* NativeModule::CloneLazyBuiltinInto(const WasmCode* code,
DCHECK_NOT_NULL(lazy_builtin()); uint32_t index) {
WasmCode* ret = CloneCode(lazy_builtin()); DCHECK_EQ(wasm::WasmCode::kLazyStub, code->kind());
WasmCode* ret = CloneCode(code);
SetCodeTable(index, ret); SetCodeTable(index, ret);
ret->index_ = Just(index); ret->index_ = Just(index);
return ret; return ret;
...@@ -882,10 +879,6 @@ std::unique_ptr<NativeModule> NativeModule::Clone() { ...@@ -882,10 +879,6 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
TRACE_HEAP("%zu cloned from %zu\n", ret->instance_id, instance_id); TRACE_HEAP("%zu cloned from %zu\n", ret->instance_id, instance_id);
if (!ret) return ret; if (!ret) return ret;
if (lazy_builtin() != nullptr) {
ret->lazy_builtin_ = ret->CloneCode(lazy_builtin());
}
if (!ret->CloneTrampolinesAndStubs(this)) return nullptr; if (!ret->CloneTrampolinesAndStubs(this)) return nullptr;
std::unordered_map<Address, Address, AddressHasher> reverse_lookup; std::unordered_map<Address, Address, AddressHasher> reverse_lookup;
...@@ -910,20 +903,29 @@ std::unique_ptr<NativeModule> NativeModule::Clone() { ...@@ -910,20 +903,29 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
WasmCode* old_stub = stubs_.find(pair.first)->second; WasmCode* old_stub = stubs_.find(pair.first)->second;
PatchTrampolineAndStubCalls(old_stub, new_stub, reverse_lookup); PatchTrampolineAndStubCalls(old_stub, new_stub, reverse_lookup);
} }
if (lazy_builtin_ != nullptr) {
PatchTrampolineAndStubCalls(lazy_builtin_, ret->lazy_builtin_,
reverse_lookup);
}
WasmCode* anonymous_lazy_builtin = nullptr;
for (uint32_t i = num_imported_functions(), e = FunctionCount(); i < e; ++i) { for (uint32_t i = num_imported_functions(), e = FunctionCount(); i < e; ++i) {
const WasmCode* original_code = GetCode(i); const WasmCode* original_code = GetCode(i);
switch (original_code->kind()) { switch (original_code->kind()) {
case WasmCode::kLazyStub: { case WasmCode::kLazyStub: {
if (original_code->IsAnonymous()) { // Use the first anonymous lazy compile stub hit in this loop as the
ret->SetCodeTable(i, ret->lazy_builtin()); // canonical copy for all further ones by remembering it locally via
} else { // the {anonymous_lazy_builtin} variable. All non-anonymous such stubs
if (!ret->CloneLazyBuiltinInto(i)) return nullptr; // are just cloned directly via {CloneLazyBuiltinInto} below.
if (!original_code->IsAnonymous()) {
WasmCode* new_code = ret->CloneLazyBuiltinInto(original_code, i);
if (new_code == nullptr) return nullptr;
PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup);
break;
}
if (anonymous_lazy_builtin == nullptr) {
WasmCode* new_code = ret->CloneCode(original_code);
if (new_code == nullptr) return nullptr;
PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup);
anonymous_lazy_builtin = new_code;
} }
ret->SetCodeTable(i, anonymous_lazy_builtin);
} break; } break;
case WasmCode::kFunction: { case WasmCode::kFunction: {
WasmCode* new_code = ret->CloneCode(original_code); WasmCode* new_code = ret->CloneCode(original_code);
......
...@@ -204,11 +204,11 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -204,11 +204,11 @@ class V8_EXPORT_PRIVATE NativeModule final {
WasmCode* AddInterpreterWrapper(Handle<Code> code, uint32_t index); WasmCode* AddInterpreterWrapper(Handle<Code> code, uint32_t index);
// When starting lazy compilation, provide the WasmLazyCompile builtin by // When starting lazy compilation, provide the WasmLazyCompile builtin by
// calling SetLazyBuiltin. It will initialize the code table with it, and the // calling SetLazyBuiltin. It will initialize the code table with it. Copies
// lazy_builtin_ field. The latter is used when creating entries for exported // of it might be cloned from them later when creating entries for exported
// functions and indirect callable functions, so that they may be identified // functions and indirect callable functions, so that they may be identified
// by the runtime. // by the runtime.
WasmCode* SetLazyBuiltin(Handle<Code> code); void SetLazyBuiltin(Handle<Code> code);
// ExportedWrappers are WasmToWasmWrappers for functions placed on import // ExportedWrappers are WasmToWasmWrappers for functions placed on import
// tables. We construct them as-needed. // tables. We construct them as-needed.
...@@ -219,8 +219,6 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -219,8 +219,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
uint32_t FunctionCount() const; uint32_t FunctionCount() const;
WasmCode* GetCode(uint32_t index) const; WasmCode* GetCode(uint32_t index) const;
WasmCode* lazy_builtin() const { return lazy_builtin_; }
// We special-case lazy cloning because we currently rely on making copies // We special-case lazy cloning because we currently rely on making copies
// of the lazy builtin, to be able to identify, in the runtime, which function // of the lazy builtin, to be able to identify, in the runtime, which function
// the lazy builtin is a placeholder of. If we used trampolines, we would call // the lazy builtin is a placeholder of. If we used trampolines, we would call
...@@ -229,7 +227,7 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -229,7 +227,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
// builtin. The logic for seeking though frames would change, though. // builtin. The logic for seeking though frames would change, though.
// TODO(mtrofin): perhaps we can do exactly that - either before or after // TODO(mtrofin): perhaps we can do exactly that - either before or after
// this change. // this change.
WasmCode* CloneLazyBuiltinInto(uint32_t); WasmCode* CloneLazyBuiltinInto(const WasmCode* code, uint32_t);
bool SetExecutable(bool executable); bool SetExecutable(bool executable);
...@@ -319,7 +317,6 @@ class V8_EXPORT_PRIVATE NativeModule final { ...@@ -319,7 +317,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
DisjointAllocationPool allocated_memory_; DisjointAllocationPool allocated_memory_;
std::list<VirtualMemory> owned_memory_; std::list<VirtualMemory> owned_memory_;
WasmCodeManager* wasm_code_manager_; WasmCodeManager* wasm_code_manager_;
wasm::WasmCode* lazy_builtin_ = nullptr;
base::Mutex allocation_mutex_; base::Mutex allocation_mutex_;
Handle<WasmCompiledModule> compiled_module_; Handle<WasmCompiledModule> compiled_module_;
size_t committed_memory_ = 0; size_t committed_memory_ = 0;
......
...@@ -187,7 +187,6 @@ NativeModuleSerializer::NativeModuleSerializer(Isolate* isolate, ...@@ -187,7 +187,6 @@ NativeModuleSerializer::NativeModuleSerializer(Isolate* isolate,
: isolate_(isolate), native_module_(module) { : isolate_(isolate), native_module_(module) {
DCHECK_NOT_NULL(isolate_); DCHECK_NOT_NULL(isolate_);
DCHECK_NOT_NULL(native_module_); DCHECK_NOT_NULL(native_module_);
DCHECK_NULL(native_module_->lazy_builtin_);
// TODO(mtrofin): persist the export wrappers. Ideally, we'd only persist // TODO(mtrofin): persist the export wrappers. Ideally, we'd only persist
// the unique ones, i.e. the cache. // the unique ones, i.e. the cache.
ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate_); ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate_);
......
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --wasm-lazy-compilation

load('test/mjsunit/wasm/wasm-constants.js');
load('test/mjsunit/wasm/wasm-module-builder.js');

// Regression test for chromium:803427: compile an (empty) module with lazy
// compilation enabled and hand it to a worker, which forces the module to be
// serialized and deserialized across the worker boundary.
const builder = new WasmModuleBuilder();
const wasmModule = new WebAssembly.Module(builder.toBuffer());
const worker = new Worker('onmessage = function() {};');
worker.postMessage(wasmModule);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment