Commit cfc5c2ce authored by Mostyn Bramley-Moore, committed by Commit Bot

[jumbo] move FlushICache enum into WasmCode class

https://chromium-review.googlesource.com/971881 triggered jumbo
build failures due to a collision between the FlushICache functions
in module-compiler.cc and the FlushICache enum.  If we move the
enum inside the WasmCode class we can disambiguate references to it.

Change-Id: Icd389ba8abf6afefc4a8aa53887779f4d1357dd2
Reviewed-on: https://chromium-review.googlesource.com/974261
Commit-Queue: Mostyn Bramley-Moore <mostynb@vewd.com>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52144}
parent 48f89fbd
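
For context, here is a minimal, self-contained C++ sketch of the kind of clash described above and how nesting the enum avoids it. This is not the actual V8 source; the namespace wasm_sketch, the simplified AddCode signature, and the stubbed-out FlushICache helper are illustrative assumptions only. In a jumbo build, several .cc files are compiled as one translation unit, so a file-local helper function named FlushICache and a namespace-level enum of the same name can land in the same scope, where the function name hides the enum type. Moving the enum inside the class gives every reference an unambiguous qualified name.

#include <cstddef>

namespace wasm_sketch {

// Stand-in for the FlushICache helper function(s) in module-compiler.cc.
void FlushICache(void* start, std::size_t size) {
  // A real implementation would invoke the platform icache-flush routine.
  (void)start;
  (void)size;
}

// Before this change, a namespace-level
//   enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };
// collided with the function above once both ended up in one jumbo
// translation unit. Nesting it inside the class resolves the ambiguity.
class WasmCode {
 public:
  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };
};

// Callers now spell the type and its values with the class qualifier.
void AddCode(WasmCode::FlushICache flush_icache) {
  if (flush_icache == WasmCode::kFlushICache) {
    FlushICache(nullptr, 0);  // unambiguous: this names the function
  }
}

}  // namespace wasm_sketch

int main() {
  wasm_sketch::AddCode(wasm_sketch::WasmCode::kFlushICache);
  return 0;
}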
@@ -914,7 +914,8 @@ wasm::WasmCode* EnsureExportedLazyDeoptData(Isolate* isolate,
     return code;
   }
   // Clone the lazy builtin into the native module.
-  return native_module->CloneLazyBuiltinInto(code, func_index, kFlushICache);
+  return native_module->CloneLazyBuiltinInto(code, func_index,
+                                             WasmCode::kFlushICache);
 }
 // Ensure that the code object in <code_table> at offset <func_index> has
...
@@ -66,7 +66,7 @@ const bool kModuleCanAllocateMoreMemory = true;
 void PatchTrampolineAndStubCalls(
     const WasmCode* original_code, const WasmCode* new_code,
     const std::unordered_map<Address, Address, AddressHasher>& reverse_lookup,
-    FlushICache flush_icache) {
+    WasmCode::FlushICache flush_icache) {
   // Relocate everything in kApplyMask using this delta, and patch all code
   // targets to call the new trampolines and stubs.
   intptr_t delta =
@@ -96,7 +96,7 @@ void PatchTrampolineAndStubCalls(
 }

 void RelocateCode(WasmCode* code, const WasmCode* orig,
-                  FlushICache flush_icache) {
+                  WasmCode::FlushICache flush_icache) {
   intptr_t delta = code->instructions().start() - orig->instructions().start();
   for (RelocIterator it(code->instructions(), code->reloc_info(),
                         code->constant_pool(), RelocInfo::kApplyMask);
@@ -362,7 +362,7 @@ WasmCode* NativeModule::AddOwnedCode(
     uint32_t stack_slots, size_t safepoint_table_offset,
     size_t handler_table_offset,
     std::shared_ptr<ProtectedInstructions> protected_instructions,
-    WasmCode::Tier tier, FlushICache flush_icache) {
+    WasmCode::Tier tier, WasmCode::FlushICache flush_icache) {
   // both allocation and insertion in owned_code_ happen in the same critical
   // section, thus ensuring owned_code_'s elements are rarely if ever moved.
   base::LockGuard<base::Mutex> lock(&allocation_mutex_);
@@ -454,7 +454,7 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code,
                      code->handler_table_offset(),  // handler_table_offset
                      protected_instructions,        // protected_instructions
                      WasmCode::kOther,              // kind
-                     kNoFlushICache);               // flush_icache
+                     WasmCode::kNoFlushICache);     // flush_icache
   intptr_t delta = ret->instructions().start() - code->instruction_start();
   int mask = RelocInfo::kApplyMask | RelocInfo::kCodeTargetMask |
              RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
@@ -500,7 +500,7 @@ WasmCode* NativeModule::AddCode(
       std::move(reloc_info), static_cast<size_t>(desc.reloc_size), Just(index),
       WasmCode::kFunction, desc.instr_size - desc.constant_pool_size,
       frame_slots, safepoint_table_offset, handler_table_offset,
-      std::move(protected_instructions), tier, kNoFlushICache);
+      std::move(protected_instructions), tier, WasmCode::kNoFlushICache);
   code_table_[index] = ret;
   // TODO(mtrofin): this is a copy and paste from Code::CopyFrom.
@@ -561,8 +561,8 @@ Address NativeModule::CreateTrampolineTo(Handle<Code> code) {
                            0,                 // safepoint_table_offset
                            0,                 // handler_table_offset
                            {},                // protected_instructions
-                           WasmCode::kOther,  // tier
-                           kFlushICache);     // flush_icache
+                           WasmCode::kOther,         // tier
+                           WasmCode::kFlushICache);  // flush_icache
   Address ret = wasm_code->instructions().start();
   trampolines_.emplace(std::make_pair(dest, ret));
   return ret;
@@ -685,20 +685,19 @@ WasmCode* NativeModule::Lookup(Address pc) {
   return nullptr;
 }

-WasmCode* NativeModule::CloneLazyBuiltinInto(const WasmCode* code,
-                                             uint32_t index,
-                                             FlushICache flush_icache) {
+WasmCode* NativeModule::CloneLazyBuiltinInto(
+    const WasmCode* code, uint32_t index, WasmCode::FlushICache flush_icache) {
   DCHECK_EQ(wasm::WasmCode::kLazyStub, code->kind());
   DCHECK(code->IsAnonymous());
-  WasmCode* ret = CloneCode(code, kNoFlushICache);
+  WasmCode* ret = CloneCode(code, WasmCode::kNoFlushICache);
   RelocateCode(ret, code, flush_icache);
   code_table_[index] = ret;
   ret->index_ = Just(index);
   return ret;
 }

-void NativeModule::CloneTrampolinesAndStubs(const NativeModule* other,
-                                            FlushICache flush_icache) {
+void NativeModule::CloneTrampolinesAndStubs(
+    const NativeModule* other, WasmCode::FlushICache flush_icache) {
   for (auto& pair : other->trampolines_) {
     Address key = pair.first;
     Address local =
@@ -714,7 +713,7 @@ void NativeModule::CloneTrampolinesAndStubs(const NativeModule* other,
 }

 WasmCode* NativeModule::CloneCode(const WasmCode* original_code,
-                                  FlushICache flush_icache) {
+                                  WasmCode::FlushICache flush_icache) {
   std::unique_ptr<byte[]> reloc_info;
   if (original_code->reloc_info().size() > 0) {
     reloc_info.reset(new byte[original_code->reloc_info().size()]);
@@ -915,7 +914,7 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
   // Clone trampolines and stubs. They are later patched, so no icache flush
   // needed yet.
-  ret->CloneTrampolinesAndStubs(this, kNoFlushICache);
+  ret->CloneTrampolinesAndStubs(this, WasmCode::kNoFlushICache);

   std::unordered_map<Address, Address, AddressHasher> reverse_lookup;
   for (auto& pair : trampolines_) {
@@ -938,7 +937,7 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
     WasmCode* new_stub = pair.second;
     WasmCode* old_stub = stubs_.find(pair.first)->second;
     PatchTrampolineAndStubCalls(old_stub, new_stub, reverse_lookup,
-                                kFlushICache);
+                                WasmCode::kFlushICache);
   }

   WasmCode* anonymous_lazy_builtin = nullptr;
@@ -951,23 +950,26 @@ std::unique_ptr<NativeModule> NativeModule::Clone() {
         // the {anonymous_lazy_builtin} variable. All non-anonymous such stubs
         // are just cloned directly via {CloneLazyBuiltinInto} below.
         if (!original_code->IsAnonymous()) {
-          WasmCode* new_code = ret->CloneCode(original_code, kNoFlushICache);
+          WasmCode* new_code =
+              ret->CloneCode(original_code, WasmCode::kNoFlushICache);
           PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup,
-                                      kFlushICache);
+                                      WasmCode::kFlushICache);
           break;
         }
         if (anonymous_lazy_builtin == nullptr) {
-          WasmCode* new_code = ret->CloneCode(original_code, kNoFlushICache);
+          WasmCode* new_code =
+              ret->CloneCode(original_code, WasmCode::kNoFlushICache);
           PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup,
-                                      kFlushICache);
+                                      WasmCode::kFlushICache);
           anonymous_lazy_builtin = new_code;
         }
         ret->code_table_[i] = anonymous_lazy_builtin;
       } break;
       case WasmCode::kFunction: {
-        WasmCode* new_code = ret->CloneCode(original_code, kNoFlushICache);
+        WasmCode* new_code =
+            ret->CloneCode(original_code, WasmCode::kNoFlushICache);
         PatchTrampolineAndStubCalls(original_code, new_code, reverse_lookup,
-                                    kFlushICache);
+                                    WasmCode::kFlushICache);
       } break;
       default:
         UNREACHABLE();
...
@@ -85,8 +85,6 @@ class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 using ProtectedInstructions =
     std::vector<trap_handler::ProtectedInstructionData>;

-enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };
-
 class V8_EXPORT_PRIVATE WasmCode final {
  public:
   enum Kind {
@@ -140,6 +138,8 @@ class V8_EXPORT_PRIVATE WasmCode final {
   ~WasmCode();

+  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };
+
  private:
   friend class NativeModule;
@@ -236,7 +236,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // TODO(mtrofin): perhaps we can do exactly that - either before or after
   // this change.
   WasmCode* CloneLazyBuiltinInto(const WasmCode* code, uint32_t index,
-                                 FlushICache);
+                                 WasmCode::FlushICache);

   bool SetExecutable(bool executable);
@@ -282,9 +282,10 @@ class V8_EXPORT_PRIVATE NativeModule final {
                          uint32_t stack_slots, size_t safepoint_table_offset,
                          size_t handler_table_offset,
                          std::shared_ptr<ProtectedInstructions>, WasmCode::Tier,
-                         FlushICache);
-  WasmCode* CloneCode(const WasmCode*, FlushICache);
-  void CloneTrampolinesAndStubs(const NativeModule* other, FlushICache);
+                         WasmCode::FlushICache);
+  WasmCode* CloneCode(const WasmCode*, WasmCode::FlushICache);
+  void CloneTrampolinesAndStubs(const NativeModule* other,
+                                WasmCode::FlushICache);
   WasmCode* Lookup(Address);
   Address GetLocalAddressFor(Handle<Code>);
   Address CreateTrampolineTo(Handle<Code>);
...
@@ -576,7 +576,7 @@ bool NativeModuleDeserializer::ReadCode() {
       code_buffer, std::move(reloc_info), reloc_size, Just(index_),
       WasmCode::kFunction, constant_pool_offset, stack_slot_count,
       safepoint_table_offset, handler_table_offset, protected_instructions,
-      tier, kNoFlushICache);
+      tier, WasmCode::kNoFlushICache);
   native_module_->code_table_[index_] = ret;
   // now relocate the code
...