Commit 85b80cb7 authored by Igor Sheludko, committed by Commit Bot

[ptr-compr][wasm] Prepare wasm for 32-bit kTaggedSize

Bug: v8:7703
Change-Id: I06d4195597f0ac8ee771da49c9a402be48e91f15
Reviewed-on: https://chromium-review.googlesource.com/c/1480377
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59764}
parent eac097c5
...@@ -2691,8 +2691,16 @@ Node* WasmGraphBuilder::BuildImportCall(wasm::FunctionSig* sig, Node** args, ...@@ -2691,8 +2691,16 @@ Node* WasmGraphBuilder::BuildImportCall(wasm::FunctionSig* sig, Node** args,
// Load the target from the imported_targets array at the offset of // Load the target from the imported_targets array at the offset of
// {func_index}. // {func_index}.
STATIC_ASSERT(kTaggedSize == kSystemPointerSize); Node* func_index_times_pointersize;
Node* func_index_times_pointersize = func_index_times_tagged_size; if (kSystemPointerSize == kTaggedSize) {
func_index_times_pointersize = func_index_times_tagged_size;
} else {
DCHECK_EQ(kSystemPointerSize, kTaggedSize + kTaggedSize);
func_index_times_pointersize = graph()->NewNode(
mcgraph()->machine()->Int32Add(), func_index_times_tagged_size,
func_index_times_tagged_size);
}
Node* imported_targets = Node* imported_targets =
LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer()); LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
Node* target_node = SetEffect(graph()->NewNode( Node* target_node = SetEffect(graph()->NewNode(
...@@ -2774,15 +2782,14 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args, ...@@ -2774,15 +2782,14 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
Node* ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableRefs, Node* ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableRefs,
MachineType::TaggedPointer()); MachineType::TaggedPointer());
Node* intptr_scaled_key = graph()->NewNode( Node* tagged_scaled_key;
machine->Word32Shl(), key, Int32Constant(kSystemPointerSizeLog2)); if (kTaggedSize == kInt32Size) {
tagged_scaled_key = int32_scaled_key;
Node* target = SetEffect( } else {
graph()->NewNode(machine->Load(MachineType::Pointer()), ift_targets, DCHECK_EQ(kTaggedSize, kInt32Size * 2);
intptr_scaled_key, Effect(), Control())); tagged_scaled_key = graph()->NewNode(machine->Int32Add(), int32_scaled_key,
int32_scaled_key);
STATIC_ASSERT(kTaggedSize == kSystemPointerSize); }
Node* tagged_scaled_key = intptr_scaled_key;
Node* target_instance = SetEffect(graph()->NewNode( Node* target_instance = SetEffect(graph()->NewNode(
machine->Load(MachineType::TaggedPointer()), machine->Load(MachineType::TaggedPointer()),
...@@ -2790,8 +2797,20 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args, ...@@ -2790,8 +2797,20 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
Int32Constant(wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)), Int32Constant(wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)),
Effect(), Control())); Effect(), Control()));
args[0] = target; Node* intptr_scaled_key;
if (kSystemPointerSize == kTaggedSize) {
intptr_scaled_key = tagged_scaled_key;
} else {
DCHECK_EQ(kSystemPointerSize, kTaggedSize + kTaggedSize);
intptr_scaled_key = graph()->NewNode(machine->Int32Add(), tagged_scaled_key,
tagged_scaled_key);
}
Node* target = SetEffect(
graph()->NewNode(machine->Load(MachineType::Pointer()), ift_targets,
intptr_scaled_key, Effect(), Control()));
args[0] = target;
return BuildWasmCall(sig, args, rets, position, target_instance, return BuildWasmCall(sig, args, rets, position, target_instance,
untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline); untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline);
} }
......
...@@ -1799,11 +1799,12 @@ class LiftoffCompiler { ...@@ -1799,11 +1799,12 @@ class LiftoffCompiler {
DEBUG_CODE_COMMENT("Check indirect call signature"); DEBUG_CODE_COMMENT("Check indirect call signature");
// Load the signature from {instance->ift_sig_ids[key]} // Load the signature from {instance->ift_sig_ids[key]}
LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kSystemPointerSize); LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kSystemPointerSize);
__ LoadConstant(LiftoffRegister(tmp_const), // Multiply {index} by 4 to represent kInt32Size items.
WasmValue(static_cast<uint32_t>(sizeof(uint32_t)))); STATIC_ASSERT(kInt32Size == 4);
// TODO(wasm): use a emit_i32_shli() instead of a multiply. // TODO(wasm): use a emit_i32_shli() instead of two adds.
// (currently cannot use shl on ia32/x64 because it clobbers %rcx). // (currently cannot use shl on ia32/x64 because it clobbers %rcx).
__ emit_i32_mul(index, index, tmp_const); __ emit_i32_add(index, index, index);
__ emit_i32_add(index, index, index);
__ Load(LiftoffRegister(scratch), table, index, 0, LoadType::kI32Load, __ Load(LiftoffRegister(scratch), table, index, 0, LoadType::kI32Load,
pinned); pinned);
...@@ -1815,20 +1816,28 @@ class LiftoffCompiler { ...@@ -1815,20 +1816,28 @@ class LiftoffCompiler {
__ emit_cond_jump(kUnequal, sig_mismatch_label, __ emit_cond_jump(kUnequal, sig_mismatch_label,
LiftoffAssembler::kWasmIntPtr, scratch, tmp_const); LiftoffAssembler::kWasmIntPtr, scratch, tmp_const);
// At this point {index} has already been multiplied by 4.
DEBUG_CODE_COMMENT("Execute indirect call"); DEBUG_CODE_COMMENT("Execute indirect call");
if (kSystemPointerSize == 8) { if (kTaggedSize != kInt32Size) {
// {index} has already been multiplied by 4. Multiply by another 2. DCHECK_EQ(kTaggedSize, kInt32Size * 2);
__ LoadConstant(LiftoffRegister(tmp_const), WasmValue(2)); // Multiply {index} by another 2 to represent kTaggedSize items.
__ emit_i32_mul(index, index, tmp_const); __ emit_i32_add(index, index, index);
} }
// At this point {index} has already been multiplied by kTaggedSize.
// Load the instance from {instance->ift_instances[key]} // Load the instance from {instance->ift_instances[key]}
LOAD_TAGGED_PTR_INSTANCE_FIELD(table, IndirectFunctionTableRefs); LOAD_TAGGED_PTR_INSTANCE_FIELD(table, IndirectFunctionTableRefs);
// {index} has already been multiplied by kSystemPointerSizeLog2.
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
__ LoadTaggedPointer(tmp_const, table, index, __ LoadTaggedPointer(tmp_const, table, index,
ObjectAccess::ElementOffsetInTaggedFixedArray(0), ObjectAccess::ElementOffsetInTaggedFixedArray(0),
pinned); pinned);
if (kTaggedSize != kSystemPointerSize) {
DCHECK_EQ(kSystemPointerSize, kTaggedSize * 2);
// Multiply {index} by another 2 to represent kSystemPointerSize items.
__ emit_i32_add(index, index, index);
}
// At this point {index} has already been multiplied by kSystemPointerSize.
Register* explicit_instance = &tmp_const; Register* explicit_instance = &tmp_const;
// Load the target from {instance->ift_targets[key]} // Load the target from {instance->ift_targets[key]}
......
...@@ -432,6 +432,8 @@ class WasmInstanceObject : public JSObject { ...@@ -432,6 +432,8 @@ class WasmInstanceObject : public JSObject {
DECL_PRIMITIVE_ACCESSORS(dropped_data_segments, byte*) DECL_PRIMITIVE_ACCESSORS(dropped_data_segments, byte*)
DECL_PRIMITIVE_ACCESSORS(dropped_elem_segments, byte*) DECL_PRIMITIVE_ACCESSORS(dropped_elem_segments, byte*)
// Clear uninitialized padding space. This ensures that the snapshot content
// is deterministic. Depending on the V8 build mode there could be no padding.
V8_INLINE void clear_padding(); V8_INLINE void clear_padding();
// Dispatched behavior. // Dispatched behavior.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment