Commit d3b30c99 authored by Milad Fa, committed by Commit Bot

PPC/s390: [wasm] Move ValueKind out of ValueType

Port a3776a63

Original Commit Message:

    Backends do not care about the concrete type, they only need to know the
    "kind" (e.g. "ref" or "i32").
    In order to prepare Liftoff to use the value kind instead of the
    value type for all stored data, this CL moves the kind out of the
    ValueType and makes it a top-level enum.

R=clemensb@chromium.org, joransiu@ca.ibm.com, junyan@redhat.com, midawson@redhat.com
BUG=
LOG=N

Change-Id: Ia4111941313037aa1a77f2a0a1536d492ae9dc0b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2712392
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Milad Fa <mfarazma@redhat.com>
Cr-Commit-Position: refs/heads/master@{#72915}
parent 569cddb2
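For context, the following is a minimal sketch of the shape the ported code below relies on. It is an assumption about the layout, not the actual v8 value-type.h: the kind constants named in this diff become top-level enumerators of a ValueKind enum, and ValueType::kind() returns that enum, so backend switches can name them without the ValueType:: prefix.

#include <cstdint>

// Sketch only (hypothetical layout, not the real V8 header): the kinds used by
// the Liftoff backend switches are now top-level enumerators.
enum ValueKind : uint8_t {
  kI32,
  kI64,
  kF32,
  kF64,
  kS128,
  kRef,
  kOptRef,
  kRtt,
  kRttWithDepth,
  // remaining kinds elided
};

class ValueType {
 public:
  explicit constexpr ValueType(ValueKind kind) : kind_(kind) {}
  // Backends only inspect the kind; richer type information stays in ValueType.
  constexpr ValueKind kind() const { return kind_; }

 private:
  ValueKind kind_;
};

With that shape, the switches in the diff change from "case ValueType::kS128:" to "case kS128:" while the surrounding backend logic stays the same.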
@@ -74,7 +74,7 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
 
 int LiftoffAssembler::SlotSizeForType(ValueType type) {
   switch (type.kind()) {
-    case ValueType::kS128:
+    case kS128:
       return type.element_size_bytes();
     default:
       return kStackSlotSize;
@@ -82,7 +82,7 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  return (type.kind() == ValueType::kS128 || type.is_reference_type());
+  return (type.kind() == kS128 || type.is_reference_type());
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
...
@@ -136,7 +136,7 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
 
 int LiftoffAssembler::SlotSizeForType(ValueType type) {
   switch (type.kind()) {
-    case ValueType::kS128:
+    case kS128:
       return type.element_size_bytes();
     default:
       return kStackSlotSize;
@@ -144,25 +144,25 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  return (type.kind() == ValueType::kS128 || type.is_reference_type());
+  return (type.kind() == kS128 || type.is_reference_type());
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                     RelocInfo::Mode rmode) {
   switch (value.type().kind()) {
-    case ValueType::kI32:
+    case kI32:
       mov(reg.gp(), Operand(value.to_i32(), rmode));
       break;
-    case ValueType::kI64:
+    case kI64:
       mov(reg.gp(), Operand(value.to_i64(), rmode));
       break;
-    case ValueType::kF32: {
+    case kF32: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       LoadF32(reg.fp(), value.to_f32_boxed().get_scalar(), scratch);
       break;
     }
-    case ValueType::kF64: {
+    case kF64: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       LoadF64(reg.fp(), value.to_f64_boxed().get_bits(), scratch);
@@ -468,7 +468,7 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            ValueType type) {
   int32_t offset = (caller_slot_idx + 1) * 8;
   switch (type.kind()) {
-    case ValueType::kI32: {
+    case kI32: {
 #if defined(V8_TARGET_BIG_ENDIAN)
       LoadS32(dst.gp(), MemOperand(fp, offset + 4));
       break;
@@ -477,22 +477,22 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
       break;
 #endif
     }
-    case ValueType::kRef:
-    case ValueType::kRtt:
-    case ValueType::kOptRef:
-    case ValueType::kI64: {
+    case kRef:
+    case kRtt:
+    case kOptRef:
+    case kI64: {
       LoadU64(dst.gp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kF32: {
+    case kF32: {
       LoadF32(dst.fp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kF64: {
+    case kF64: {
       LoadF64(dst.fp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kS128: {
+    case kS128: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       LoadV128(dst.fp(), MemOperand(fp, offset), scratch);
@@ -508,7 +508,7 @@ void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
                                             ValueType type) {
   int32_t offset = (caller_slot_idx + 1) * 8;
   switch (type.kind()) {
-    case ValueType::kI32: {
+    case kI32: {
 #if defined(V8_TARGET_BIG_ENDIAN)
       StoreU32(src.gp(), MemOperand(fp, offset + 4));
       break;
@@ -517,22 +517,22 @@ void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
       break;
 #endif
     }
-    case ValueType::kRef:
-    case ValueType::kRtt:
-    case ValueType::kOptRef:
-    case ValueType::kI64: {
+    case kRef:
+    case kRtt:
+    case kOptRef:
+    case kI64: {
       StoreU64(src.gp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kF32: {
+    case kF32: {
       StoreF32(src.fp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kF64: {
+    case kF64: {
       StoreF64(src.fp(), MemOperand(fp, offset));
       break;
     }
-    case ValueType::kS128: {
+    case kS128: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       StoreV128(src.fp(), MemOperand(fp, offset), scratch);
@@ -546,7 +546,7 @@ void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
 void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
                                            ValueType type) {
   switch (type.kind()) {
-    case ValueType::kI32: {
+    case kI32: {
 #if defined(V8_TARGET_BIG_ENDIAN)
       LoadS32(dst.gp(), MemOperand(sp, offset + 4));
       break;
@@ -555,22 +555,22 @@ void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
       break;
 #endif
     }
-    case ValueType::kRef:
-    case ValueType::kRtt:
-    case ValueType::kOptRef:
-    case ValueType::kI64: {
+    case kRef:
+    case kRtt:
+    case kOptRef:
+    case kI64: {
       LoadU64(dst.gp(), MemOperand(sp, offset));
       break;
     }
-    case ValueType::kF32: {
+    case kF32: {
       LoadF32(dst.fp(), MemOperand(sp, offset));
       break;
     }
-    case ValueType::kF64: {
+    case kF64: {
       LoadF64(dst.fp(), MemOperand(sp, offset));
       break;
     }
-    case ValueType::kS128: {
+    case kS128: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       LoadV128(dst.fp(), MemOperand(sp, offset), scratch);
@@ -586,18 +586,18 @@ void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
   DCHECK_NE(dst_offset, src_offset);
   int length = 0;
   switch (type.kind()) {
-    case ValueType::kI32:
-    case ValueType::kF32:
+    case kI32:
+    case kF32:
       length = 4;
       break;
-    case ValueType::kI64:
-    case ValueType::kOptRef:
-    case ValueType::kRef:
-    case ValueType::kRtt:
-    case ValueType::kF64:
+    case kI64:
+    case kOptRef:
+    case kRef:
+    case kRtt:
+    case kF64:
       length = 8;
       break;
-    case ValueType::kS128:
+    case kS128:
       length = 16;
       break;
     default:
@@ -643,23 +643,23 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
   RecordUsedSpillOffset(offset);
   MemOperand dst = liftoff::GetStackSlot(offset);
   switch (type.kind()) {
-    case ValueType::kI32:
+    case kI32:
       StoreU32(reg.gp(), dst);
       break;
-    case ValueType::kI64:
-    case ValueType::kOptRef:
-    case ValueType::kRef:
-    case ValueType::kRtt:
-    case ValueType::kRttWithDepth:
+    case kI64:
+    case kOptRef:
+    case kRef:
+    case kRtt:
+    case kRttWithDepth:
       StoreU64(reg.gp(), dst);
       break;
-    case ValueType::kF32:
+    case kF32:
       StoreF32(reg.fp(), dst);
       break;
-    case ValueType::kF64:
+    case kF64:
       StoreF64(reg.fp(), dst);
       break;
-    case ValueType::kS128: {
+    case kS128: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       StoreV128(reg.fp(), dst, scratch);
@@ -681,12 +681,12 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
     src = temps.Acquire();
   }
   switch (value.type().kind()) {
-    case ValueType::kI32: {
+    case kI32: {
       mov(src, Operand(value.to_i32()));
       StoreU32(src, dst);
       break;
     }
-    case ValueType::kI64: {
+    case kI64: {
       mov(src, Operand(value.to_i64()));
       StoreU64(src, dst);
       break;
@@ -700,22 +700,22 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
 void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
   MemOperand src = liftoff::GetStackSlot(offset);
   switch (type.kind()) {
-    case ValueType::kI32:
+    case kI32:
       LoadS32(reg.gp(), src);
       break;
-    case ValueType::kI64:
-    case ValueType::kRef:
-    case ValueType::kOptRef:
-    case ValueType::kRtt:
+    case kI64:
+    case kRef:
+    case kOptRef:
+    case kRtt:
       LoadU64(reg.gp(), src);
       break;
-    case ValueType::kF32:
+    case kF32:
       LoadF32(reg.fp(), src);
       break;
-    case ValueType::kF64:
+    case kF64:
       LoadF64(reg.fp(), src);
       break;
-    case ValueType::kS128: {
+    case kS128: {
       UseScratchRegisterScope temps(this);
       Register scratch = temps.Acquire();
       LoadV128(reg.fp(), src, scratch);
@@ -1088,7 +1088,7 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
   Condition cond = liftoff::ToCondition(liftoff_cond);
   bool use_signed = liftoff::UseSignedOp(liftoff_cond);
 
-  if (type.kind() == ValueType::kI32) {
+  if (type.kind() == kI32) {
     if (rhs == no_reg) {
       if (use_signed) {
         CmpS32(lhs, Operand::Zero());
@@ -1103,10 +1103,9 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
       }
     }
   } else {
-    CHECK(type.kind() == ValueType::kI64 || type.kind() == ValueType::kOptRef ||
-          type.kind() == ValueType::kRtt ||
-          type.kind() == ValueType::kRttWithDepth ||
-          type.kind() == ValueType::kRef);
+    CHECK(type.kind() == kI64 || type.kind() == kOptRef ||
+          type.kind() == kRtt || type.kind() == kRttWithDepth ||
+          type.kind() == kRef);
     if (rhs == no_reg) {
       if (use_signed) {
         CmpS64(lhs, Operand::Zero());
...