Commit 1ba5766f authored by Manos Koukoutos, committed by Commit Bot

[wasm][gc] Implement struct.set and switch struct.get to optref

Work towards adding heap-allocated object support for wasm, according to
the gc proposal.

Changes:
- Implement subtyping for reference types (ref s) and (optref s),
  where 's' is a struct type.
  This CL does *not* implement subtyping between struct and function
  types. Also, it does not handle i31refs and eqrefs.
- Implement struct.set.
- Change struct.get to accept an optref as argument, as required by the
  standard.
- Allow locals to store objects of ref and optref types.
- Add a test for struct.set and optref locals. Modify the test for
  struct.get accordingly.

Reference: https://github.com/WebAssembly/gc

R=jkummerow@chromium.org
R=clemensb@chromium.org

Bug: v8:7748
Change-Id: I708626fa5f90a6e24e667d66eed1c7697f458a23
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2172089
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67562}
parent 3ee4ead5
......@@ -873,6 +873,7 @@ namespace internal {
TFS(ThrowWasmTrapTableOutOfBounds) \
TFS(ThrowWasmTrapBrOnExnNullRef) \
TFS(ThrowWasmTrapRethrowNullRef) \
TFS(ThrowWasmTrapNullDereference) \
\
/* WeakMap */ \
TFJ(WeakMapConstructor, kDontAdaptArgumentsSentinel) \
......
......@@ -1603,7 +1603,8 @@ enum class LoadSensitivity {
V(TrapElemSegmentDropped) \
V(TrapTableOutOfBounds) \
V(TrapBrOnExnNullRef) \
V(TrapRethrowNullRef)
V(TrapRethrowNullRef) \
V(TrapNullDereference)
enum KeyedAccessLoadMode {
STANDARD_LOAD,
......
......@@ -553,6 +553,7 @@ namespace internal {
T(WasmTrapTableOutOfBounds, "table access out of bounds") \
T(WasmTrapBrOnExnNullRef, "br_on_exn on nullref value") \
T(WasmTrapRethrowNullRef, "rethrowing nullref value") \
T(WasmTrapNullDereference, "dereferencing a null pointer") \
T(WasmExceptionError, "wasm exception") \
/* Asm.js validation related */ \
T(AsmJsInvalid, "Invalid asm.js: %") \
......
......@@ -50,6 +50,7 @@
#include "src/wasm/memory-tracing.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-constants.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-module.h"
......@@ -5030,6 +5031,21 @@ Node* FieldOffset(MachineGraph* graph, const wasm::StructType* type,
return graph->IntPtrConstant(offset);
}
// Store {value} into field {field_index} of {struct_object}, without checking
// whether the struct reference is null. Shared helper for StructNew (where the
// object is known to be freshly allocated) and StructSet (which performs its
// own null check first).
Node* StoreStructFieldUnchecked(MachineGraph* graph, WasmGraphAssembler* gasm,
                                Node* struct_object,
                                const wasm::StructType* type,
                                uint32_t field_index, Node* value) {
  const wasm::ValueType field_type = type->field(field_index);
  // Reference-valued fields need a pointer write barrier so the GC sees the
  // new edge; primitive fields do not.
  WriteBarrierKind barrier_kind = kNoWriteBarrier;
  if (field_type.IsReferenceType()) barrier_kind = kPointerWriteBarrier;
  StoreRepresentation store_rep(field_type.machine_representation(),
                                barrier_kind);
  return gasm->Store(store_rep, struct_object,
                     FieldOffset(graph, type, field_index), value);
}
Node* WasmGraphBuilder::StructNew(uint32_t struct_index,
const wasm::StructType* type,
Vector<Node*> fields) {
......@@ -5040,25 +5056,32 @@ Node* WasmGraphBuilder::StructNew(uint32_t struct_index,
Node* s = BuildCallToRuntime(Runtime::kWasmStructNew, runtime_args,
arraysize(runtime_args));
for (uint32_t i = 0; i < type->field_count(); i++) {
wasm::ValueType field_type = type->field(i);
WriteBarrierKind write_barrier = type->field(i).IsReferenceType()
? kPointerWriteBarrier
: kNoWriteBarrier;
StoreRepresentation rep(field_type.machine_representation(), write_barrier);
Node* offset = FieldOffset(mcgraph(), type, i);
gasm_->Store(rep, s, offset, fields[i]);
StoreStructFieldUnchecked(mcgraph(), gasm_.get(), s, type, i, fields[i]);
}
return s;
}
// Build the graph for struct.get: trap with {kTrapNullDereference} if the
// struct reference is null, then load field {field_index} of {struct_object}.
// {position} is the wasm source position attributed to the null-check trap.
Node* WasmGraphBuilder::StructGet(Node* struct_object,
                                  const wasm::StructType* type,
                                  uint32_t field_index,
                                  wasm::WasmCodePosition position) {
  MachineType machine_type = FieldType(type, field_index);
  Node* offset = FieldOffset(mcgraph(), type, field_index);
  // struct.get takes an optref, so the object may be null; trap before the
  // load dereferences it.
  TrapIfTrue(wasm::kTrapNullDereference,
             gasm_->WordEqual(struct_object, RefNull()), position);
  return gasm_->Load(machine_type, struct_object, offset);
}
// Build the graph for struct.set: trap with {kTrapNullDereference} if the
// struct reference is null, then store {field_value} into field {field_index}
// of {struct_object}. {position} is the wasm source position attributed to
// the null-check trap.
Node* WasmGraphBuilder::StructSet(Node* struct_object,
                                  const wasm::StructType* type,
                                  uint32_t field_index, Node* field_value,
                                  wasm::WasmCodePosition position) {
  // struct.set takes an optref; trap before storing through a null object.
  TrapIfTrue(wasm::kTrapNullDereference,
             gasm_->WordEqual(struct_object, RefNull()), position);
  return StoreStructFieldUnchecked(mcgraph(), gasm_.get(), struct_object, type,
                                   field_index, field_value);
}
class WasmDecorator final : public GraphDecorator {
public:
explicit WasmDecorator(NodeOriginTable* origins, wasm::Decoder* decoder)
......
......@@ -370,7 +370,10 @@ class WasmGraphBuilder {
Node* StructNew(uint32_t struct_index, const wasm::StructType* type,
Vector<Node*> fields);
Node* StructGet(Node* struct_object, const wasm::StructType* type,
uint32_t field_index);
uint32_t field_index, wasm::WasmCodePosition position);
Node* StructSet(Node* struct_object, const wasm::StructType* type,
uint32_t field_index, Node* value,
wasm::WasmCodePosition position);
bool has_simd() const { return has_simd_; }
......
......@@ -3398,6 +3398,12 @@ class LiftoffCompiler {
// TODO(7748): Implement.
unsupported(decoder, kGC, "struct.get");
}
  // Baseline (Liftoff) lowering of struct.set. GC opcodes are not supported
  // in Liftoff yet, so this bails out of baseline compilation via
  // {unsupported}.
  void StructSet(FullDecoder* decoder, const Value& struct_obj,
                 const FieldIndexImmediate<validate>& field,
                 const Value& field_value) {
    // TODO(7748): Implement.
    unsupported(decoder, kGC, "struct.set");
  }
private:
// Emit additional source positions for return addresses. Used by debugging to
......
......@@ -210,6 +210,7 @@ struct GlobalIndexImmediate {
namespace function_body_decoder {
// Decode a byte representing a local type. Return {false} if the encoded
// byte was invalid or the start of a type index.
// TODO(7748): Refactor this to handle (opt)ref types
inline bool decode_local_type(uint8_t val, ValueType* result) {
switch (static_cast<ValueTypeCode>(val)) {
case kLocalVoid:
......@@ -810,7 +811,9 @@ enum class LoadTransformationKind : uint8_t {
F(StructNew, const StructIndexImmediate<validate>& imm, const Value args[], \
Value* result) \
F(StructGet, const Value& struct_object, \
const FieldIndexImmediate<validate>& field, Value* result)
const FieldIndexImmediate<validate>& field, Value* result) \
F(StructSet, const Value& struct_object, \
const FieldIndexImmediate<validate>& field, const Value& field_value)
// Generic Wasm bytecode decoder with utilities for decoding immediates,
// lengths, etc.
......@@ -915,6 +918,26 @@ class WasmDecoder : public Decoder {
"invalid local type 'exception ref', enable with "
"--experimental-wasm-eh");
return false;
case kLocalRef:
if (enabled.has_gc()) {
uint32_t type_index = decoder->consume_u32v("type index");
type = ValueType(ValueType::kRef, type_index);
break;
}
decoder->error(decoder->pc() - 1,
"invalid local type 'ref', enable with "
"--experimental-wasm-gc");
return false;
case kLocalOptRef:
if (enabled.has_gc()) {
uint32_t type_index = decoder->consume_u32v("type index");
type = ValueType(ValueType::kOptRef, type_index);
break;
}
decoder->error(decoder->pc() - 1,
"invalid local type 'optref', enable with "
"--experimental-wasm-gc");
return false;
case kLocalS128:
if (enabled.has_simd()) {
type = kWasmS128;
......@@ -2903,16 +2926,25 @@ class WasmFullDecoder : public WasmDecoder<validate> {
FieldIndexImmediate<validate> field(this, this->pc_ + len);
if (!this->Validate(this->pc_ + len, field)) break;
len += field.length;
// TODO(7748): This should take an optref, and perform a null-check.
auto struct_obj =
Pop(0, ValueType(ValueType::kRef, field.struct_index.index));
Pop(0, ValueType(ValueType::kOptRef, field.struct_index.index));
auto* value = Push(field.struct_index.struct_type->field(field.index));
// TODO(7748): Optimize this when struct type is null/ref
CALL_INTERFACE_IF_REACHABLE(StructGet, struct_obj, field, value);
break;
}
case kExprStructSet:
UNIMPLEMENTED(); // TODO(7748): Implement.
case kExprStructSet: {
FieldIndexImmediate<validate> field(this, this->pc_ + len);
if (!this->Validate(this->pc_ + len, field)) break;
len += field.length;
auto field_value = Pop(
0, ValueType(field.struct_index.struct_type->field(field.index)));
auto struct_obj =
Pop(0, ValueType(ValueType::kOptRef, field.struct_index.index));
// TODO(7748): Optimize this when struct type is null/ref
CALL_INTERFACE_IF_REACHABLE(StructSet, struct_obj, field, field_value);
break;
}
case kExprArrayNew:
UNIMPLEMENTED(); // TODO(7748): Implement.
break;
......
......@@ -614,8 +614,16 @@ class WasmGraphBuildingInterface {
void StructGet(FullDecoder* decoder, const Value& struct_object,
const FieldIndexImmediate<validate>& field, Value* result) {
result->node = BUILD(StructGet, struct_object.node,
field.struct_index.struct_type, field.index);
result->node =
BUILD(StructGet, struct_object.node, field.struct_index.struct_type,
field.index, decoder->position());
}
  // Decoder interface hook for struct.set: forwards to the graph builder and
  // passes the current decoder position so the implicit null check can report
  // an accurate trap location.
  void StructSet(FullDecoder* decoder, const Value& struct_object,
                 const FieldIndexImmediate<validate>& field,
                 const Value& field_value) {
    BUILD(StructSet, struct_object.node, field.struct_index.struct_type,
          field.index, field_value.node, decoder->position());
  }
private:
......@@ -729,6 +737,8 @@ class WasmGraphBuildingInterface {
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef:
case ValueType::kOptRef:
case ValueType::kEqRef:
return builder_->RefNull();
default:
UNREACHABLE();
......
......@@ -31,6 +31,9 @@ size_t LocalDeclEncoder::Emit(byte* buffer) const {
LEBHelper::write_u32v(&pos, local_decl.first);
*pos = local_decl.second.value_type_code();
++pos;
if (local_decl.second.has_immediate()) {
LEBHelper::write_u32v(&pos, local_decl.second.ref_index());
}
}
DCHECK_EQ(Size(), pos - buffer);
return static_cast<size_t>(pos - buffer);
......@@ -48,9 +51,17 @@ uint32_t LocalDeclEncoder::AddLocals(uint32_t count, ValueType type) {
return result;
}
// Size = (size of locals count) +
//        (for each local pair <reps, type>,
//         (size of reps) + (size of type) + (size of optional immediate))
size_t LocalDeclEncoder::Size() const {
  size_t size = LEBHelper::sizeof_u32v(local_decls.size());
  // Take by const reference to avoid copying each <count, type> pair.
  for (const auto& local_decl : local_decls) {
    size += LEBHelper::sizeof_u32v(local_decl.first) +  // number of locals
            1 +                                         // opcode
            // (Opt)ref types carry a LEB-encoded type-index immediate.
            (local_decl.second.has_immediate()
                 ? LEBHelper::sizeof_u32v(local_decl.second.ref_index())
                 : 0);
  }
  return size;
}
......
......@@ -20,21 +20,26 @@ namespace wasm {
// Type for holding simd values, defined in wasm-value.h.
class Simd128;
// Type lattice: Given a fixed struct type S, the following lattice
// defines the subtyping relation among types:
// For every two types connected by a line, the lower type is a
// (direct) subtype of the upper type.
//
//            AnyRef
//           /   |   \
//   FuncRef  ExnRef  OptRef(S)
//       \       |    /      \
// I32 I64 F32 F64  NullRef  Ref(S)
//   \   \   \   \     |     /
//  ---------------------- Bottom ---------
// Format: kind, log2Size, code, machineType, shortName, typeName
//
// Some of these types are from proposals that are not standardized yet:
// - "ref" types per https://github.com/WebAssembly/function-references
// - "optref"/"eqref" per https://github.com/WebAssembly/gc
//
// TODO(7748): Extend this with eqref, struct and function subtyping.
// Keep up to date with funcref vs. anyref subtyping.
#define FOREACH_VALUE_TYPE(V) \
V(Stmt, -1, Void, None, 'v', "<stmt>") \
V(I32, 2, I32, Int32, 'i', "i32") \
......@@ -59,20 +64,24 @@ class ValueType {
#undef DEF_ENUM
};
  // True for reference types that carry a type-index immediate, i.e.
  // (ref $t) and (optref $t); these encode an extra u32 after the type opcode.
  constexpr bool has_immediate() const {
    return kind() == kRef || kind() == kOptRef;
  }
constexpr ValueType() : bit_field_(KindField::encode(kStmt)) {}
explicit constexpr ValueType(Kind kind)
: bit_field_(KindField::encode(kind)) {
DCHECK(kind != kRef && kind != kOptRef);
DCHECK(!has_immediate());
}
constexpr ValueType(Kind kind, uint32_t ref_index)
: bit_field_(KindField::encode(kind) | RefIndexField::encode(ref_index)) {
DCHECK(kind == kRef || kind == kOptRef);
DCHECK(has_immediate());
}
constexpr Kind kind() const { return KindField::decode(bit_field_); }
constexpr uint32_t ref_index() const {
#if V8_HAS_CXX14_CONSTEXPR
DCHECK(kind() == kRef || kind() == kOptRef);
DCHECK(has_immediate());
#endif
return RefIndexField::decode(bit_field_);
}
......@@ -101,13 +110,18 @@ class ValueType {
return bit_field_ != other.bit_field_;
}
// TODO(7748): Extend this with eqref, struct and function subtyping.
// Keep up to date with funcref vs. anyref subtyping.
bool IsSubTypeOf(ValueType other) const {
return (*this == other) ||
(kind() == kNullRef && other.kind() == kAnyRef) ||
(kind() == kFuncRef && other.kind() == kAnyRef) ||
(kind() == kExnRef && other.kind() == kAnyRef) ||
(kind() == kNullRef && other.kind() == kFuncRef) ||
(kind() == kNullRef && other.kind() == kExnRef);
(kind() == kNullRef &&
(other.kind() == kAnyRef || other.kind() == kFuncRef ||
other.kind() == kExnRef || other.kind() == kOptRef)) ||
(other.kind() == kAnyRef &&
(kind() == kFuncRef || kind() == kExnRef || kind() == kOptRef ||
kind() == kRef)) ||
(kind() == kRef && other.kind() == kOptRef &&
ref_index() == other.ref_index());
}
bool IsReferenceType() const {
......@@ -116,17 +130,21 @@ class ValueType {
kind() == kEqRef;
}
// TODO(7748): Extend this with eqref, struct and function subtyping.
// Keep up to date with funcref vs. anyref subtyping.
static ValueType CommonSubType(ValueType a, ValueType b) {
if (a.kind() == b.kind()) return a;
if (a == b) return a;
// The only sub type of any value type is {bot}.
if (!a.IsReferenceType() || !b.IsReferenceType()) {
return ValueType(kBottom);
}
if (a.IsSubTypeOf(b)) return a;
if (b.IsSubTypeOf(a)) return b;
// {a} and {b} are not each other's subtype. The biggest sub-type of all
// reference types is {kWasmNullRef}.
return ValueType(kNullRef);
// {a} and {b} are not each other's subtype.
// If one of them is not nullable, their greatest subtype is bottom,
// otherwise null.
return (a.kind() == kRef || b.kind() == kRef) ? ValueType(kBottom)
: ValueType(kNullRef);
}
ValueTypeCode value_type_code() const {
......
......@@ -408,7 +408,7 @@ void WasmModuleBuilder::SetHasSharedMemory() { has_shared_memory_ = true; }
namespace {
// Emits the binary encoding of {type}: the one-byte type opcode, followed by
// a LEB-encoded type index for (opt)ref types, which carry an immediate.
void WriteValueType(ZoneBuffer* buffer, const ValueType& type) {
  buffer->write_u8(type.value_type_code());
  if (type.has_immediate()) {
    buffer->write_u32v(type.ref_index());
  }
}
......
......@@ -41,6 +41,7 @@ WASM_EXEC_TEST(BasicStruct) {
type_builder.AddField(kWasmI32);
int32_t type_index = builder->AddStructType(type_builder.Build());
ValueType kRefTypes[] = {ValueType(ValueType::kRef, type_index)};
ValueType kOptRefType = ValueType(ValueType::kOptRef, type_index);
FunctionSig sig_q_v(1, 0, kRefTypes);
WasmFunctionBuilder* f = builder->AddFunction(sigs.i_v());
......@@ -65,6 +66,19 @@ WASM_EXEC_TEST(BasicStruct) {
kExprEnd};
h->EmitCode(h_code, sizeof(h_code));
WasmFunctionBuilder* j = builder->AddFunction(sigs.i_v());
uint32_t local_index = j->AddLocal(kOptRefType);
uint32_t field_index = 0;
j->builder()->AddExport(CStrVector("j"), j);
byte i_code[] = {
WASM_SET_LOCAL(local_index,
WASM_STRUCT_NEW(type_index, WASM_I32V(42), WASM_I32V(64))),
WASM_STRUCT_SET(type_index, field_index, WASM_GET_LOCAL(local_index),
WASM_I32V(-99)),
WASM_STRUCT_GET(type_index, field_index, WASM_GET_LOCAL(local_index)),
kExprEnd};
j->EmitCode(i_code, sizeof(i_code));
ZoneBuffer buffer(&zone);
builder->WriteTo(&buffer);
......@@ -92,6 +106,9 @@ WASM_EXEC_TEST(BasicStruct) {
Execution::Call(isolate, h_export, undefined, 0, nullptr)
.ToHandleChecked();
CHECK(ref_result->IsWasmStruct());
CHECK_EQ(-99, testing::CallWasmFunctionForTesting(isolate, instance, &thrower,
"j", 0, nullptr));
}
} // namespace test_gc
......
......@@ -415,6 +415,9 @@ inline WasmOpcode LoadStoreOpcodeOf(MachineType type, bool store) {
#define WASM_STRUCT_GET(typeidx, fieldidx, ...) \
__VA_ARGS__, WASM_GC_OP(kExprStructGet), static_cast<byte>(typeidx), \
static_cast<byte>(fieldidx)
// Emits struct.set bytecode: the argument expressions (struct reference, then
// the new field value), followed by the struct type index and field index
// immediates.
#define WASM_STRUCT_SET(typeidx, fieldidx, ...)                        \
  __VA_ARGS__, WASM_GC_OP(kExprStructSet), static_cast<byte>(typeidx), \
      static_cast<byte>(fieldidx)
// Pass: sig_index, ...args, func_index
#define WASM_CALL_INDIRECT(sig_index, ...) \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment