Commit 9e7ada8e authored by Manos Koukoutos, committed by V8 LUCI CQ

[wasm-gc][turbofan] Introduce wasm-gc-specific nodes

We introduce wasm-gc-specific nodes into the Turbofan IR, corresponding
to the wasm opcodes ref.as_non_null, ref.is_null, ref.null, rtt.canon,
ref.test, and ref.cast.
We define them as simplified operators; they are lowered by a dedicated
phase in the wasm pipeline.
Optimizations based on these nodes will be introduced later.
Note: We rename ObjectReferenceKnowledge to WasmTypeCheckConfig and move
it to a separate file, as it is now used in simplified-operator as well.

Bug: v8:7748
Change-Id: Iceaf04eca089b08bad794f567359196e8ba78d93
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3654102
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80746}
parent 8e47a2c6
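For orientation, here is a brief editorial sketch in plain C++ (not part of the commit; the types and names are invented for illustration) of the check that the new WasmTypeCheck/WasmTypeCast lowering further down implements: nulls are handled according to object_can_be_null, an exact rtt match is taken as a fast path, and otherwise the object's supertype list is indexed at the target rtt's depth, with the bounds check skipped when that depth is statically known to fit within the minimum supertype array size.

// Editorial sketch only: models the check that ReduceWasmTypeCheck lowers to,
// using plain data structures instead of TurboFan nodes.
#include <cstdint>
#include <vector>

struct Rtt {
  uint32_t depth;                      // depth of this type in its supertype chain
  std::vector<const Rtt*> supertypes;  // supertypes[i] = strict ancestor at depth i
};

// Returns the value a ref.test (WasmTypeCheck) would produce.
bool TypeCheck(const Rtt* object_rtt, const Rtt* target_rtt,
               bool object_is_null, bool object_can_be_null) {
  if (object_can_be_null && object_is_null) return false;  // null never matches
  if (object_rtt == target_rtt) return true;               // fast path: exact match
  // Bounds check; the real lowering omits it when the target depth is below
  // the minimum supertype array size.
  if (target_rtt->depth >= object_rtt->supertypes.size()) return false;
  return object_rtt->supertypes[target_rtt->depth] == target_rtt;
}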
......@@ -2442,12 +2442,6 @@ filegroup(
"src/asmjs/asm-scanner.h",
"src/asmjs/asm-types.cc",
"src/asmjs/asm-types.h",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-loop-peeling.h",
"src/debug/debug-wasm-objects.cc",
"src/debug/debug-wasm-objects.h",
"src/debug/debug-wasm-objects-inl.h",
......@@ -2859,11 +2853,20 @@ filegroup(
] + select({
":is_v8_enable_webassembly": [
"src/compiler/int64-lowering.cc",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler-definitions.h",
"src/compiler/wasm-compiler.cc",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-gc-lowering.cc",
"src/compiler/wasm-gc-lowering.h",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-inlining.h",
],
"//conditions:default": [],
}),
......
......@@ -3547,8 +3547,10 @@ v8_header_set("v8_internal_headers") {
"src/asmjs/asm-scanner.h",
"src/asmjs/asm-types.h",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler-definitions.h",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-gc-lowering.h",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-loop-peeling.h",
......@@ -4047,6 +4049,7 @@ if (v8_enable_webassembly) {
"src/compiler/int64-lowering.cc",
"src/compiler/wasm-compiler.cc",
"src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-gc-lowering.cc",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-loop-peeling.cc",
......
......@@ -12,6 +12,7 @@ include_rules = [
"-src/compiler",
"+src/compiler/pipeline.h",
"+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler-definitions.h",
"+src/compiler/wasm-compiler.h",
"-src/heap",
"+src/heap/basic-memory-chunk.h",
......
......@@ -504,6 +504,14 @@
V(SpeculativeBigIntAsUintN) \
V(SpeculativeBigIntNegate)
#define SIMPLIFIED_WASM_OP_LIST(V) \
V(AssertNotNull) \
V(IsNull) \
V(Null) \
V(RttCanon) \
V(WasmTypeCast) \
V(WasmTypeCheck)
#define SIMPLIFIED_OP_LIST(V) \
SIMPLIFIED_CHANGE_OP_LIST(V) \
SIMPLIFIED_CHECKED_OP_LIST(V) \
......@@ -516,6 +524,7 @@
SIMPLIFIED_SPECULATIVE_NUMBER_UNOP_LIST(V) \
SIMPLIFIED_SPECULATIVE_BIGINT_UNOP_LIST(V) \
SIMPLIFIED_SPECULATIVE_BIGINT_BINOP_LIST(V) \
IF_WASM(SIMPLIFIED_WASM_OP_LIST, V) \
SIMPLIFIED_OTHER_OP_LIST(V)
// Opcodes for Machine-level operators.
......
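The V(...) lists modified above are X-macros: each call site supplies its own V macro to stamp out enum entries, switch cases, and similar boilerplate. A self-contained editorial sketch (invented names, not V8 code) of how such a list expands:

#include <cstdio>

#define SIMPLIFIED_WASM_OP_LIST(V) \
  V(AssertNotNull)                 \
  V(IsNull)                        \
  V(Null)                          \
  V(RttCanon)                      \
  V(WasmTypeCast)                  \
  V(WasmTypeCheck)

// Expand the list once into enum values...
enum class WasmOp {
#define DECLARE_ENUM(Name) k##Name,
  SIMPLIFIED_WASM_OP_LIST(DECLARE_ENUM)
#undef DECLARE_ENUM
};

// ...and once into a name table, the same way IrOpcode and the typer use it.
const char* WasmOpName(WasmOp op) {
  switch (op) {
#define NAME_CASE(Name) \
  case WasmOp::k##Name: \
    return #Name;
    SIMPLIFIED_WASM_OP_LIST(NAME_CASE)
#undef NAME_CASE
  }
  return "unknown";
}

int main() { std::printf("%s\n", WasmOpName(WasmOp::kWasmTypeCheck)); }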
......@@ -104,6 +104,7 @@
#if V8_ENABLE_WEBASSEMBLY
#include "src/compiler/wasm-compiler.h"
#include "src/compiler/wasm-escape-analysis.h"
#include "src/compiler/wasm-gc-lowering.h"
#include "src/compiler/wasm-inlining.h"
#include "src/compiler/wasm-loop-peeling.h"
#include "src/wasm/function-body-decoder.h"
......@@ -2044,6 +2045,22 @@ struct TurboshaftRecreateSchedulePhase {
};
#if V8_ENABLE_WEBASSEMBLY
struct WasmGCLoweringPhase {
DECL_PIPELINE_PHASE_CONSTANTS(WasmGCLowering)
void Run(PipelineData* data, Zone* temp_zone) {
GraphReducer graph_reducer(
temp_zone, data->graph(), &data->info()->tick_counter(), data->broker(),
data->jsgraph()->Dead(), data->observe_node_manager());
WasmGCLowering lowering(&graph_reducer, data->mcgraph());
DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
data->common(), temp_zone);
AddReducer(data, &graph_reducer, &lowering);
AddReducer(data, &graph_reducer, &dead_code_elimination);
graph_reducer.ReduceGraph();
}
};
struct WasmOptimizationPhase {
DECL_PIPELINE_PHASE_CONSTANTS(WasmOptimization)
......@@ -3239,6 +3256,11 @@ void Pipeline::GenerateCodeForWasmFunction(
}
const bool is_asm_js = is_asmjs_module(module);
if (FLAG_experimental_wasm_gc) {
pipeline.Run<WasmGCLoweringPhase>();
pipeline.RunPrintAndVerify(WasmGCLoweringPhase::phase_name(), true);
}
if (FLAG_wasm_opt || is_asm_js) {
pipeline.Run<WasmOptimizationPhase>(is_asm_js);
pipeline.RunPrintAndVerify(WasmOptimizationPhase::phase_name(), true);
......
......@@ -16,6 +16,10 @@
#include "src/objects/name.h"
#include "src/objects/objects-inl.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/compiler/wasm-compiler-definitions.h"
#endif
namespace v8 {
namespace internal {
namespace compiler {
......@@ -1141,6 +1145,31 @@ struct SimplifiedOperatorGlobalCache final {
};
LoadStackArgumentOperator kLoadStackArgument;
#if V8_ENABLE_WEBASSEMBLY
struct IsNullOperator final : public Operator {
IsNullOperator()
: Operator(IrOpcode::kIsNull, Operator::kPure, "IsNull", 1, 0, 0, 1, 0,
0) {}
};
IsNullOperator kIsNull;
struct NullOperator final : public Operator {
NullOperator()
: Operator(IrOpcode::kNull, Operator::kPure, "Null", 0, 0, 0, 1, 0, 0) {
}
};
NullOperator kNull;
struct AssertNotNullOperator final : public Operator {
AssertNotNullOperator()
: Operator(
IrOpcode::kAssertNotNull,
Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
"AssertNotNull", 1, 1, 1, 1, 0, 1) {}
};
AssertNotNullOperator kAssertNotNull;
#endif
#define SPECULATIVE_NUMBER_BINOP(Name) \
template <NumberOperationHint kHint> \
struct Name##Operator final : public Operator1<NumberOperationHint> { \
......@@ -1303,6 +1332,36 @@ const Operator* SimplifiedOperatorBuilder::VerifyType() {
"VerifyType", 1, 0, 0, 1, 0, 0);
}
#if V8_ENABLE_WEBASSEMBLY
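// Note: the trailing integer arguments to Operator/Operator1 below are node
// input/output counts, in the order (value in, effect in, control in,
// value out, effect out, control out); e.g. WasmTypeCheck takes two value
// inputs (object, rtt) plus one effect and one control input, and produces
// one value, one effect, and one control output.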
const Operator* SimplifiedOperatorBuilder::WasmTypeCheck(
WasmTypeCheckConfig config) {
return zone_->New<Operator1<WasmTypeCheckConfig>>(
IrOpcode::kWasmTypeCheck, Operator::kEliminatable | Operator::kIdempotent,
"WasmTypeCheck", 2, 1, 1, 1, 1, 1, config);
}
const Operator* SimplifiedOperatorBuilder::WasmTypeCast(
WasmTypeCheckConfig config) {
return zone_->New<Operator1<WasmTypeCheckConfig>>(
IrOpcode::kWasmTypeCast,
Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
"WasmTypeCast", 2, 1, 1, 1, 1, 1, config);
}
const Operator* SimplifiedOperatorBuilder::RttCanon(int index) {
return zone()->New<Operator1<int>>(IrOpcode::kRttCanon, Operator::kPure,
"RttCanon", 0, 0, 0, 1, 0, 0, index);
}
const Operator* SimplifiedOperatorBuilder::Null() { return &cache_.kNull; }
const Operator* SimplifiedOperatorBuilder::AssertNotNull() {
return &cache_.kAssertNotNull;
}
const Operator* SimplifiedOperatorBuilder::IsNull() { return &cache_.kIsNull; }
#endif // V8_ENABLE_WEBASSEMBLY
const Operator* SimplifiedOperatorBuilder::CheckIf(
DeoptimizeReason reason, const FeedbackSource& feedback) {
if (!feedback.IsValid()) {
......
......@@ -36,9 +36,10 @@ class Zone;
namespace compiler {
// Forward declarations.
class CallDescriptor;
class Operator;
struct SimplifiedOperatorGlobalCache;
class CallDescriptor;
struct WasmTypeCheckConfig;
enum BaseTaggedness : uint8_t { kUntaggedBase, kTaggedBase };
......@@ -1060,6 +1061,15 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
// SimplifiedLowering.
const Operator* VerifyType();
#if V8_ENABLE_WEBASSEMBLY
const Operator* AssertNotNull();
const Operator* IsNull();
const Operator* Null();
const Operator* RttCanon(int index);
const Operator* WasmTypeCheck(WasmTypeCheckConfig config);
const Operator* WasmTypeCast(WasmTypeCheckConfig config);
#endif
const Operator* DateNow();
// Represents the inputs necessary to construct a fast and a slow API call.
......
......@@ -124,6 +124,7 @@ class Typer::Visitor : public Reducer {
DECLARE_IMPOSSIBLE_CASE(End)
SIMPLIFIED_CHANGE_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
SIMPLIFIED_CHECKED_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
IF_WASM(SIMPLIFIED_WASM_OP_LIST, DECLARE_IMPOSSIBLE_CASE)
MACHINE_SIMD_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
MACHINE_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
#undef DECLARE_IMPOSSIBLE_CASE
......
......@@ -1639,6 +1639,14 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
CheckTypeIs(node, Type::Any());
CheckValueInputIs(node, 0, Type::Any()); // callee
break;
case IrOpcode::kWasmTypeCheck:
case IrOpcode::kWasmTypeCast:
case IrOpcode::kRttCanon:
case IrOpcode::kNull:
case IrOpcode::kIsNull:
case IrOpcode::kAssertNotNull:
// TODO(manoskouk): What are the constraints here?
break;
#endif // V8_ENABLE_WEBASSEMBLY
// Machine operators
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif // !V8_ENABLE_WEBASSEMBLY
#ifndef V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_
#define V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_
#include <cstdint>
#include <ostream>
#include "src/base/functional.h"
namespace v8 {
namespace internal {
namespace compiler {
struct WasmTypeCheckConfig {
bool object_can_be_null;
uint8_t rtt_depth;
};
V8_INLINE std::ostream& operator<<(std::ostream& os,
WasmTypeCheckConfig const& p) {
return os << (p.object_can_be_null ? "nullable" : "non-nullable")
<< ", depth=" << static_cast<int>(p.rtt_depth);
}
V8_INLINE size_t hash_value(WasmTypeCheckConfig const& p) {
return base::hash_combine(p.object_can_be_null, p.rtt_depth);
}
V8_INLINE bool operator==(const WasmTypeCheckConfig& p1,
const WasmTypeCheckConfig& p2) {
return p1.object_can_be_null == p2.object_can_be_null &&
p1.rtt_depth == p2.rtt_depth;
}
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_
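As an editorial aside, a small usage sketch of the struct above; Example() is hypothetical and assumes compilation inside the V8 tree with webassembly enabled:

#include <cassert>
#include <cstddef>
#include <iostream>

#include "src/compiler/wasm-compiler-definitions.h"  // the new header above

void Example() {
  using v8::internal::compiler::WasmTypeCheckConfig;
  WasmTypeCheckConfig a{/*object_can_be_null=*/true, /*rtt_depth=*/2};
  WasmTypeCheckConfig b{true, 2};
  assert(a == b);                 // operator== compares both fields
  std::cout << a << "\n";         // prints "nullable, depth=2"
  std::size_t h = hash_value(a);  // found via ADL; used to key parameterized operators
  (void)h;
}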
This diff is collapsed.
......@@ -45,14 +45,12 @@ enum class TrapId : uint32_t;
struct Int64LoweringSpecialCase;
template <size_t VarCount>
class GraphAssemblerLabel;
struct WasmTypeCheckConfig;
} // namespace compiler
namespace wasm {
class AssemblerBufferCache;
struct DecodeStruct;
// Expose {Node} and {Graph} opaquely as {wasm::TFNode} and {wasm::TFGraph}.
using TFNode = compiler::Node;
using TFGraph = compiler::MachineGraph;
class WasmCode;
class WasmFeatures;
class WireBytesStorage;
......@@ -224,10 +222,6 @@ class WasmGraphBuilder {
kWasmApiFunctionRefMode,
kNoSpecialParameterMode
};
struct ObjectReferenceKnowledge {
bool object_can_be_null;
uint8_t rtt_depth;
};
enum EnforceBoundsCheck : bool { // --
kNeedsBoundsCheck = true,
kCanOmitBoundsCheck = false
......@@ -513,33 +507,33 @@ class WasmGraphBuilder {
Node* I31GetU(Node* input);
Node* RttCanon(uint32_t type_index);
Node* RefTest(Node* object, Node* rtt, ObjectReferenceKnowledge config);
Node* RefCast(Node* object, Node* rtt, ObjectReferenceKnowledge config,
Node* RefTest(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* RefCast(Node* object, Node* rtt, WasmTypeCheckConfig config,
wasm::WasmCodePosition position);
void BrOnCast(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnCast(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsData(Node* object, bool object_can_be_null);
Node* RefAsData(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnData(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnData(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsFunc(Node* object, bool object_can_be_null);
Node* RefAsFunc(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnFunc(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnFunc(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsArray(Node* object, bool object_can_be_null);
Node* RefAsArray(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnArray(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnArray(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsI31(Node* object);
Node* RefAsI31(Node* object, wasm::WasmCodePosition position);
void BrOnI31(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnI31(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
......@@ -700,6 +694,8 @@ class WasmGraphBuilder {
Node* IsNull(Node* object);
Node* AssertNotNull(Node* object, wasm::WasmCodePosition position);
void GetGlobalBaseAndOffset(const wasm::WasmGlobal&, Node** base_node,
Node** offset_node);
......@@ -711,9 +707,9 @@ class WasmGraphBuilder {
};
// This type is used to collect control/effect nodes we need to merge at the
// end of BrOn* functions. Nodes are collected in {TypeCheck} etc. by calling
// the passed callbacks succeed_if, fail_if and fail_if_not. We have up to 5
// control nodes to merge; the EffectPhi needs an additional input.
// end of BrOn* functions. Nodes are collected by calling the passed callbacks
// succeed_if, fail_if and fail_if_not. We have up to 5 control nodes to
// merge; the EffectPhi needs an additional input.
using SmallNodeVector = base::SmallVector<Node*, 6>;
Callbacks TestCallbacks(GraphAssemblerLabel<1>* label);
......@@ -724,8 +720,6 @@ class WasmGraphBuilder {
SmallNodeVector& match_controls,
SmallNodeVector& match_effects);
void TypeCheck(Node* object, Node* rtt, ObjectReferenceKnowledge config,
bool null_succeeds, Callbacks callbacks);
void DataCheck(Node* object, bool object_can_be_null, Callbacks callbacks);
void ManagedObjectInstanceCheck(Node* object, bool object_can_be_null,
InstanceType instance_type,
......
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/wasm-gc-lowering.h"
#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/opcodes.h"
#include "src/compiler/operator.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/compiler/wasm-graph-assembler.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"
namespace v8 {
namespace internal {
namespace compiler {
WasmGCLowering::WasmGCLowering(Editor* editor, MachineGraph* mcgraph)
: AdvancedReducer(editor),
gasm_(mcgraph, mcgraph->zone()),
dead_(mcgraph->Dead()),
instance_node_(nullptr) {
// Find and store the instance node.
for (Node* start_use : mcgraph->graph()->start()->uses()) {
if (start_use->opcode() == IrOpcode::kParameter &&
ParameterIndexOf(start_use->op()) == 0) {
instance_node_ = start_use;
break;
}
}
DCHECK_NOT_NULL(instance_node_);
}
Reduction WasmGCLowering::Reduce(Node* node) {
switch (node->opcode()) {
case IrOpcode::kWasmTypeCheck:
return ReduceWasmTypeCheck(node);
case IrOpcode::kWasmTypeCast:
return ReduceWasmTypeCast(node);
case IrOpcode::kAssertNotNull:
return ReduceAssertNotNull(node);
case IrOpcode::kNull:
return ReduceNull(node);
case IrOpcode::kIsNull:
return ReduceIsNull(node);
case IrOpcode::kRttCanon:
return ReduceRttCanon(node);
default:
return NoChange();
}
}
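// Loads the canonical null value: the isolate root is read from the instance
// object, and the kNullValue root slot is read relative to it, so the
// lowering does not need direct access to the isolate.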
Node* WasmGCLowering::Null() {
Node* isolate_root = gasm_.LoadImmutable(
MachineType::Pointer(), instance_node_,
WasmInstanceObject::kIsolateRootOffset - kHeapObjectTag);
return gasm_.LoadImmutable(
MachineType::Pointer(), isolate_root,
IsolateData::root_slot_offset(RootIndex::kNullValue));
}
// TODO(manoskouk): Use the Callbacks infrastructure from wasm-compiler.h to
// unify all check/cast implementations.
// TODO(manoskouk): Find a way to optimize branches on typechecks.
Reduction WasmGCLowering::ReduceWasmTypeCheck(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kWasmTypeCheck);
Node* object = node->InputAt(0);
Node* rtt = node->InputAt(1);
Node* effect_input = NodeProperties::GetEffectInput(node);
Node* control_input = NodeProperties::GetControlInput(node);
auto config = OpParameter<WasmTypeCheckConfig>(node->op());
int rtt_depth = config.rtt_depth;
bool object_can_be_null = config.object_can_be_null;
gasm_.InitializeEffectControl(effect_input, control_input);
auto end_label = gasm_.MakeLabel(MachineRepresentation::kWord32);
if (object_can_be_null) {
gasm_.GotoIf(gasm_.TaggedEqual(object, Null()), &end_label,
BranchHint::kFalse, gasm_.Int32Constant(0));
}
Node* map = gasm_.LoadMap(object);
// First, check if types happen to be equal. This has been shown to give large
// speedups.
gasm_.GotoIf(gasm_.TaggedEqual(map, rtt), &end_label, BranchHint::kTrue,
gasm_.Int32Constant(1));
Node* type_info = gasm_.LoadWasmTypeInfo(map);
Node* supertypes = gasm_.LoadSupertypes(type_info);
DCHECK_GE(rtt_depth, 0);
Node* rtt_depth_node = gasm_.IntPtrConstant(rtt_depth);
// If the depth of the rtt is known to be less than the minimum supertype
// array length, we can access the supertype without bounds-checking the
// supertype array.
if (static_cast<uint32_t>(rtt_depth) >= wasm::kMinimumSupertypeArraySize) {
Node* supertypes_length = gasm_.BuildChangeSmiToIntPtr(
gasm_.LoadFixedArrayLengthAsSmi(supertypes));
gasm_.GotoIfNot(gasm_.UintLessThan(rtt_depth_node, supertypes_length),
&end_label, BranchHint::kTrue, gasm_.Int32Constant(0));
}
Node* maybe_match = gasm_.LoadImmutableFixedArrayElement(
supertypes, rtt_depth_node, MachineType::TaggedPointer());
gasm_.Goto(&end_label, gasm_.TaggedEqual(maybe_match, rtt));
gasm_.Bind(&end_label);
ReplaceWithValue(node, end_label.PhiAt(0), gasm_.effect(), gasm_.control());
node->Kill();
return Replace(end_label.PhiAt(0)); // Meaningless argument.
}
Reduction WasmGCLowering::ReduceWasmTypeCast(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kWasmTypeCast);
Node* object = node->InputAt(0);
Node* rtt = node->InputAt(1);
Node* effect_input = NodeProperties::GetEffectInput(node);
Node* control_input = NodeProperties::GetControlInput(node);
auto config = OpParameter<WasmTypeCheckConfig>(node->op());
int rtt_depth = config.rtt_depth;
bool object_can_be_null = config.object_can_be_null;
gasm_.InitializeEffectControl(effect_input, control_input);
auto end_label = gasm_.MakeLabel();
if (object_can_be_null) {
gasm_.GotoIf(gasm_.TaggedEqual(object, Null()), &end_label,
BranchHint::kFalse);
}
Node* map = gasm_.LoadMap(object);
// First, check if types happen to be equal. This has been shown to give large
// speedups.
gasm_.GotoIf(gasm_.TaggedEqual(map, rtt), &end_label, BranchHint::kTrue);
Node* type_info = gasm_.LoadWasmTypeInfo(map);
Node* supertypes = gasm_.LoadSupertypes(type_info);
DCHECK_GE(rtt_depth, 0);
Node* rtt_depth_node = gasm_.IntPtrConstant(rtt_depth);
// If the depth of the rtt is known to be less than the minimum supertype
// array length, we can access the supertype without bounds-checking the
// supertype array.
if (static_cast<uint32_t>(rtt_depth) >= wasm::kMinimumSupertypeArraySize) {
Node* supertypes_length = gasm_.BuildChangeSmiToIntPtr(
gasm_.LoadFixedArrayLengthAsSmi(supertypes));
gasm_.TrapUnless(gasm_.UintLessThan(rtt_depth_node, supertypes_length),
TrapId::kTrapIllegalCast);
}
Node* maybe_match = gasm_.LoadImmutableFixedArrayElement(
supertypes, rtt_depth_node, MachineType::TaggedPointer());
gasm_.TrapUnless(gasm_.TaggedEqual(maybe_match, rtt),
TrapId::kTrapIllegalCast);
gasm_.Goto(&end_label);
gasm_.Bind(&end_label);
ReplaceWithValue(node, object, gasm_.effect(), gasm_.control());
node->Kill();
return Replace(object);
}
Reduction WasmGCLowering::ReduceAssertNotNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAssertNotNull);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* object = NodeProperties::GetValueInput(node, 0);
gasm_.InitializeEffectControl(effect, control);
gasm_.TrapIf(gasm_.TaggedEqual(object, Null()), TrapId::kTrapNullDereference);
ReplaceWithValue(node, object, gasm_.effect(), gasm_.control());
node->Kill();
return Replace(object);
}
Reduction WasmGCLowering::ReduceNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kNull);
return Replace(Null());
}
Reduction WasmGCLowering::ReduceIsNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kIsNull);
Node* object = NodeProperties::GetValueInput(node, 0);
return Replace(gasm_.TaggedEqual(object, Null()));
}
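// RttCanon(type_index) lowers to a load of the canonicalized map for that
// type index from the instance's managed-object-maps FixedArray.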
Reduction WasmGCLowering::ReduceRttCanon(Node* node) {
int type_index = OpParameter<int>(node->op());
Node* maps_list = gasm_.LoadImmutable(
MachineType::TaggedPointer(), instance_node_,
WasmInstanceObject::kManagedObjectMapsOffset - kHeapObjectTag);
return Replace(gasm_.LoadImmutable(
MachineType::TaggedPointer(), maps_list,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(type_index)));
}
} // namespace compiler
} // namespace internal
} // namespace v8
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif // !V8_ENABLE_WEBASSEMBLY
#ifndef V8_COMPILER_WASM_GC_LOWERING_H_
#define V8_COMPILER_WASM_GC_LOWERING_H_
#include "src/compiler/graph-reducer.h"
#include "src/compiler/wasm-graph-assembler.h"
namespace v8 {
namespace internal {
namespace compiler {
class MachineGraph;
class WasmGraphAssembler;
class WasmGCLowering final : public AdvancedReducer {
public:
WasmGCLowering(Editor* editor, MachineGraph* mcgraph);
const char* reducer_name() const override { return "WasmGCLowering"; }
Reduction Reduce(Node* node) final;
private:
Reduction ReduceWasmTypeCheck(Node* node);
Reduction ReduceWasmTypeCast(Node* node);
Reduction ReduceAssertNotNull(Node* node);
Reduction ReduceNull(Node* node);
Reduction ReduceIsNull(Node* node);
Reduction ReduceRttCanon(Node* node);
Node* Null();
WasmGraphAssembler gasm_;
Node* dead_;
Node* instance_node_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_WASM_GC_LOWERING_H_
......@@ -6,6 +6,7 @@
#include "src/compiler/diamond.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-objects.h"
......@@ -347,6 +348,31 @@ Node* WasmGraphAssembler::IsDataRefMap(Node* map) {
Int32Constant(LAST_WASM_OBJECT_TYPE - FIRST_WASM_OBJECT_TYPE));
}
Node* WasmGraphAssembler::WasmTypeCheck(Node* object, Node* rtt,
WasmTypeCheckConfig config) {
return AddNode(graph()->NewNode(simplified_.WasmTypeCheck(config), object,
rtt, effect(), control()));
}
Node* WasmGraphAssembler::WasmTypeCast(Node* object, Node* rtt,
WasmTypeCheckConfig config) {
return AddNode(graph()->NewNode(simplified_.WasmTypeCast(config), object, rtt,
effect(), control()));
}
Node* WasmGraphAssembler::Null() {
return AddNode(graph()->NewNode(simplified_.Null()));
}
Node* WasmGraphAssembler::IsNull(Node* object) {
return AddNode(graph()->NewNode(simplified_.IsNull(), object));
}
Node* WasmGraphAssembler::AssertNotNull(Node* object) {
return AddNode(graph()->NewNode(simplified_.AssertNotNull(), object, effect(),
control()));
}
// Generic HeapObject helpers.
Node* WasmGraphAssembler::HasInstanceType(Node* heap_object,
......
......@@ -240,7 +240,17 @@ class WasmGraphAssembler : public GraphAssembler {
Node* IsDataRefMap(Node* map);
// Generic HeapObject helpers.
Node* WasmTypeCheck(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* WasmTypeCast(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* Null();
Node* IsNull(Node* object);
Node* AssertNotNull(Node* object);
// Generic helpers.
Node* HasInstanceType(Node* heap_object, InstanceType type);
......
......@@ -378,6 +378,7 @@ class RuntimeCallTimer final {
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmGCLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmInlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopPeeling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopUnrolling) \
......
......@@ -4,6 +4,7 @@
#include "src/wasm/graph-builder-interface.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/compiler/wasm-compiler.h"
#include "src/flags/flags.h"
#include "src/handles/handles.h"
......@@ -25,6 +26,8 @@ namespace wasm {
namespace {
using TFNode = compiler::Node;
// An SsaEnv environment carries the current local variable renaming
// as well as the current effect and control dependency in the TF graph.
// It maintains a control state that tracks whether the environment
......@@ -1179,12 +1182,12 @@ class WasmGraphBuildingInterface {
result->node = builder_->RttCanon(type_index);
}
using StaticKnowledge = compiler::WasmGraphBuilder::ObjectReferenceKnowledge;
using WasmTypeCheckConfig = v8::internal::compiler::WasmTypeCheckConfig;
StaticKnowledge ComputeStaticKnowledge(ValueType object_type,
ValueType rtt_type,
const WasmModule* module) {
StaticKnowledge result;
WasmTypeCheckConfig ComputeWasmTypeCheckConfig(ValueType object_type,
ValueType rtt_type,
const WasmModule* module) {
WasmTypeCheckConfig result;
result.object_can_be_null = object_type.is_nullable();
DCHECK(object_type.is_object_reference()); // Checked by validation.
// In the bottom case, the result is irrelevant.
......@@ -1197,27 +1200,27 @@ class WasmGraphBuildingInterface {
void RefTest(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* result) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
result->node = builder_->RefTest(object.node, rtt.node, config);
}
void RefCast(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* result) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
result->node =
builder_->RefCast(object.node, rtt.node, config, decoder->position());
}
template <void (compiler::WasmGraphBuilder::*branch_function)(
TFNode*, TFNode*, StaticKnowledge, TFNode**, TFNode**, TFNode**,
TFNode*, TFNode*, WasmTypeCheckConfig, TFNode**, TFNode**, TFNode**,
TFNode**)>
void BrOnCastAbs(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* forwarding_value, uint32_t br_depth,
bool branch_on_match) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
SsaEnv* branch_env = Split(decoder->zone(), ssa_env_);
SsaEnv* no_branch_env = Steal(decoder->zone(), ssa_env_);
no_branch_env->SetNotMerged();
......