enum dnnl::graph::op::kind
Kinds of operations.
#include <dnnl_graph.hpp>

enum kind {
    Abs = dnnl_graph_op_abs,
    AbsBackward = dnnl_graph_op_abs_backward,
    Add = dnnl_graph_op_add,
    AvgPool = dnnl_graph_op_avg_pool,
    AvgPoolBackward = dnnl_graph_op_avg_pool_backward,
    BatchNormForwardTraining = dnnl_graph_op_batch_norm_forward_training,
    BatchNormInference = dnnl_graph_op_batch_norm_inference,
    BatchNormTrainingBackward = dnnl_graph_op_batch_norm_backward,
    BiasAdd = dnnl_graph_op_bias_add,
    BiasAddBackward = dnnl_graph_op_bias_add_backward,
    Clamp = dnnl_graph_op_clamp,
    ClampBackward = dnnl_graph_op_clamp_backward,
    Concat = dnnl_graph_op_concat,
    Convolution = dnnl_graph_op_convolution,
    ConvolutionBackwardData = dnnl_graph_op_convolution_backward_data,
    ConvolutionBackwardWeights = dnnl_graph_op_convolution_backward_weights,
    ConvTranspose = dnnl_graph_op_conv_transpose,
    ConvTransposeBackwardData = dnnl_graph_op_conv_transpose_backward_data,
    ConvTransposeBackwardWeights = dnnl_graph_op_conv_transpose_backward_weights,
    Dequantize = dnnl_graph_op_dequantize,
    Divide = dnnl_graph_op_divide,
    DynamicDequantize = dnnl_graph_op_dynamic_dequantize,
    DynamicQuantize = dnnl_graph_op_dynamic_quantize,
    Elu = dnnl_graph_op_elu,
    EluBackward = dnnl_graph_op_elu_backward,
    End = dnnl_graph_op_end,
    Exp = dnnl_graph_op_exp,
    GELU = dnnl_graph_op_gelu,
    GELUBackward = dnnl_graph_op_gelu_backward,
    GroupNorm = dnnl_graph_op_group_norm,
    HardSigmoid = dnnl_graph_op_hard_sigmoid,
    HardSigmoidBackward = dnnl_graph_op_hard_sigmoid_backward,
    HardSwish = dnnl_graph_op_hard_swish,
    HardSwishBackward = dnnl_graph_op_hard_swish_backward,
    Interpolate = dnnl_graph_op_interpolate,
    InterpolateBackward = dnnl_graph_op_interpolate_backward,
    LayerNorm = dnnl_graph_op_layer_norm,
    LayerNormBackward = dnnl_graph_op_layer_norm_backward,
    LeakyReLU = dnnl_graph_op_leaky_relu,
    Log = dnnl_graph_op_log,
    LogSoftmax = dnnl_graph_op_log_softmax,
    LogSoftmaxBackward = dnnl_graph_op_log_softmax_backward,
    MatMul = dnnl_graph_op_matmul,
    Maximum = dnnl_graph_op_maximum,
    MaxPool = dnnl_graph_op_max_pool,
    MaxPoolBackward = dnnl_graph_op_max_pool_backward,
    Minimum = dnnl_graph_op_minimum,
    Mish = dnnl_graph_op_mish,
    MishBackward = dnnl_graph_op_mish_backward,
    Multiply = dnnl_graph_op_multiply,
    Pow = dnnl_graph_op_pow,
    PReLU = dnnl_graph_op_prelu,
    PReLUBackward = dnnl_graph_op_prelu_backward,
    Quantize = dnnl_graph_op_quantize,
    Reciprocal = dnnl_graph_op_reciprocal,
    ReduceL1 = dnnl_graph_op_reduce_l1,
    ReduceL2 = dnnl_graph_op_reduce_l2,
    ReduceMax = dnnl_graph_op_reduce_max,
    ReduceMean = dnnl_graph_op_reduce_mean,
    ReduceMin = dnnl_graph_op_reduce_min,
    ReduceProd = dnnl_graph_op_reduce_prod,
    ReduceSum = dnnl_graph_op_reduce_sum,
    ReLU = dnnl_graph_op_relu,
    ReLUBackward = dnnl_graph_op_relu_backward,
    Reorder = dnnl_graph_op_reorder,
    Round = dnnl_graph_op_round,
    Select = dnnl_graph_op_select,
    Sigmoid = dnnl_graph_op_sigmoid,
    SigmoidBackward = dnnl_graph_op_sigmoid_backward,
    SoftMax = dnnl_graph_op_softmax,
    SoftMaxBackward = dnnl_graph_op_softmax_backward,
    SoftPlus = dnnl_graph_op_softplus,
    SoftPlusBackward = dnnl_graph_op_softplus_backward,
    Sqrt = dnnl_graph_op_sqrt,
    SqrtBackward = dnnl_graph_op_sqrt_backward,
    Square = dnnl_graph_op_square,
    SquaredDifference = dnnl_graph_op_squared_difference,
    StaticReshape = dnnl_graph_op_static_reshape,
    StaticTranspose = dnnl_graph_op_static_transpose,
    Subtract = dnnl_graph_op_subtract,
    Tanh = dnnl_graph_op_tanh,
    TanhBackward = dnnl_graph_op_tanh_backward,
    TypeCast = dnnl_graph_op_type_cast,
    Wildcard = dnnl_graph_op_wildcard,
    LastSymbol = dnnl_graph_op_last_symbol,
};
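A value of this enum is passed to the dnnl::graph::op constructor to select the operation a graph node represents. The following minimal sketch (not part of this reference page) illustrates that usage, assuming the C++ Graph API classes graph, op, and logical_tensor declared in dnnl_graph.hpp; the tensor ids, shapes, and verbose name are illustrative choices, not prescribed values.

// Sketch: build a one-op graph whose node kind is op::kind::ReLU.
#include <dnnl_graph.hpp>

using namespace dnnl::graph;

int main() {
    // Logical tensors describing the ReLU input and output
    // (ids and shape here are illustrative assumptions).
    logical_tensor src {0, logical_tensor::data_type::f32, {1, 32, 16, 16},
            logical_tensor::layout_type::strided};
    logical_tensor dst {1, logical_tensor::data_type::f32, {1, 32, 16, 16},
            logical_tensor::layout_type::strided};

    // op::kind selects which operation this node represents.
    op relu_op(0, op::kind::ReLU, {src}, {dst}, "relu0");

    // Add the op to a graph, finalize it, and query partitions.
    graph g(dnnl::engine::kind::cpu);
    g.add_op(relu_op);
    g.finalize();
    auto partitions = g.get_partitions();
    return partitions.empty() ? 1 : 0;
}

Any other enumerator from the list above (for example op::kind::MatMul or op::kind::Convolution) is used the same way, together with the logical tensors and attributes that particular operation expects.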