A primitive to compute the common recurrent layer.
Functions

 - dnnl_primitive_attr_set_rnn_data_qparams(): sets quantization scale and shift for RNN data tensors.
 - dnnl_primitive_attr_set_rnn_weights_qparams(): sets quantization scales weights_scales for RNN weights tensors.
 - dnnl_vanilla_rnn_forward_desc_init(): initializes an RNN descriptor for forward propagation.
 - dnnl_vanilla_rnn_backward_desc_init(): initializes an RNN descriptor for backward propagation.
 - dnnl_lstm_forward_desc_init(): initializes an LSTM descriptor for forward propagation.
 - dnnl_lstm_backward_desc_init(): initializes an LSTM descriptor for backward propagation.
 - dnnl_gru_forward_desc_init(): initializes a GRU descriptor for forward propagation.
 - dnnl_gru_backward_desc_init(): initializes a GRU descriptor for backward propagation.
 - dnnl_lbr_gru_forward_desc_init(): initializes an LBR GRU descriptor for forward propagation.
 - dnnl_lbr_gru_backward_desc_init(): initializes an LBR GRU descriptor for backward propagation.

All functions return dnnl_status_t; the full prototypes are given below.
A primitive to compute the common recurrent layer.
dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_data_qparams(
        dnnl_primitive_attr_t attr,
        const float scale,
        const float shift)

Sets quantization scale and shift for RNN data tensors.
For performance reasons, the low precision configuration of the RNN primitive expects input activations to have the unsigned int8 data type. The scale and shift used to quantize floating-point data to unsigned integers must be passed to the RNN primitive using attributes. Example usage:
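The snippet below is a minimal sketch of the intended call sequence; the engine, the int8 RNN operation descriptor rnn_d, and the scale/shift values are placeholders assumed to be set up elsewhere, and error checking is omitted.

    // Quantization parameters for the activations (placeholder values).
    float scale = 63.f, shift = 64.f;

    // Create default primitive attributes and attach the data quantization
    // parameters to them.
    dnnl_primitive_attr_t rnn_attr;
    dnnl_primitive_attr_create(&rnn_attr);
    dnnl_primitive_attr_set_rnn_data_qparams(rnn_attr, scale, shift);

    // Pass the attributes when creating the primitive descriptor for the
    // int8 RNN operation descriptor rnn_d (assumed to exist), then clean up.
    dnnl_primitive_desc_t rnn_pd;
    dnnl_primitive_desc_create(&rnn_pd, &rnn_d, rnn_attr, engine, NULL);
    dnnl_primitive_attr_destroy(rnn_attr);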
dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_weights_qparams(
        dnnl_primitive_attr_t attr,
        dnnl_dim_t count,
        int mask,
        const float *weights_scales)

Sets quantization scales weights_scales for RNN weights tensors.
The low precision configuration of the RNN primitive expects input weights to have the signed int8 data type. The scales used to quantize floating-point data to signed integers must be passed to the RNN primitive using attributes. The mask argument defines the correspondence between the output tensor dimensions and the weights_scales array: set the i-th bit of mask to 1 to use a dedicated scaling factor for each slice of the output tensor over the i-th dimension, or set mask to 0 to use a common scaling factor for the whole output tensor. Example usage:
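A minimal sketch under assumed sizes: the weights are taken to have the 5D logical dimensions (layers, directions, input channels, gates, output channels), and per-gate, per-output-channel scales are attached by setting mask bits 3 and 4. The sizes are hypothetical and error checking is omitted.

    enum { N_GATES = 4, OC = 32 };           // hypothetical LSTM sizes
    float weights_scales[N_GATES * OC];
    /* ... fill weights_scales with the quantization factors ... */

    dnnl_primitive_attr_t attr;
    dnnl_primitive_attr_create(&attr);

    // Bits 3 and 4 of the mask select the gates and output-channel dimensions,
    // so count must equal dims[3] * dims[4] = N_GATES * OC (see the formula below).
    dnnl_primitive_attr_set_rnn_weights_qparams(attr, N_GATES * OC,
            (1 << 3) | (1 << 4), weights_scales);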
Note: There is no way to check that count corresponds to mask until an actual primitive descriptor is created, so it is the user's responsibility to set proper values. The following formula must hold:

\[count = \prod\limits_{d \in mask} output.dims[d]\]
dnnl_status_t DNNL_API dnnl_vanilla_rnn_forward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        const dnnl_alg_kind_t activation,
        const dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        unsigned flags,
        float alpha,
        float beta)

Initializes an RNN descriptor rnn_desc for forward propagation using prop_kind, activation, direction, and memory descriptors.
Note: If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.

Note: All memory descriptors except src_iter_desc are allowed to be initialized with the dnnl_format_kind_any value of format_kind.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used

Outputs:
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE), if prop_kind equals dnnl_forward_training
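As a usage illustration, the sketch below builds the memory descriptors for a single-layer, unidirectional tanh RNN with hypothetical sizes and initializes a forward training descriptor; src_iter_desc and dst_iter_desc are left NULL so the hidden state defaults to zero. Error checking is omitted.

    enum { T = 10, N = 32, C = 64, L = 1, D = 1, G = 1 };  // hypothetical sizes

    dnnl_dims_t src_dims = {T, N, C}, dst_dims = {T, N, C};
    dnnl_dims_t wei_dims = {L, D, C, G, C}, bias_dims = {L, D, G, C};

    dnnl_memory_desc_t src_md, wei_layer_md, wei_iter_md, bias_md, dst_md;
    dnnl_memory_desc_init_by_tag(&src_md, 3, src_dims, dnnl_f32, dnnl_tnc);
    dnnl_memory_desc_init_by_tag(&wei_layer_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&wei_iter_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);
    dnnl_memory_desc_init_by_tag(&dst_md, 3, dst_dims, dnnl_f32, dnnl_tnc);

    dnnl_rnn_desc_t rnn_d;
    dnnl_vanilla_rnn_forward_desc_init(&rnn_d, dnnl_forward_training,
            dnnl_eltwise_tanh, dnnl_unidirectional_left2right,
            &src_md, NULL, &wei_layer_md, &wei_iter_md, &bias_md,
            &dst_md, NULL, 0 /* flags */, 0.f /* alpha */, 0.f /* beta */);

    // For dnnl_forward_training, query the workspace memory descriptor from the
    // created primitive descriptor (dnnl_query_workspace_md) before execution.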
dnnl_status_t DNNL_API dnnl_vanilla_rnn_backward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        const dnnl_alg_kind_t activation,
        const dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        const dnnl_memory_desc_t *diff_src_layer_desc,
        const dnnl_memory_desc_t *diff_src_iter_desc,
        const dnnl_memory_desc_t *diff_weights_layer_desc,
        const dnnl_memory_desc_t *diff_weights_iter_desc,
        const dnnl_memory_desc_t *diff_bias_desc,
        const dnnl_memory_desc_t *diff_dst_layer_desc,
        const dnnl_memory_desc_t *diff_dst_iter_desc,
        unsigned flags,
        float alpha,
        float beta)

Initializes an RNN descriptor rnn_desc for backward propagation using prop_kind, activation, direction, and memory descriptors.
Note: All memory descriptors are allowed to be initialized with the dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - diff_dst_layer (DNNL_ARG_DIFF_DST_LAYER)
 - diff_dst_iter (DNNL_ARG_DIFF_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE)

Outputs:
 - diff_src_layer (DNNL_ARG_DIFF_SRC_LAYER)
 - diff_src_iter (DNNL_ARG_DIFF_SRC_ITER), if used
 - diff_weights_layer (DNNL_ARG_DIFF_WEIGHTS_LAYER)
 - diff_weights_iter (DNNL_ARG_DIFF_WEIGHTS_ITER)
 - diff_bias (DNNL_ARG_DIFF_BIAS), if used
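A sketch of the matching backward descriptor, assuming the memory descriptors from the forward example above; for brevity the diff descriptors simply reuse the forward ones (typical when the formats match), and the optional iteration descriptors stay NULL.

    dnnl_rnn_desc_t rnn_bwd_d;
    dnnl_vanilla_rnn_backward_desc_init(&rnn_bwd_d, dnnl_backward,
            dnnl_eltwise_tanh, dnnl_unidirectional_left2right,
            /* forward descriptors */ &src_md, NULL, &wei_layer_md, &wei_iter_md,
            &bias_md, &dst_md, NULL,
            /* diff descriptors */ &src_md, NULL, &wei_layer_md, &wei_iter_md,
            &bias_md, &dst_md, NULL,
            0 /* flags */, 0.f /* alpha */, 0.f /* beta */);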
dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *src_iter_c_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        const dnnl_memory_desc_t *dst_iter_c_desc,
        unsigned flags)

Initializes an LSTM descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.
Note: If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.

Note: All memory descriptors except src_iter_desc are allowed to be initialized with the dnnl_format_kind_any value of format_kind.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - src_iter_c (DNNL_ARG_SRC_ITER_C), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used

Outputs:
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - dst_iter_c (DNNL_ARG_DST_ITER_C), if used
 - workspace (DNNL_ARG_WORKSPACE), if prop_kind equals dnnl_forward_training
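A sketch with hypothetical sizes showing the two extra cell-state descriptors that distinguish LSTM from the other cells; error checking is omitted.

    enum { T = 10, N = 32, C = 64, L = 1, D = 1, G = 4 };   // 4 LSTM gates

    dnnl_dims_t act_dims  = {T, N, C};           // src_layer / dst_layer
    dnnl_dims_t iter_dims = {L, D, N, C};        // hidden and cell states
    dnnl_dims_t wei_dims  = {L, D, C, G, C};
    dnnl_dims_t bias_dims = {L, D, G, C};

    dnnl_memory_desc_t src_md, src_iter_md, src_iter_c_md, wei_layer_md,
            wei_iter_md, bias_md, dst_md, dst_iter_md, dst_iter_c_md;
    dnnl_memory_desc_init_by_tag(&src_md, 3, act_dims, dnnl_f32, dnnl_tnc);
    dnnl_memory_desc_init_by_tag(&src_iter_md, 4, iter_dims, dnnl_f32, dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&src_iter_c_md, 4, iter_dims, dnnl_f32, dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&wei_layer_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&wei_iter_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);
    dnnl_memory_desc_init_by_tag(&dst_md, 3, act_dims, dnnl_f32, dnnl_tnc);
    dnnl_memory_desc_init_by_tag(&dst_iter_md, 4, iter_dims, dnnl_f32, dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&dst_iter_c_md, 4, iter_dims, dnnl_f32, dnnl_ldnc);

    dnnl_rnn_desc_t lstm_d;
    dnnl_lstm_forward_desc_init(&lstm_d, dnnl_forward_training,
            dnnl_unidirectional_left2right,
            &src_md, &src_iter_md, &src_iter_c_md,
            &wei_layer_md, &wei_iter_md, &bias_md,
            &dst_md, &dst_iter_md, &dst_iter_c_md, 0 /* flags */);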
dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *src_iter_c_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        const dnnl_memory_desc_t *dst_iter_c_desc,
        const dnnl_memory_desc_t *diff_src_layer_desc,
        const dnnl_memory_desc_t *diff_src_iter_desc,
        const dnnl_memory_desc_t *diff_src_iter_c_desc,
        const dnnl_memory_desc_t *diff_weights_layer_desc,
        const dnnl_memory_desc_t *diff_weights_iter_desc,
        const dnnl_memory_desc_t *diff_bias_desc,
        const dnnl_memory_desc_t *diff_dst_layer_desc,
        const dnnl_memory_desc_t *diff_dst_iter_desc,
        const dnnl_memory_desc_t *diff_dst_iter_c_desc,
        unsigned flags)

Initializes an LSTM descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.
Note: All memory descriptors are allowed to be initialized with the dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - src_iter_c (DNNL_ARG_SRC_ITER_C), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - dst_iter_c (DNNL_ARG_DST_ITER_C), if used
 - diff_dst_layer (DNNL_ARG_DIFF_DST_LAYER)
 - diff_dst_iter (DNNL_ARG_DIFF_DST_ITER), if used
 - diff_dst_iter_c (DNNL_ARG_DIFF_DST_ITER_C), if used
 - workspace (DNNL_ARG_WORKSPACE)

Outputs:
 - diff_src_layer (DNNL_ARG_DIFF_SRC_LAYER)
 - diff_src_iter (DNNL_ARG_DIFF_SRC_ITER), if used
 - diff_src_iter_c (DNNL_ARG_DIFF_SRC_ITER_C), if used
 - diff_weights_layer (DNNL_ARG_DIFF_WEIGHTS_LAYER)
 - diff_weights_iter (DNNL_ARG_DIFF_WEIGHTS_ITER)
 - diff_bias (DNNL_ARG_DIFF_BIAS), if used
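One point worth illustrating for the backward pass is primitive descriptor creation. The sketch below assumes lstm_bwd_d is a backward descriptor initialized with this function, and lstm_fwd_pd is the primitive descriptor of the corresponding forward training LSTM, passed as the hint so that the workspace layouts match.

    // Create the backward LSTM primitive descriptor using the forward
    // training primitive descriptor as a hint; default attributes are used.
    dnnl_primitive_desc_t lstm_bwd_pd;
    dnnl_primitive_desc_create(&lstm_bwd_pd, &lstm_bwd_d,
            NULL /* attributes */, engine,
            lstm_fwd_pd /* forward primitive descriptor hint */);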
dnnl_status_t DNNL_API dnnl_gru_forward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        unsigned flags)

Initializes a GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.
Note: If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.

Note: All memory descriptors except src_iter_desc are allowed to be initialized with the dnnl_format_kind_any value of format_kind.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used

Outputs:
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE), if prop_kind equals dnnl_forward_training
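A sketch analogous to the vanilla RNN case, with hypothetical sizes; the structural differences are the three gates in the weights and bias dimensions and the absence of an activation argument. Error checking is omitted.

    enum { T = 10, N = 32, C = 64, L = 1, D = 1, G = 3 };   // 3 GRU gates

    dnnl_dims_t act_dims = {T, N, C};
    dnnl_dims_t wei_dims = {L, D, C, G, C}, bias_dims = {L, D, G, C};

    dnnl_memory_desc_t src_md, wei_layer_md, wei_iter_md, bias_md, dst_md;
    dnnl_memory_desc_init_by_tag(&src_md, 3, act_dims, dnnl_f32, dnnl_tnc);
    dnnl_memory_desc_init_by_tag(&wei_layer_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&wei_iter_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);
    dnnl_memory_desc_init_by_tag(&dst_md, 3, act_dims, dnnl_f32, dnnl_tnc);

    dnnl_rnn_desc_t gru_d;
    dnnl_gru_forward_desc_init(&gru_d, dnnl_forward_inference,
            dnnl_unidirectional_left2right,
            &src_md, NULL, &wei_layer_md, &wei_iter_md, &bias_md,
            &dst_md, NULL, 0 /* flags */);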
dnnl_status_t DNNL_API dnnl_gru_backward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        const dnnl_memory_desc_t *diff_src_layer_desc,
        const dnnl_memory_desc_t *diff_src_iter_desc,
        const dnnl_memory_desc_t *diff_weights_layer_desc,
        const dnnl_memory_desc_t *diff_weights_iter_desc,
        const dnnl_memory_desc_t *diff_bias_desc,
        const dnnl_memory_desc_t *diff_dst_layer_desc,
        const dnnl_memory_desc_t *diff_dst_iter_desc,
        unsigned flags)

Initializes a GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.
Note: All memory descriptors are allowed to be initialized with the dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - diff_dst_layer (DNNL_ARG_DIFF_DST_LAYER)
 - diff_dst_iter (DNNL_ARG_DIFF_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE)

Outputs:
 - diff_src_layer (DNNL_ARG_DIFF_SRC_LAYER)
 - diff_src_iter (DNNL_ARG_DIFF_SRC_ITER), if used
 - diff_weights_layer (DNNL_ARG_DIFF_WEIGHTS_LAYER)
 - diff_weights_iter (DNNL_ARG_DIFF_WEIGHTS_ITER)
 - diff_bias (DNNL_ARG_DIFF_BIAS), if used
dnnl_status_t DNNL_API dnnl_lbr_gru_forward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        unsigned flags)

Initializes an LBR GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.
Note: If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.

Note: All memory descriptors except src_iter_desc are allowed to be initialized with the dnnl_format_kind_any value of format_kind.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used

Outputs:
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE), if prop_kind equals dnnl_forward_training
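A sketch mirroring the GRU case with hypothetical sizes; the notable assumption here is the extra bias term used by the linear-before-reset formulation, which makes the bias gate dimension one larger than the number of weight gates. Error checking is omitted.

    enum { T = 10, N = 32, C = 64, L = 1, D = 1, G = 3 };   // 3 weight gates

    dnnl_dims_t act_dims  = {T, N, C};
    dnnl_dims_t wei_dims  = {L, D, C, G, C};
    dnnl_dims_t bias_dims = {L, D, G + 1, C};   // extra bias for linear-before-reset

    dnnl_memory_desc_t src_md, wei_layer_md, wei_iter_md, bias_md, dst_md;
    dnnl_memory_desc_init_by_tag(&src_md, 3, act_dims, dnnl_f32, dnnl_tnc);
    dnnl_memory_desc_init_by_tag(&wei_layer_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&wei_iter_md, 5, wei_dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);
    dnnl_memory_desc_init_by_tag(&dst_md, 3, act_dims, dnnl_f32, dnnl_tnc);

    dnnl_rnn_desc_t lbr_gru_d;
    dnnl_lbr_gru_forward_desc_init(&lbr_gru_d, dnnl_forward_inference,
            dnnl_unidirectional_left2right,
            &src_md, NULL, &wei_layer_md, &wei_iter_md, &bias_md,
            &dst_md, NULL, 0 /* flags */);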
dnnl_status_t DNNL_API dnnl_lbr_gru_backward_desc_init(
        dnnl_rnn_desc_t *rnn_desc,
        dnnl_prop_kind_t prop_kind,
        dnnl_rnn_direction_t direction,
        const dnnl_memory_desc_t *src_layer_desc,
        const dnnl_memory_desc_t *src_iter_desc,
        const dnnl_memory_desc_t *weights_layer_desc,
        const dnnl_memory_desc_t *weights_iter_desc,
        const dnnl_memory_desc_t *bias_desc,
        const dnnl_memory_desc_t *dst_layer_desc,
        const dnnl_memory_desc_t *dst_iter_desc,
        const dnnl_memory_desc_t *diff_src_layer_desc,
        const dnnl_memory_desc_t *diff_src_iter_desc,
        const dnnl_memory_desc_t *diff_weights_layer_desc,
        const dnnl_memory_desc_t *diff_weights_iter_desc,
        const dnnl_memory_desc_t *diff_bias_desc,
        const dnnl_memory_desc_t *diff_dst_layer_desc,
        const dnnl_memory_desc_t *diff_dst_iter_desc,
        unsigned flags)

Initializes an LBR GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.
Note: All memory descriptors are allowed to be initialized with the dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which indicates that the RNN primitive should not use them and that they default to zero values.
Inputs:
 - src_layer (DNNL_ARG_SRC_LAYER)
 - src_iter (DNNL_ARG_SRC_ITER), if used
 - weights_layer (DNNL_ARG_WEIGHTS_LAYER)
 - weights_iter (DNNL_ARG_WEIGHTS_ITER)
 - bias (DNNL_ARG_BIAS), if used
 - dst_layer (DNNL_ARG_DST_LAYER)
 - dst_iter (DNNL_ARG_DST_ITER), if used
 - diff_dst_layer (DNNL_ARG_DIFF_DST_LAYER)
 - diff_dst_iter (DNNL_ARG_DIFF_DST_ITER), if used
 - workspace (DNNL_ARG_WORKSPACE)

Outputs:
 - diff_src_layer (DNNL_ARG_DIFF_SRC_LAYER)
 - diff_src_iter (DNNL_ARG_DIFF_SRC_ITER), if used
 - diff_weights_layer (DNNL_ARG_DIFF_WEIGHTS_LAYER)
 - diff_weights_iter (DNNL_ARG_DIFF_WEIGHTS_ITER)
 - diff_bias (DNNL_ARG_DIFF_BIAS), if used