Deep Neural Network Library (DNNL)  1.1.3
Performance library for Deep Learning

A primitive to compute the common recurrent layer. More...

Functions

dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_data_qparams (dnnl_primitive_attr_t attr, const float scale, const float shift)
 Sets quantization scale and shift for RNN data tensors. More...
 
dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_weights_qparams (dnnl_primitive_attr_t attr, dnnl_dim_t count, int mask, const float *weights_scales)
 Sets quantization scales weights_scales for RNN weights tensors. More...
 
dnnl_status_t DNNL_API dnnl_vanilla_rnn_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, const dnnl_alg_kind_t activation, const dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags, float alpha, float beta)
 Initializes an RNN descriptor rnn_desc for forward propagation using prop_kind, activation, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_vanilla_rnn_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, const dnnl_alg_kind_t activation, const dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags, float alpha, float beta)
 Initializes an RNN descriptor rnn_desc for backward propagation using prop_kind, activation, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, unsigned flags)
 Initializes an LSTM descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *src_iter_c_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *dst_iter_c_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_src_iter_c_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, const dnnl_memory_desc_t *diff_dst_iter_c_desc, unsigned flags)
 Initializes an LSTM descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_gru_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags)
 Initializes a GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_gru_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags)
 Initializes a GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_lbr_gru_forward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, unsigned flags)
 Initializes an LBR GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors. More...
 
dnnl_status_t DNNL_API dnnl_lbr_gru_backward_desc_init (dnnl_rnn_desc_t *rnn_desc, dnnl_prop_kind_t prop_kind, dnnl_rnn_direction_t direction, const dnnl_memory_desc_t *src_layer_desc, const dnnl_memory_desc_t *src_iter_desc, const dnnl_memory_desc_t *weights_layer_desc, const dnnl_memory_desc_t *weights_iter_desc, const dnnl_memory_desc_t *bias_desc, const dnnl_memory_desc_t *dst_layer_desc, const dnnl_memory_desc_t *dst_iter_desc, const dnnl_memory_desc_t *diff_src_layer_desc, const dnnl_memory_desc_t *diff_src_iter_desc, const dnnl_memory_desc_t *diff_weights_layer_desc, const dnnl_memory_desc_t *diff_weights_iter_desc, const dnnl_memory_desc_t *diff_bias_desc, const dnnl_memory_desc_t *diff_dst_layer_desc, const dnnl_memory_desc_t *diff_dst_iter_desc, unsigned flags)
 Initializes an LBR GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors. More...
 

Detailed Description

A primitive to compute the common recurrent layer.

See also
RNN in developer guide
RNN in C++ API

Function Documentation

◆ dnnl_primitive_attr_set_rnn_data_qparams()

dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_data_qparams ( dnnl_primitive_attr_t  attr,
const float  scale,
const float  shift 
)

Sets quantization scale and shift for RNN data tensors.

For performance reasons, the low-precision configuration of the RNN primitive expects input activations to have the unsigned int8 data type. The scale and shift used to quantize floating-point data to unsigned integers must be passed to the RNN primitive using attributes. Example usage:

// rnn parameters
int l = 2, t = 2, mb = 32, sic = 32, slc = 32, dic = 32, dlc = 32;
// activations quantization parameters
float scale = ..., shift = ...;
// create default attributes
dnnl_primitive_attr_t attr;
dnnl_primitive_attr_create(&attr);
// set scale and shift for int8 quantization of activation
dnnl_primitive_attr_set_rnn_data_qparams(attr, scale, shift);
// create & configure rnn op_desc
dnnl_primitive_desc_create(&rnn_pd, &rnn_d, attr, engine, NULL);
Note
Quantization scale and shift are common for src_layer, src_iter, dst_iter and dst_layer.

◆ dnnl_primitive_attr_set_rnn_weights_qparams()

dnnl_status_t DNNL_API dnnl_primitive_attr_set_rnn_weights_qparams ( dnnl_primitive_attr_t  attr,
dnnl_dim_t  count,
int  mask,
const float *  weights_scales 
)

Sets quantization scales weights_scales for RNN weights tensors.

The low-precision configuration of the RNN primitive expects input weights to have the signed int8 data type. The scales used to quantize floating-point data to signed integers must be passed to the RNN primitive using attributes. The mask argument defines the correspondence between the output tensor dimensions and the weights_scales array. Set the i-th bit of mask to 1 to use a dedicated scaling factor for each slice of the output tensor over the i-th dimension. Set mask to 0 to use a common scaling factor for the whole output tensor. Example usage:

// rnn parameters
int l = 2, t = 2, mb = 32, sic = 32, slc = 32, dic = 32, dlc = 32;
// unique output scales per output channel
float weights_scales[dic * n_gates] = { ... };
// mask that specifies last two dimensions of ldigo format
int mask = 0x3;
// create default attributes
dnnl_primitive_attr_t attr;
dnnl_primitive_attr_create(&attr);
// set output channel-wise weights scales
dnnl_primitive_attr_set_rnn_weights_qparams(attr, dic * n_gates, mask,
        weights_scales);
// create & configure rnn op_desc
dnnl_primitive_desc_create(&rnn_pd, &rnn_d, attr, engine, NULL);
Note
The dimension order is always native and does not depend on the actual layout used. For example, 5-dimensional weights always have the (l, d, i, g, o) logical dimension ordering.
Quantization scales are common for weights_layer and weights_iteration.
There is no way to check that count corresponds to mask until an actual primitive descriptor is created, so it is the user's responsibility to set proper values. The following relation must hold:

\[count = \prod\limits_{d \in mask} output.dims[d]\]
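For instance, instantiating this relation with purely illustrative dimensions output.dims = {2, 1, 512, 4, 128}: a mask with bits 3 and 4 set requires

\[count = output.dims[3] \cdot output.dims[4] = 4 \cdot 128 = 512,\]

while mask = 0 requires count = 1 (a single common scale).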

◆ dnnl_vanilla_rnn_forward_desc_init()

dnnl_status_t DNNL_API dnnl_vanilla_rnn_forward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
const dnnl_alg_kind_t  activation,
const dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
unsigned  flags,
float  alpha,
float  beta 
)

Initializes an RNN descriptor rnn_desc for forward propagation using prop_kind, activation, direction, and memory descriptors.

Note
If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Note
All memory descriptors are allowed to be initialized with dnnl_format_kind_any value of format_kind.

Parameters:

Inputs:

Outputs:
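Below is a minimal sketch (not taken from the library documentation) of one way this initializer can be used. It assumes an already created engine, f32 data, a single unidirectional layer with tanh activation, and illustrative dimensions; the optional src_iter_desc and dst_iter_desc are passed as NULL, the weights use dnnl_format_tag_any so the implementation can pick their layouts, and the helper name init_vanilla_rnn_fwd is hypothetical.

#include "dnnl.h"

// Hypothetical sketch: vanilla RNN, forward inference, tanh activation.
void init_vanilla_rnn_fwd(dnnl_engine_t engine) {
    // Illustrative shapes: T time steps, N batch, L layers, D directions,
    // SLC input channels, SIC hidden channels, G = 1 gate for vanilla RNN.
    const int64_t T = 2, N = 32, L = 1, D = 1, SLC = 32, SIC = 32, G = 1;

    dnnl_memory_desc_t src_layer_md, weights_layer_md, weights_iter_md,
            bias_md, dst_layer_md;

    dnnl_dims_t src_layer_dims = {T, N, SLC};
    dnnl_memory_desc_init_by_tag(&src_layer_md, 3, src_layer_dims,
            dnnl_f32, dnnl_tnc);

    // Let the primitive choose the weights layouts.
    dnnl_dims_t weights_layer_dims = {L, D, SLC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_layer_md, 5, weights_layer_dims,
            dnnl_f32, dnnl_format_tag_any);
    dnnl_dims_t weights_iter_dims = {L, D, SIC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_iter_md, 5, weights_iter_dims,
            dnnl_f32, dnnl_format_tag_any);

    dnnl_dims_t bias_dims = {L, D, G, SIC};
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);

    dnnl_dims_t dst_layer_dims = {T, N, SIC};
    dnnl_memory_desc_init_by_tag(&dst_layer_md, 3, dst_layer_dims,
            dnnl_f32, dnnl_tnc);

    // NULL src_iter/dst_iter: the initial hidden state defaults to zero and
    // the final hidden state is not requested.
    dnnl_rnn_desc_t rnn_d;
    dnnl_vanilla_rnn_forward_desc_init(&rnn_d, dnnl_forward_inference,
            dnnl_eltwise_tanh, dnnl_unidirectional_left2right,
            &src_layer_md, NULL, &weights_layer_md, &weights_iter_md,
            &bias_md, &dst_layer_md, NULL,
            0 /* flags, unused */, 0.f /* alpha */, 0.f /* beta */);

    dnnl_primitive_desc_t rnn_pd;
    dnnl_primitive_desc_create(&rnn_pd, &rnn_d, NULL, engine, NULL);
}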

◆ dnnl_vanilla_rnn_backward_desc_init()

dnnl_status_t DNNL_API dnnl_vanilla_rnn_backward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
const dnnl_alg_kind_t  activation,
const dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
const dnnl_memory_desc_t diff_src_layer_desc,
const dnnl_memory_desc_t diff_src_iter_desc,
const dnnl_memory_desc_t diff_weights_layer_desc,
const dnnl_memory_desc_t diff_weights_iter_desc,
const dnnl_memory_desc_t diff_bias_desc,
const dnnl_memory_desc_t diff_dst_layer_desc,
const dnnl_memory_desc_t diff_dst_iter_desc,
unsigned  flags,
float  alpha,
float  beta 
)

Initializes an RNN descriptor rnn_desc for backward propagation using prop_kind, activation, direction, and memory descriptors.

Note
All memory descriptors are allowed to be initialized with dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Parameters:

Inputs:

Outputs:
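A hedged sketch of setting up the diff_* descriptors, assuming forward memory descriptors like those created in the forward example above; the helper name init_vanilla_rnn_bwd is hypothetical, the optional iteration and bias tensors (and their diffs) are left as NULL, and the diff descriptors use dnnl_format_tag_any so the implementation can choose their layouts.

#include "dnnl.h"

// Hypothetical helper: build a vanilla RNN backward descriptor from
// already initialized forward memory descriptors.
static dnnl_status_t init_vanilla_rnn_bwd(dnnl_rnn_desc_t *bwd_d,
        const dnnl_memory_desc_t *src_layer_md,
        const dnnl_memory_desc_t *weights_layer_md,
        const dnnl_memory_desc_t *weights_iter_md,
        const dnnl_memory_desc_t *dst_layer_md) {
    dnnl_memory_desc_t diff_src_layer_md, diff_weights_layer_md,
            diff_weights_iter_md, diff_dst_layer_md;

    // The diff tensors share the logical dims of their forward counterparts.
    dnnl_memory_desc_init_by_tag(&diff_src_layer_md, src_layer_md->ndims,
            src_layer_md->dims, dnnl_f32, dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&diff_weights_layer_md,
            weights_layer_md->ndims, weights_layer_md->dims, dnnl_f32,
            dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&diff_weights_iter_md,
            weights_iter_md->ndims, weights_iter_md->dims, dnnl_f32,
            dnnl_format_tag_any);
    dnnl_memory_desc_init_by_tag(&diff_dst_layer_md, dst_layer_md->ndims,
            dst_layer_md->dims, dnnl_f32, dnnl_format_tag_any);

    // Optional descriptors and their diffs are passed as NULL pairs.
    return dnnl_vanilla_rnn_backward_desc_init(bwd_d, dnnl_backward,
            dnnl_eltwise_tanh, dnnl_unidirectional_left2right,
            src_layer_md, NULL, weights_layer_md, weights_iter_md,
            NULL /* bias */, dst_layer_md, NULL /* dst_iter */,
            &diff_src_layer_md, NULL, &diff_weights_layer_md,
            &diff_weights_iter_md, NULL /* diff_bias */,
            &diff_dst_layer_md, NULL /* diff_dst_iter */,
            0 /* flags, unused */, 0.f /* alpha */, 0.f /* beta */);
}

The backward primitive descriptor is then typically created by passing the forward primitive descriptor as the hint (last) argument of dnnl_primitive_desc_create, so that the backward implementation matches the forward one and its workspace.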

◆ dnnl_lstm_forward_desc_init()

dnnl_status_t DNNL_API dnnl_lstm_forward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t src_iter_c_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
const dnnl_memory_desc_t dst_iter_c_desc,
unsigned  flags 
)

Initializes an LSTM descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.

Note
If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Note
All memory descriptors except src_iter_desc are allowed to be initialized with dnnl_format_kind_any value of format_kind.

Parameters:

  • flags (unused for now)

Inputs:

Outputs:
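As a hedged illustration (not from the library documentation), the sketch below initializes an LSTM forward descriptor for training. The LSTM-specific parts are the cell-state descriptors src_iter_c_desc and dst_iter_c_desc next to the hidden-state ones and the 4-gate weights and bias shapes; the dimension values and the function name init_lstm_fwd are illustrative only.

#include "dnnl.h"

// Hypothetical sketch: LSTM forward training descriptor, G = 4 gates.
void init_lstm_fwd(dnnl_engine_t engine) {
    const int64_t T = 2, N = 32, L = 1, D = 1, SLC = 32, SIC = 32, G = 4;

    dnnl_memory_desc_t src_layer_md, src_iter_md, src_iter_c_md,
            weights_layer_md, weights_iter_md, bias_md,
            dst_layer_md, dst_iter_md, dst_iter_c_md;

    dnnl_dims_t layer_dims = {T, N, SLC};
    dnnl_memory_desc_init_by_tag(&src_layer_md, 3, layer_dims, dnnl_f32,
            dnnl_tnc);

    // Hidden and cell states share the {L, D, N, SIC} shape (ldnc layout).
    dnnl_dims_t iter_dims = {L, D, N, SIC};
    dnnl_memory_desc_init_by_tag(&src_iter_md, 4, iter_dims, dnnl_f32,
            dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&src_iter_c_md, 4, iter_dims, dnnl_f32,
            dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&dst_iter_md, 4, iter_dims, dnnl_f32,
            dnnl_ldnc);
    dnnl_memory_desc_init_by_tag(&dst_iter_c_md, 4, iter_dims, dnnl_f32,
            dnnl_ldnc);

    dnnl_dims_t weights_layer_dims = {L, D, SLC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_layer_md, 5, weights_layer_dims,
            dnnl_f32, dnnl_format_tag_any);
    dnnl_dims_t weights_iter_dims = {L, D, SIC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_iter_md, 5, weights_iter_dims,
            dnnl_f32, dnnl_format_tag_any);

    dnnl_dims_t bias_dims = {L, D, G, SIC};
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);

    dnnl_dims_t dst_layer_dims = {T, N, SIC};
    dnnl_memory_desc_init_by_tag(&dst_layer_md, 3, dst_layer_dims, dnnl_f32,
            dnnl_tnc);

    dnnl_rnn_desc_t lstm_d;
    dnnl_lstm_forward_desc_init(&lstm_d, dnnl_forward_training,
            dnnl_unidirectional_left2right,
            &src_layer_md, &src_iter_md, &src_iter_c_md,
            &weights_layer_md, &weights_iter_md, &bias_md,
            &dst_layer_md, &dst_iter_md, &dst_iter_c_md,
            0 /* flags, unused */);

    dnnl_primitive_desc_t lstm_pd;
    dnnl_primitive_desc_create(&lstm_pd, &lstm_d, NULL, engine, NULL);
    // For dnnl_forward_training, query lstm_pd for the workspace memory
    // descriptor (dnnl_query_workspace_md) before creating the primitive.
}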

◆ dnnl_lstm_backward_desc_init()

dnnl_status_t DNNL_API dnnl_lstm_backward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t src_iter_c_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
const dnnl_memory_desc_t dst_iter_c_desc,
const dnnl_memory_desc_t diff_src_layer_desc,
const dnnl_memory_desc_t diff_src_iter_desc,
const dnnl_memory_desc_t diff_src_iter_c_desc,
const dnnl_memory_desc_t diff_weights_layer_desc,
const dnnl_memory_desc_t diff_weights_iter_desc,
const dnnl_memory_desc_t diff_bias_desc,
const dnnl_memory_desc_t diff_dst_layer_desc,
const dnnl_memory_desc_t diff_dst_iter_desc,
const dnnl_memory_desc_t diff_dst_iter_c_desc,
unsigned  flags 
)

Initializes an LSTM descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.

Note
All memory descriptors are allowed to be initialized with dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Parameters:

  • flags (unused for now)

Inputs:

Outputs:

◆ dnnl_gru_forward_desc_init()

dnnl_status_t DNNL_API dnnl_gru_forward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
unsigned  flags 
)

Initializes a GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.

Note
If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Note
All memory descriptors except src_iter_desc are allowed to be initialized with dnnl_format_kind_any value of format_kind.

Parameters:

  • flags (unused for now)

Inputs:

Outputs:
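A hedged GRU sketch (illustrative names and shapes, not from the library documentation), assuming 3 gates and a bidirectional direction with concatenated outputs, which doubles the channel dimension of dst_layer:

#include "dnnl.h"

// Hypothetical sketch: bidirectional GRU forward descriptor, G = 3 gates.
void init_gru_fwd(dnnl_engine_t engine) {
    const int64_t T = 2, N = 32, L = 1, D = 2, SLC = 32, SIC = 32, G = 3;

    dnnl_memory_desc_t src_layer_md, weights_layer_md, weights_iter_md,
            bias_md, dst_layer_md;

    dnnl_dims_t src_layer_dims = {T, N, SLC};
    dnnl_memory_desc_init_by_tag(&src_layer_md, 3, src_layer_dims, dnnl_f32,
            dnnl_tnc);

    dnnl_dims_t weights_layer_dims = {L, D, SLC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_layer_md, 5, weights_layer_dims,
            dnnl_f32, dnnl_format_tag_any);
    dnnl_dims_t weights_iter_dims = {L, D, SIC, G, SIC};
    dnnl_memory_desc_init_by_tag(&weights_iter_md, 5, weights_iter_dims,
            dnnl_f32, dnnl_format_tag_any);

    dnnl_dims_t bias_dims = {L, D, G, SIC};
    dnnl_memory_desc_init_by_tag(&bias_md, 4, bias_dims, dnnl_f32, dnnl_ldgo);

    // With dnnl_bidirectional_concat the two directions are concatenated
    // along the channel dimension of dst_layer.
    dnnl_dims_t dst_layer_dims = {T, N, 2 * SIC};
    dnnl_memory_desc_init_by_tag(&dst_layer_md, 3, dst_layer_dims, dnnl_f32,
            dnnl_tnc);

    dnnl_rnn_desc_t gru_d;
    dnnl_gru_forward_desc_init(&gru_d, dnnl_forward_inference,
            dnnl_bidirectional_concat, &src_layer_md, NULL /* src_iter */,
            &weights_layer_md, &weights_iter_md, &bias_md,
            &dst_layer_md, NULL /* dst_iter */, 0 /* flags, unused */);

    dnnl_primitive_desc_t gru_pd;
    dnnl_primitive_desc_create(&gru_pd, &gru_d, NULL, engine, NULL);
}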

◆ dnnl_gru_backward_desc_init()

dnnl_status_t DNNL_API dnnl_gru_backward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
const dnnl_memory_desc_t diff_src_layer_desc,
const dnnl_memory_desc_t diff_src_iter_desc,
const dnnl_memory_desc_t diff_weights_layer_desc,
const dnnl_memory_desc_t diff_weights_iter_desc,
const dnnl_memory_desc_t diff_bias_desc,
const dnnl_memory_desc_t diff_dst_layer_desc,
const dnnl_memory_desc_t diff_dst_iter_desc,
unsigned  flags 
)

Initializes a GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.

Note
All memory descriptors are allowed to be initialized with dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Parameters:

  • flags (unused for now)

Inputs:

Outputs:

◆ dnnl_lbr_gru_forward_desc_init()

dnnl_status_t DNNL_API dnnl_lbr_gru_forward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
unsigned  flags 
)

Initializes an LBR GRU descriptor rnn_desc for forward propagation using prop_kind, direction, and memory descriptors.

Note
If prop_kind equals dnnl_forward_training, you must query a workspace memory descriptor before creating the primitive.

src_iter_desc, bias_desc, and dst_iter_desc are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Note
All memory descriptors except src_iter_desc are allowed to be initialized with dnnl_format_kind_any value of format_kind.

Parameters:

  • flags (unused for now)

Inputs:

Outputs:

◆ dnnl_lbr_gru_backward_desc_init()

dnnl_status_t DNNL_API dnnl_lbr_gru_backward_desc_init ( dnnl_rnn_desc_t rnn_desc,
dnnl_prop_kind_t  prop_kind,
dnnl_rnn_direction_t  direction,
const dnnl_memory_desc_t src_layer_desc,
const dnnl_memory_desc_t src_iter_desc,
const dnnl_memory_desc_t weights_layer_desc,
const dnnl_memory_desc_t weights_iter_desc,
const dnnl_memory_desc_t bias_desc,
const dnnl_memory_desc_t dst_layer_desc,
const dnnl_memory_desc_t dst_iter_desc,
const dnnl_memory_desc_t diff_src_layer_desc,
const dnnl_memory_desc_t diff_src_iter_desc,
const dnnl_memory_desc_t diff_weights_layer_desc,
const dnnl_memory_desc_t diff_weights_iter_desc,
const dnnl_memory_desc_t diff_bias_desc,
const dnnl_memory_desc_t diff_dst_layer_desc,
const dnnl_memory_desc_t diff_dst_iter_desc,
unsigned  flags 
)

Initializes an LBR GRU descriptor rnn_desc for backward propagation using prop_kind, direction, and memory descriptors.

Note
All memory descriptors are allowed to be initialized with dnnl_format_kind_any value of format_kind.

src_iter_desc (simultaneously with diff_src_iter_desc), bias_desc (simultaneously with diff_bias_desc), and dst_iter_desc (simultaneously with diff_dst_iter_desc) are allowed to either be NULL or point to a zero memory descriptor, which would indicate that the RNN primitive should not use them and will default to zero values.

Parameters:

  • flags (unused for now)

Inputs:

Outputs: