Compute Library
 21.02
arm_compute::utils::info_helpers Namespace Reference

Functions

bool is_relu (ActivationLayerInfo activation_info)
 Checks if the activation information corresponds to a ReLU activation function. More...
 
bool is_relu6 (ActivationLayerInfo activation_info)
 Checks if the activation information corresponds to a ReLU6 activation function. More...
 
template<typename T >
void build_lstm_params_tensor_info (const LSTMParams< T > &lstm_params, LSTMParams< ITensorInfo > *lstm_params_info)
 Build LSTMParams<ITensorInfo> object by extracting the metadata from each tensor. More...
 

Function Documentation

◆ build_lstm_params_tensor_info()

void arm_compute::utils::info_helpers::build_lstm_params_tensor_info ( const LSTMParams< T > &  lstm_params,
LSTMParams< ITensorInfo > *  lstm_params_info 
)
inline

Build LSTMParams<ITensorInfo> object by extracting the metadata from each tensor.

Parameters
[in]	lstm_params	The LSTMParams<T> object containing the tensors.
[out]	lstm_params_info	The LSTMParams<ITensorInfo> to be constructed.

Definition at line 71 of file InfoHelpers.h.

References ARM_COMPUTE_ERROR_ON_NULLPTR, LSTMParams< T >::cell_intermediate_scale(), LSTMParams< T >::cell_layer_norm_weights(), LSTMParams< T >::cell_to_forget_weights(), LSTMParams< T >::cell_to_input_weights(), LSTMParams< T >::cell_to_output_weights(), LSTMParams< T >::forget_intermediate_scale(), LSTMParams< T >::forget_layer_norm_weights(), LSTMParams< T >::has_cifg_opt(), LSTMParams< T >::has_peephole_opt(), LSTMParams< T >::has_projection(), LSTMParams< T >::hidden_state_scale(), LSTMParams< T >::hidden_state_zero(), LSTMParams< T >::input_gate_bias(), arm_compute::test::validation::input_info, LSTMParams< T >::input_intermediate_scale(), LSTMParams< T >::input_layer_norm_weights(), LSTMParams< T >::input_to_input_weights(), arm_compute::test::validation::output_info, LSTMParams< T >::output_intermediate_scale(), LSTMParams< T >::output_layer_norm_weights(), LSTMParams< T >::projection_bias(), LSTMParams< T >::projection_weights(), LSTMParams< T >::recurrent_to_input_weights(), LSTMParams< T >::set_cifg_params(), LSTMParams< T >::set_hidden_state_params(), LSTMParams< T >::set_layer_normalization_params(), LSTMParams< T >::set_matmul_scale_params(), LSTMParams< T >::set_peephole_params(), LSTMParams< T >::set_projection_params(), and LSTMParams< T >::use_layer_norm().

Referenced by NELSTMLayer::configure(), NEQLSTMLayer::configure(), CLLSTMLayer::configure(), and CLQLSTMLayer::configure().

73 {
74  if(lstm_params.has_peephole_opt())
75  {
76  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.cell_to_forget_weights(), lstm_params.cell_to_output_weights());
77  lstm_params_info->set_peephole_params(lstm_params.cell_to_forget_weights()->info(), lstm_params.cell_to_output_weights()->info());
78  }
79  if(lstm_params.has_projection())
80  {
81  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.projection_weights());
82  lstm_params_info->set_projection_params(lstm_params.projection_weights()->info(),
83  lstm_params.projection_bias() != nullptr ? lstm_params.projection_bias()->info() : nullptr);
84  }
85  if(!lstm_params.has_cifg_opt())
86  {
87  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.input_to_input_weights(), lstm_params.recurrent_to_input_weights(), lstm_params.input_gate_bias());
88 
89  ITensorInfo *cell_to_input_weights_info = (lstm_params.has_peephole_opt()) ? lstm_params.cell_to_input_weights()->info() : nullptr;
90  lstm_params_info->set_cifg_params(lstm_params.input_to_input_weights()->info(), lstm_params.recurrent_to_input_weights()->info(),
91  cell_to_input_weights_info, lstm_params.input_gate_bias()->info());
92  }
93  if(lstm_params.use_layer_norm())
94  {
95  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.forget_layer_norm_weights(),
96  lstm_params.output_layer_norm_weights(),
97  lstm_params.cell_layer_norm_weights());
98  if(!lstm_params.has_cifg_opt())
99  {
100  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.input_layer_norm_weights());
101  }
102 
103  ITensorInfo *forget_info = lstm_params.forget_layer_norm_weights()->info();
104  ITensorInfo *cell_info = lstm_params.cell_layer_norm_weights()->info();
105  ITensorInfo *output_info = lstm_params.output_layer_norm_weights()->info();
106  ITensorInfo *input_info = lstm_params.has_cifg_opt() ? nullptr : lstm_params.input_layer_norm_weights()->info();
107 
108  lstm_params_info->set_layer_normalization_params(input_info, forget_info, cell_info, output_info);
109  }
110 
111  lstm_params_info->set_matmul_scale_params(lstm_params.input_intermediate_scale(),
112  lstm_params.forget_intermediate_scale(),
113  lstm_params.cell_intermediate_scale(),
114  lstm_params.output_intermediate_scale());
115 
116  lstm_params_info->set_hidden_state_params(lstm_params.hidden_state_zero(), lstm_params.hidden_state_scale());
117 }
#define ARM_COMPUTE_ERROR_ON_NULLPTR(...)
Definition: Validate.h:161

◆ is_relu()

bool arm_compute::utils::info_helpers::is_relu ( ActivationLayerInfo  activation_info)
inline

Checks if the activation information corresponds to a ReLU activation function.

Parameters
[in]	activation_info	Activation metadata
Returns
True if the activation metadata corresponds to a ReLU activation, false otherwise

Definition at line 43 of file InfoHelpers.h.

References ActivationLayerInfo::activation(), ActivationLayerInfo::enabled(), and ActivationLayerInfo::RELU.

Referenced by NEDepthwiseConvolutionAssemblyDispatch::validate().

44 {
45  return activation_info.enabled() && activation_info.activation() == ActivationLayerInfo::ActivationFunction::RELU;
46 }

◆ is_relu6()

bool arm_compute::utils::info_helpers::is_relu6 ( ActivationLayerInfo  activation_info)
inline

Checks if the activation information corresponds to a ReLU6 activation function.

Parameters
[in]	activation_info	Activation metadata
Returns
True if the activation metadata corresponds to a ReLU6 activation, false otherwise

Definition at line 54 of file InfoHelpers.h.

References ActivationLayerInfo::a(), ActivationLayerInfo::activation(), ActivationLayerInfo::b(), ActivationLayerInfo::BOUNDED_RELU, ActivationLayerInfo::enabled(), and ActivationLayerInfo::LU_BOUNDED_RELU.

Referenced by NEDepthwiseConvolutionAssemblyDispatch::validate().

55 {
56  const bool is_lu_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU
57  && activation_info.a() == 6.f && activation_info.b() == 0.f;
58  const bool is_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::BOUNDED_RELU
59  && activation_info.a() == 6.f;
60  return activation_info.enabled() && (is_lu_bounded_relu || is_bounded_relu);
61 }