24 #ifndef ARM_COMPUTE_MISC_INFO_HELPERS_H
25 #define ARM_COMPUTE_MISC_INFO_HELPERS_H
35 namespace info_helpers
57     && activation_info.a() == 6.f && activation_info.b() == 0.f;
59     && activation_info.a() == 6.f;
60 return activation_info.enabled() && (is_lu_bounded_relu || is_bounded_relu);
const T * projection_weights() const
const T * input_to_input_weights() const
bool use_layer_norm() const
bool enabled() const
Check if initialised.
bool has_peephole_opt() const
LSTMParams & set_cifg_params(const T *input_to_input_weights, const T *recurrent_to_input_weights, T *cell_to_input_weights, const T *input_gate_bias)
Set CIFG tensor parameters.
T * forget_layer_norm_weights() const
void build_lstm_params_tensor_info(const LSTMParams< T > &lstm_params, LSTMParams< ITensorInfo > *lstm_params_info)
Build LSTMParams<ITensorInfo> object by extracting the metadata from each tensor. ...
float a() const
Get the alpha value.
float output_intermediate_scale() const
bool has_cifg_opt() const
float cell_intermediate_scale() const
float forget_intermediate_scale() const
LSTMParams & set_hidden_state_params(int32_t hidden_state_zero, float hidden_state_scale)
Set hidden state zero and scale parameters.
Store the tensor's metadata.
T * cell_to_input_weights() const
Activation Layer Information class.
Copyright (c) 2017-2021 Arm Limited.
const T * recurrent_to_input_weights() const
int32_t hidden_state_zero() const
const T * projection_bias() const
T * output_layer_norm_weights() const
float input_intermediate_scale() const
float hidden_state_scale() const
LSTMParams & set_matmul_scale_params(float input_intermediate_scale, float forget_intermediate_scale, float cell_intermediate_scale, float output_intermediate_scale)
Set scale of the intermediate results of matmul of each layer parameters.
LSTMParams & set_projection_params(const T *projection_weights, const T *projection_bias)
Set projection tensor parameters.
LSTMParams & set_layer_normalization_params(T *input_layer_norm_weights, T *forget_layer_norm_weights, T *cell_layer_norm_weights, T *output_layer_norm_weights)
Set layer normalization tensor parameters.
T * cell_to_forget_weights() const
bool is_relu6(ActivationLayerInfo activation_info)
Checks if activation information corresponds to a relu6 activation function.
Lower and Upper Bounded Rectifier ( $f(x) = \min(a, \max(b, x))$ )
LSTMParams & set_peephole_params(T *cell_to_forget_weights, T *cell_to_output_weights)
Set peephole tensor parameters.
bool has_projection() const
T * cell_to_output_weights() const
Upper Bounded Rectifier ( $f(x) = \min(a, x)$ )
T * input_layer_norm_weights() const
const T * input_gate_bias() const
bool is_relu(ActivationLayerInfo activation_info)
Checks if activation information corresponds to a relu activation function.
#define ARM_COMPUTE_ERROR_ON_NULLPTR(...)
T * cell_layer_norm_weights() const
ActivationFunction activation() const
Get the type of activation function.
float b() const
Get the beta value.