23.11
|
Go to the documentation of this file.
24 #ifndef ARM_COMPUTE_MISC_INFO_HELPERS_H
25 #define ARM_COMPUTE_MISC_INFO_HELPERS_H
35 namespace info_helpers
45   return activation_info.enabled() && activation_info.activation() == ActivationLayerInfo::ActivationFunction::RELU;
56   const bool is_lu_bounded_relu =
57       activation_info.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU &&
58       activation_info.a() == 6.f && activation_info.b() == 0.f;
59   const bool is_bounded_relu =
60       activation_info.activation() == ActivationLayerInfo::ActivationFunction::BOUNDED_RELU &&
61       activation_info.a() == 6.f;
62   return activation_info.enabled() && (is_lu_bounded_relu || is_bounded_relu);
float forget_intermediate_scale() const
bool is_relu(ActivationLayerInfo activation_info)
Checks if activation information corresponds to a relu activation function.
T * cell_to_input_weights() const
T * cell_layer_norm_weights() const
T * output_layer_norm_weights() const
ActivationFunction activation() const
Get the type of activation function.
Activation Layer Information class.
float input_intermediate_scale() const
#define ARM_COMPUTE_ERROR_ON_NULLPTR(...)
T * cell_to_output_weights() const
bool enabled() const
Check if initialised.
const T * projection_weights() const
bool is_relu6(ActivationLayerInfo activation_info)
Checks if activation information corresponds to a relu6 activation function.
bool use_layer_norm() const
void build_lstm_params_tensor_info(const LSTMParams< T > &lstm_params, LSTMParams< ITensorInfo > *lstm_params_info)
Build LSTMParams<ITensorInfo> object by extracting the metadata from each tensor.
LSTMParams & set_projection_params(const T *projection_weights, const T *projection_bias)
Set projection tensor parameters.
bool has_peephole_opt() const
LSTMParams & set_cifg_params(const T *input_to_input_weights, const T *recurrent_to_input_weights, T *cell_to_input_weights, const T *input_gate_bias)
Set CIFG tensor parameters.
bool has_cifg_opt() const
const T * recurrent_to_input_weights() const
int32_t hidden_state_zero() const
float a() const
Get the alpha value.
LSTMParams & set_layer_normalization_params(T *input_layer_norm_weights, T *forget_layer_norm_weights, T *cell_layer_norm_weights, T *output_layer_norm_weights)
Set layer normalization tensor parameters.
T * input_layer_norm_weights() const
float output_intermediate_scale() const
Copyright (c) 2017-2023 Arm Limited.
T * forget_layer_norm_weights() const
bool has_projection() const
LSTMParams & set_matmul_scale_params(float input_intermediate_scale, float forget_intermediate_scale, float cell_intermediate_scale, float output_intermediate_scale)
Set scale of the intermediate results of matmul of each layer parameters.
float hidden_state_scale() const
T * cell_to_forget_weights() const
Store the tensor's metadata.
float cell_intermediate_scale() const
LSTMParams & set_peephole_params(T *cell_to_forget_weights, T *cell_to_output_weights)
Set peephole tensor parameters.
float b() const
Get the beta value.
const T * projection_bias() const
const T * input_to_input_weights() const
LSTMParams & set_hidden_state_params(int32_t hidden_state_zero, float hidden_state_scale)
Set hidden state zero and scale parameters.
const T * input_gate_bias() const