Compute Library
 21.02
InfoHelpers.h
1 /*
2  * Copyright (c) 2019-2020 Arm Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
24 #ifndef ARM_COMPUTE_MISC_INFO_HELPERS_H
25 #define ARM_COMPUTE_MISC_INFO_HELPERS_H
26 
27 #include "arm_compute/core/Error.h"
28 #include "arm_compute/core/Types.h"
29 #include "arm_compute/runtime/common/LSTMParams.h"
30 
31 namespace arm_compute
32 {
33 namespace utils
34 {
35 namespace info_helpers
36 {
37 /** Checks if the activation information corresponds to a relu activation function
38  *
39  * @param[in] activation_info Activation metadata
40  *
41  * @return True if the activation metadata corresponds to a relu activation, false otherwise
42  */
43 inline bool is_relu(ActivationLayerInfo activation_info)
44 {
45  return activation_info.enabled() && activation_info.activation() == ActivationLayerInfo::ActivationFunction::RELU;
46 }
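
For example, a minimal usage sketch (the function and variable names below are illustrative, not part of this file): an ActivationLayerInfo constructed with the RELU function satisfies is_relu(), while a default-constructed one does not, because enabled() is false.

#include "arm_compute/core/Types.h"
#include "arm_compute/core/utils/misc/InfoHelpers.h"

using namespace arm_compute;

void relu_check_example()
{
    ActivationLayerInfo relu_info(ActivationLayerInfo::ActivationFunction::RELU);
    ActivationLayerInfo disabled_info{}; // default-constructed: enabled() == false

    bool r1 = utils::info_helpers::is_relu(relu_info);     // true
    bool r2 = utils::info_helpers::is_relu(disabled_info); // false: activation not enabled
    (void)r1;
    (void)r2;
}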
47 
48 /** Checks if the activation information corresponds to a relu6 activation function
49  *
50  * @param[in] activation_info Activation metadata
51  *
52  * @return True if the activation metadata corresponds to a relu6 activation, false otherwise
53  */
54 inline bool is_relu6(ActivationLayerInfo activation_info)
55 {
56  const bool is_lu_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU
57  && activation_info.a() == 6.f && activation_info.b() == 0.f;
58  const bool is_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::BOUNDED_RELU
59  && activation_info.a() == 6.f;
60  return activation_info.enabled() && (is_lu_bounded_relu || is_bounded_relu);
61 }
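
ReLU6 clamps activations to the range [0, 6]. A sketch of the two equivalent ways to express it with this API, both of which satisfy is_relu6() (LU_BOUNDED_RELU clamps to [b, a], BOUNDED_RELU to [0, a]; the enclosing function name is illustrative):

#include "arm_compute/core/Types.h"
#include "arm_compute/core/utils/misc/InfoHelpers.h"

using namespace arm_compute;

void relu6_check_example()
{
    // LU_BOUNDED_RELU with a = 6 and b = 0 clamps to [0, 6].
    ActivationLayerInfo lu(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.f, 0.f);
    // BOUNDED_RELU with a = 6 also clamps to [0, 6].
    ActivationLayerInfo bounded(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 6.f);

    bool r1 = utils::info_helpers::is_relu6(lu);      // true
    bool r2 = utils::info_helpers::is_relu6(bounded); // true
    (void)r1;
    (void)r2;
}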
62 
63 /** Build an LSTMParams<ITensorInfo> object by extracting the metadata from each
64  * tensor.
65  *
66  * @param[in] lstm_params The LSTMParams<T> object containing the tensors.
67  * @param[out] lstm_params_info The LSTMParams<ITensorInfo> to be constructed.
68  *
69  */
70 template <typename T>
71 inline void build_lstm_params_tensor_info(const LSTMParams<T> &lstm_params,
72  LSTMParams<ITensorInfo> *lstm_params_info)
73 {
74  if(lstm_params.has_peephole_opt())
75  {
76  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.cell_to_forget_weights(), lstm_params.cell_to_output_weights());
77  lstm_params_info->set_peephole_params(lstm_params.cell_to_forget_weights()->info(), lstm_params.cell_to_output_weights()->info());
78  }
79  if(lstm_params.has_projection())
80  {
81  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.projection_weights());
82  lstm_params_info->set_projection_params(lstm_params.projection_weights()->info(),
83  lstm_params.projection_bias() != nullptr ? lstm_params.projection_bias()->info() : nullptr);
84  }
85  if(!lstm_params.has_cifg_opt())
86  {
87  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.input_to_input_weights(), lstm_params.recurrent_to_input_weights(), lstm_params.input_gate_bias());
88 
89  ITensorInfo *cell_to_input_weights_info = (lstm_params.has_peephole_opt()) ? lstm_params.cell_to_input_weights()->info() : nullptr;
90  lstm_params_info->set_cifg_params(lstm_params.input_to_input_weights()->info(), lstm_params.recurrent_to_input_weights()->info(),
91  cell_to_input_weights_info, lstm_params.input_gate_bias()->info());
92  }
93  if(lstm_params.use_layer_norm())
94  {
95  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.forget_layer_norm_weights(),
96  lstm_params.output_layer_norm_weights(),
97  lstm_params.cell_layer_norm_weights());
98  if(!lstm_params.has_cifg_opt())
99  {
100  ARM_COMPUTE_ERROR_ON_NULLPTR(lstm_params.input_layer_norm_weights());
101  }
102 
103  ITensorInfo *forget_info = lstm_params.forget_layer_norm_weights()->info();
104  ITensorInfo *cell_info = lstm_params.cell_layer_norm_weights()->info();
105  ITensorInfo *output_info = lstm_params.output_layer_norm_weights()->info();
106  ITensorInfo *input_info = lstm_params.has_cifg_opt() ? nullptr : lstm_params.input_layer_norm_weights()->info();
107 
108  lstm_params_info->set_layer_normalization_params(input_info, forget_info, cell_info, output_info);
109  }
110 
111  lstm_params_info->set_matmul_scale_params(lstm_params.input_intermediate_scale(),
112  lstm_params.forget_intermediate_scale(),
113  lstm_params.cell_intermediate_scale(),
114  lstm_params.output_intermediate_scale());
115 
116  lstm_params_info->set_hidden_state_params(lstm_params.hidden_state_zero(), lstm_params.hidden_state_scale());
117 }
118 } // namespace info_helpers
119 } // namespace utils
120 } // namespace arm_compute
121 #endif /* ARM_COMPUTE_MISC_INFO_HELPERS_H */
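
A minimal usage sketch for build_lstm_params_tensor_info(), e.g. when forwarding runtime tensors to a function's static validate() method. The enclosing function is illustrative and the tensor set-up is elided; it also assumes a default-constructed LSTMParams leaves the CIFG optimization enabled, so no input-gate tensors are required here.

#include "arm_compute/core/utils/misc/InfoHelpers.h"
#include "arm_compute/runtime/Tensor.h"
#include "arm_compute/runtime/common/LSTMParams.h"

using namespace arm_compute;

void lstm_params_example(Tensor &cell_to_forget_weights, Tensor &cell_to_output_weights)
{
    // Runtime-side parameters holding ITensor pointers.
    LSTMParams<ITensor> lstm_params;
    lstm_params.set_peephole_params(&cell_to_forget_weights, &cell_to_output_weights);

    // Metadata-side mirror holding ITensorInfo pointers, as consumed by validate().
    LSTMParams<ITensorInfo> lstm_params_info;
    utils::info_helpers::build_lstm_params_tensor_info(lstm_params, &lstm_params_info);
}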
const T * projection_weights() const
Definition: LSTMParams.h:227
const T * input_to_input_weights() const
Definition: LSTMParams.h:197
bool use_layer_norm() const
Definition: LSTMParams.h:312
bool enabled() const
Check if initialised.
Definition: Types.h:1600
bool has_peephole_opt() const
Definition: LSTMParams.h:297
LSTMParams & set_cifg_params(const T *input_to_input_weights, const T *recurrent_to_input_weights, T *cell_to_input_weights, const T *input_gate_bias)
Set CIFG tensor parameters.
Definition: LSTMParams.h:84
T * forget_layer_norm_weights() const
Definition: LSTMParams.h:242
void build_lstm_params_tensor_info(const LSTMParams< T > &lstm_params, LSTMParams< ITensorInfo > *lstm_params_info)
Build an LSTMParams<ITensorInfo> object by extracting the metadata from each tensor.
Definition: InfoHelpers.h:71
float a() const
Get the alpha value.
Definition: Types.h:1590
float output_intermediate_scale() const
Definition: LSTMParams.h:282
bool has_cifg_opt() const
Definition: LSTMParams.h:307
float cell_intermediate_scale() const
Definition: LSTMParams.h:277
float forget_intermediate_scale() const
Definition: LSTMParams.h:272
LSTMParams & set_hidden_state_params(int32_t hidden_state_zero, float hidden_state_scale)
Set hidden state zero and scale parameters.
Definition: LSTMParams.h:190
Store the tensor's metadata.
Definition: ITensorInfo.h:40
T * cell_to_input_weights() const
Definition: LSTMParams.h:207
Activation Layer Information class.
Definition: Types.h:1550
Copyright (c) 2017-2021 Arm Limited.
const T * recurrent_to_input_weights() const
Definition: LSTMParams.h:202
int32_t hidden_state_zero() const
Definition: LSTMParams.h:287
const T * projection_bias() const
Definition: LSTMParams.h:232
T * output_layer_norm_weights() const
Definition: LSTMParams.h:252
float input_intermediate_scale() const
Definition: LSTMParams.h:267
float hidden_state_scale() const
Definition: LSTMParams.h:292
LSTMParams & set_matmul_scale_params(float input_intermediate_scale, float forget_intermediate_scale, float cell_intermediate_scale, float output_intermediate_scale)
Set scale of the intermediate results of matmul of each layer parameters.
Definition: LSTMParams.h:174
LSTMParams & set_projection_params(const T *projection_weights, const T *projection_bias)
Set projection tensor parameters.
Definition: LSTMParams.h:100
LSTMParams & set_layer_normalization_params(T *input_layer_norm_weights, T *forget_layer_norm_weights, T *cell_layer_norm_weights, T *output_layer_norm_weights)
Set layer normalization tensor parameters.
Definition: LSTMParams.h:130
T * cell_to_forget_weights() const
Definition: LSTMParams.h:217
bool is_relu6(ActivationLayerInfo activation_info)
Checks if the activation information corresponds to a relu6 activation function.
Definition: InfoHelpers.h:54
LSTMParams & set_peephole_params(T *cell_to_forget_weights, T *cell_to_output_weights)
Set peephole tensor parameters.
Definition: LSTMParams.h:114
bool has_projection() const
Definition: LSTMParams.h:302
T * cell_to_output_weights() const
Definition: LSTMParams.h:222
T * input_layer_norm_weights() const
Definition: LSTMParams.h:237
const T * input_gate_bias() const
Definition: LSTMParams.h:212
bool is_relu(ActivationLayerInfo activation_info)
Checks if the activation information corresponds to a relu activation function.
Definition: InfoHelpers.h:43
#define ARM_COMPUTE_ERROR_ON_NULLPTR(...)
Definition: Validate.h:161
T * cell_layer_norm_weights() const
Definition: LSTMParams.h:247
ActivationFunction activation() const
Get the type of activation function.
Definition: Types.h:1585
float b() const
Get the beta value.
Definition: Types.h:1595