ArmNN 25.11
HardSwishOperator.hpp File Reference
Include dependency graph for HardSwishOperator.hpp: the graph (not reproduced here) shows which files directly or indirectly include this file.


Functions

TosaSerializationBasicBlock * ConvertHardSwishToTosaOperator (const Layer *layer, const std::vector< const TensorInfo * > &inputs, const std::vector< const TensorInfo * > &outputs, const ActivationDescriptor *activationDescriptor)

Function Documentation

◆ ConvertHardSwishToTosaOperator()

TosaSerializationBasicBlock * ConvertHardSwishToTosaOperator ( const Layer * layer,
const std::vector< const TensorInfo * > & inputs,
const std::vector< const TensorInfo * > & outputs,
const ActivationDescriptor * activationDescriptor )

Definition at line 160 of file HardSwishOperator.cpp.

{
    if (inputs.size() != 1)
    {
        throw armnn::Exception("ConvertHardSwishToTosaOperator: 1 input tensor required.");
    }

    if (outputs.size() != 1)
    {
        throw armnn::Exception("ConvertHardSwishToTosaOperator: 1 output tensor required.");
    }

    if (activationDescriptor->m_Function != ActivationFunction::HardSwish)
    {
        throw armnn::Exception("ConvertHardSwishToTosaOperator: ActivationDescriptor only supports function HardSwish.");
    }

    std::string inputName = std::string("input_");
    std::string outputName = std::string("output0_");
    std::string blockName = std::string("Op_HARDSWISH_block_") + GetUniqueTosaMappingID();

    // If a layer is present then the block will be used for execution, so input and output names need to be
    // determined using the previous and following layers so the graph is connected correctly.
    // For validation this doesn't matter.
    if (layer != nullptr)
    {
        inputName = GenerateUniqueInputName(layer->GetInputSlot(0));
        outputName = GenerateUniqueOutputName(*layer);
    }

    std::vector<TosaSerializationTensor*> tensors;
    std::vector<TosaSerializationOperator*> operators;

    DataType inputDType = inputs[0]->GetDataType();

    bool isInt8 = (inputDType == DataType::QAsymmS8 || inputDType == DataType::QSymmS8);
    if (isInt8)
    {
        float inputScale = inputs[0]->GetQuantizationScale();
        float outputScale = outputs[0]->GetQuantizationScale();
        int32_t inputZp = inputs[0]->GetQuantizationOffset();
        int32_t outputZp = outputs[0]->GetQuantizationOffset();

        TosaTableAttribute attribute(
            getTosaConstHardSwish8bitTable(inputScale, inputZp, outputScale, outputZp));
        operators.push_back(new TosaSerializationOperator(tosa::Op_TABLE,
                                                          Attribute_TableAttribute,
                                                          &attribute,
                                                          {inputName},
                                                          {outputName}));
    }
    else
    {
        throw Exception("ConvertHardSwishToTosaOperator() type currently unimplemented.");
    }

    // Only add input tensors if the connected layer is an input layer,
    // as intermediate or constant tensors will be created separately.
    // There also can't be duplicate tensors.
    std::vector<int32_t> inputShape0;
    DType inputDType0 = ArmNNToDType(inputs[0]->GetDataType());
    if (inputName.find("input_") != std::string::npos)
    {
        inputShape0 = GetTosaTensorShape(inputs[0]->GetShape());
        tensors.push_back(new TosaSerializationTensor(inputName, inputShape0, inputDType0, {}));
    }

    std::vector<int32_t> outputShape0 = GetTosaTensorShape(outputs[0]->GetShape());
    DType outputDType0 = ArmNNToDType(outputs[0]->GetDataType());
    tensors.push_back(new TosaSerializationTensor(outputName, outputShape0, outputDType0, {}));

    // operatorInputNames/operatorOutputNames end up being the same as
    // blockInputNames/blockOutputNames for one-to-one ArmNN to TOSA mappings.
    return new TosaSerializationBasicBlock(blockName,     // name
                                           mainName,      // region name
                                           operators,     // operators
                                           tensors,       // tensors
                                           {inputName},   // inputs
                                           {outputName}); // outputs
}
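
A minimal usage sketch of the validation path (layer == nullptr) follows. The include path for HardSwishOperator.hpp, the tensor shape, the quantization scales and zero points, and the wrapper name BuildHardSwishBlockForValidation are illustrative assumptions, not part of the ArmNN API; only ConvertHardSwishToTosaOperator, TensorInfo, ActivationDescriptor and ActivationFunction::HardSwish are taken from the documentation above.

// Hypothetical validation-only call. The exact include path for
// HardSwishOperator.hpp depends on how the tosaCommon backend is laid out in
// the build; shapes, scales and zero points are made up for illustration.
#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>
#include "HardSwishOperator.hpp"

using namespace armnn;

TosaSerializationBasicBlock* BuildHardSwishBlockForValidation()
{
    // Quantized signed 8-bit input and output descriptions (illustrative values).
    TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmS8, 0.05f, 10);
    TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmS8, 0.02f, -5);

    // The descriptor must request HardSwish, otherwise the converter throws.
    ActivationDescriptor desc;
    desc.m_Function = ActivationFunction::HardSwish;

    // With layer == nullptr the generic "input_"/"output0_" tensor names are used,
    // so the resulting block is suitable for validation rather than execution.
    return ConvertHardSwishToTosaOperator(nullptr, { &inputInfo }, { &outputInfo }, &desc);
}
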
Referenced declarations:

std::vector< int16_t > getTosaConstHardSwish8bitTable(float inputScale, int32_t inputZp, float outputScale, int32_t outputZp)
std::string GenerateUniqueInputName(const armnn::InputSlot &slot)
std::string GenerateUniqueOutputName(const Layer &layer, uint32_t layerSlot=0)
std::string GetUniqueTosaMappingID()
std::vector< int32_t > GetTosaTensorShape(const TensorShape &shape)
DType ArmNNToDType(const DataType &type)
const std::string mainName
const InputSlot & Layer::GetInputSlot(unsigned int index) const override: get a const input slot handle by slot index. Definition: Layer.hpp, line 337.
DataType: definition at Types.hpp, line 49.
armnn::Exception: base class for all ArmNN exceptions so that users can filter to just those.

References ArmNNToDType(), GenerateUniqueInputName(), GenerateUniqueOutputName(), Layer::GetInputSlot(), getTosaConstHardSwish8bitTable(), GetTosaTensorShape(), GetUniqueTosaMappingID(), ActivationDescriptor::m_Function, and mainName.

Referenced by GetTosaMapping().
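
The int8 branch above lowers HardSwish to a single TOSA TABLE operator whose 256 entries come from getTosaConstHardSwish8bitTable(). The exact rounding and clamping used by that helper are not shown on this page; the sketch below only illustrates the general idea of such an 8-bit activation lookup table, assuming the standard definition HardSwish(x) = x * ReLU6(x + 3) / 6. The function name MakeHardSwish8bitTableSketch is hypothetical and is not the ArmNN implementation.

// Illustrative only: for each of the 256 possible quantized input values,
// dequantize, evaluate HardSwish, then requantize with the output parameters.
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <vector>

std::vector<int16_t> MakeHardSwish8bitTableSketch(float inputScale, int32_t inputZp,
                                                  float outputScale, int32_t outputZp)
{
    std::vector<int16_t> table;
    table.reserve(256);
    for (int32_t i = -128; i <= 127; ++i)
    {
        // Dequantize the candidate input value.
        float x = inputScale * static_cast<float>(i - inputZp);
        // HardSwish(x) = x * ReLU6(x + 3) / 6
        float y = x * std::min(std::max(x + 3.0f, 0.0f), 6.0f) / 6.0f;
        // Requantize and clamp to the signed 8-bit range, stored as int16_t
        // to match the helper's declared return type.
        int32_t q = static_cast<int32_t>(std::round(y / outputScale)) + outputZp;
        table.push_back(static_cast<int16_t>(std::min(std::max(q, -128), 127)));
    }
    return table;
}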