    if (inputs.size() != 1)
    {
        throw armnn::Exception("ConvertSigmoidToTosaOperator: 1 input tensor required.");
    }

    if (outputs.size() != 1)
    {
        throw armnn::Exception("ConvertSigmoidToTosaOperator: 1 output tensor required.");
    }

    if (desc->m_Function != ActivationFunction::Sigmoid)
    {
        throw armnn::Exception("ConvertSigmoidToTosaOperator: ActivationDescriptor only supports function Sigmoid.");
    }

    std::string inputName      = std::string("input_");
    std::string outputName     = std::string("output0_");
    std::string blockName      = std::string("Op_SIGMOID_block_") + GetUniqueTosaMappingID();
    std::string supportedTypes = std::string(" Supported Datatypes: INT8, FLOAT16, FLOAT32");

    // If a layer is present the block will be executed, so derive the input and output
    // names from the connected layers to keep the graph wired up correctly.
    if (layer != nullptr)
    {
        inputName  = GenerateUniqueInputName(layer->GetInputSlot(0));
        outputName = GenerateUniqueOutputName(*layer);
    }

    std::vector<TosaSerializationTensor*> tensors;
    std::vector<TosaSerializationOperator*> operators;

    // Only add an input tensor when the connected layer is an input layer;
    // intermediate and constant tensors are created elsewhere and must not be duplicated.
    std::vector<int32_t> inputShape0;
    if (inputName.find("input_") != std::string::npos)
    {
        inputShape0 = GetTosaTensorShape(inputs[0]->GetShape());
        DType inputDType0 = ArmNNToDType(inputs[0]->GetDataType());
        tensors.push_back(new TosaSerializationTensor(inputName, inputShape0, inputDType0, {}));
    }

    DataType inputDType = inputs[0]->GetDataType();

    bool isInt8 = inputDType == DataType::QAsymmS8 || inputDType == DataType::QSymmS8;
    if (isInt8)
    {
        // Quantised INT8 sigmoid is lowered to a TOSA TABLE lookup built from the
        // input/output quantisation parameters.
        float inputScale   = inputs[0]->GetQuantizationScale();
        float outputScale  = outputs[0]->GetQuantizationScale();
        int32_t inputZp    = inputs[0]->GetQuantizationOffset();
        int32_t outputZp   = outputs[0]->GetQuantizationOffset();

        auto sigmoidFunc = [](float x) -> float
        {
            return 1.0f / (1.0f + std::exp(-x));
        };

        TosaTableAttribute attribute(
            getTosaConst8bitTable(inputScale, inputZp, outputScale, outputZp, sigmoidFunc));
        operators.push_back(new TosaSerializationOperator(tosa::Op_TABLE,
                                                          Attribute_TableAttribute,
                                                          &attribute,
                                                          {inputName},
                                                          {outputName}));
    }
    else if (inputDType == DataType::QSymmS16)
    {
        throw Exception("ConvertSigmoidToTosaOperator(): INT16 is not implemented." + supportedTypes);
    }
    else if (inputDType == DataType::Float16 ||
             inputDType == DataType::Float32)
    {
        // Floating-point sigmoid maps directly onto the TOSA SIGMOID operator.
        operators.push_back(new TosaSerializationOperator(tosa::Op_SIGMOID,
                                                          Attribute_NONE,
                                                          nullptr,
                                                          {inputName},
                                                          {outputName}));
    }
    else
    {
        throw Exception("ConvertSigmoidToTosaOperator(): TOSA Spec doesn't support this datatype." + supportedTypes);
    }

    std::vector<int32_t> outputShape0 = GetTosaTensorShape(outputs[0]->GetShape());
    DType outputDType0 = ArmNNToDType(outputs[0]->GetDataType());
    tensors.push_back(new TosaSerializationTensor(outputName, outputShape0, outputDType0, {}));

    // For a one-to-one ArmNN to TOSA mapping, the operator and block input/output names match.
    return new TosaSerializationBasicBlock(blockName,     // name
                                           mainName,      // region name
                                           operators,     // operators
                                           tensors,       // tensors
                                           {inputName},   // inputs
                                           {outputName}); // outputs
References:
    std::string GenerateUniqueOutputName(const Layer& layer, uint32_t layerSlot = 0)
    const std::string mainName
    DType ArmNNToDType(const DataType& type)
    std::vector<int32_t> GetTosaTensorShape(const TensorShape& shape)
    std::string GenerateUniqueInputName(const armnn::InputSlot& slot)
    std::string GetUniqueTosaMappingID()
    std::vector<int16_t> getTosaConst8bitTable(float input_scale, int32_t input_zp, float output_scale, int32_t output_zp, std::function<float(float)> func)
    armnn::Exception: base class for all ArmNN exceptions so that users can filter to just those
    const InputSlot& GetInputSlot(unsigned int index) const: get a const input slot handle by slot index
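For the quantised INT8 path, getTosaConst8bitTable supplies the 256-entry table that the TOSA TABLE operator applies element-wise at runtime. The sketch below illustrates the general idea only: dequantise each representable INT8 value, apply the activation in floating point, then requantise and clamp the result. The helper name MakeInt8SigmoidTable and the exact rounding and clamping choices are assumptions for illustration, not ArmNN's actual implementation; only the signature mirrors the getTosaConst8bitTable declaration referenced above.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <functional>
#include <vector>

// Illustrative sketch only: MakeInt8SigmoidTable is a hypothetical stand-in for
// getTosaConst8bitTable. It walks every representable INT8 value, dequantises it,
// applies the supplied activation, and requantises the result into the table.
std::vector<int16_t> MakeInt8SigmoidTable(float inputScale, int32_t inputZp,
                                          float outputScale, int32_t outputZp,
                                          const std::function<float(float)>& func)
{
    std::vector<int16_t> table;
    table.reserve(256);
    for (int32_t value = -128; value <= 127; ++value)
    {
        // Dequantise the INT8 input value to a real number.
        float dequantised = inputScale * static_cast<float>(value - inputZp);
        // Apply the activation (e.g. the sigmoidFunc lambda above) in floating point.
        float transformed = func(dequantised);
        // Requantise and clamp back into the signed 8-bit range.
        int32_t requantised = static_cast<int32_t>(std::round(transformed / outputScale)) + outputZp;
        requantised = std::min(std::max(requantised, -128), 127);
        table.push_back(static_cast<int16_t>(requantised));
    }
    return table;
}

At runtime the TABLE operator indexes this constant with each quantised input value, so the sigmoid itself requires no floating-point work on the target.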