// NOTE(review): this chunk is a source-listing extraction of the body of
// ConvertSigmoidToTosaOperator(); the function signature sits above this
// view, and the leading integers (19, 20, ...) are original line numbers
// baked into the text by the extraction tool. Several original lines are
// absent from the listing (e.g. 37, 43-45, 56-57, 78, 103, 110), so
// identifiers such as blockName and outputShape0 appear below without a
// visible declaration. Treat this as a partial view, not buildable source.
19{
// --- Argument validation: exactly one input tensor, one output tensor,
// --- and the descriptor must request the Sigmoid activation function.
20 if (inputs.size() != 1)
21 {
22 throw armnn::Exception(
"ConvertSigmoidToTosaOperator: 1 input tensors required.");
23 }
24
25 if (outputs.size() != 1)
26 {
27 throw armnn::Exception(
"ConvertSigmoidToTosaOperator: 1 output tensor required.");
28 }
29
30 if (desc->m_Function != ActivationFunction::Sigmoid)
31 {
32 throw armnn::Exception(
"ConvertSigmoidToTosaOperator ActivationDescriptor only supports function Sigmoid.");
33 }
34
// Fallback tensor names used when no Layer is supplied; suffix text appended
// to unsupported-datatype error messages further below.
35 std::string inputName = std::string("input_");
36 std::string outputName = std::string("output0_");
38 std::string supportedTypes = std::string(" Supported Datatypes: INT8, FLOAT16, FLOAT32");
39
40
41
// NOTE(review): the body of this guard (original lines 43-45) is missing
// from the listing; presumably it replaces the fallback names above with
// unique per-layer names via GenerateUniqueInputName()/
// GenerateUniqueOutputName() (declared in the trailing fragments) and sets
// blockName -- TODO confirm against the real source file.
42 if (layer != nullptr)
43 {
46 }
47
// Accumulators for the serialized TOSA tensors/operators emitted below.
48 std::vector<TosaSerializationTensor*> tensors;
49 std::vector<TosaSerializationOperator*> operators;
50
51
52
53
// Only serialize the input tensor when it is a graph input (name carries the
// "input_" prefix); intermediate tensors are emitted by their producer block.
// NOTE(review): inputShape0 is pushed while still empty here -- the line
// that populated it (original line 57, likely via GetTosaTensorShape(), per
// the trailing fragments) is missing from this listing.
54 std::vector<int32_t> inputShape0;
55 if(inputName.find("input_") != std::string::npos)
56 {
58 DType inputDType0 =
ArmNNToDType(inputs[0]->GetDataType());
59 tensors.push_back(new TosaSerializationTensor(inputName, inputShape0, inputDType0, {}));
60 }
61
62 DataType inputDType = inputs[0]->GetDataType();
63
// Quantized INT8 path: TOSA has no quantized SIGMOID, so the activation is
// lowered to a TABLE lookup built from the input/output quantization params.
64 bool isInt8 = inputDType == DataType::QAsymmS8 || inputDType == DataType::QSymmS8;
65 if (isInt8)
66 {
67 float inputScale = inputs[0]->GetQuantizationScale();
68 float outputScale = outputs[0]->GetQuantizationScale();
69 int32_t inputZp = inputs[0]->GetQuantizationOffset();
70 int32_t outputZp = outputs[0]->GetQuantizationOffset();
71
// Reference fp32 sigmoid used to populate the lookup table entries.
72 auto sigmoidFunc = [](float x) -> float
73 {
74 return 1.0f / (1.0f + std::exp(-x));
75 };
76
// NOTE(review): the TosaTableAttribute constructor argument (original line
// 78) is truncated in this listing -- presumably
// getTosaConst8bitTable(inputScale, inputZp, outputScale, outputZp,
// sigmoidFunc), matching the declaration in the trailing fragments -- TODO
// confirm against the real source.
77 TosaTableAttribute attribute(
79 operators.push_back(new TosaSerializationOperator(tosa::Op_TABLE,
80 Attribute_TableAttribute,
81 &attribute,
82 {inputName},
83 {outputName}));
84 }
// INT16 quantized sigmoid is deliberately not implemented yet.
85 else if (inputDType == DataType::QSymmS16)
86 {
87 throw Exception(
"ConvertSigmoidToTosaOperator(): INT16 is not implemented." + supportedTypes);
88 }
// Floating-point path maps directly onto the native TOSA SIGMOID operator.
89 else if (inputDType == DataType::Float16 ||
90 inputDType == DataType::Float32)
91 {
92 operators.push_back(new TosaSerializationOperator(tosa::Op_SIGMOID,
93 Attribute_NONE,
94 nullptr,
95 {inputName},
96 {outputName}));
97 }
// Any other datatype is rejected outright.
98 else
99 {
100 throw Exception(
"ConvertSigmoidToTosaOperator(): TOSA Spec doesn't support this datatype." + supportedTypes);
101 }
102
// Serialize the output tensor.
// NOTE(review): outputShape0's declaration (original line 103) is missing
// from this listing -- presumably built via GetTosaTensorShape() on
// outputs[0], mirroring the input handling above -- TODO confirm.
104 DType outputDType0 =
ArmNNToDType(outputs[0]->GetDataType());
105 tensors.push_back(new TosaSerializationTensor(outputName, outputShape0, outputDType0, {}));
106
107
108
// Wrap the collected operators/tensors into a single TOSA basic block.
// NOTE(review): blockName is declared in a line missing from this listing.
109 return new TosaSerializationBasicBlock(blockName,
111 operators,
112 tensors,
113 {inputName},
114 {outputName});
115}
std::string GenerateUniqueOutputName(const Layer &layer, uint32_t layerSlot=0)
const std::string mainName
DType ArmNNToDType(const DataType &type)
std::string GenerateUniqueInputName(const armnn::InputSlot &slot)
std::string GetUniqueTosaMappingID()
std::vector< int32_t > GetTosaTensorShape(const TensorShape &shape)
std::vector< int16_t > getTosaConst8bitTable(float input_scale, int32_t input_zp, float output_scale, int32_t output_zp, std::function< float(float)> func)
Base class for all ArmNN exceptions so that users can filter to just those.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.