std::string inputName = std::string("input_");
std::string outputName = std::string("output0_");

/* ... */

// Flatten the descriptor's per-dimension (before, after) pairs into the single
// interleaved padding vector expected by the TOSA PAD attribute.
std::vector<int32_t> padding;
padding.reserve(padDescriptor->m_PadList.size());
for (size_t it = 0; it < padDescriptor->m_PadList.size(); ++it) {
    padding.push_back(static_cast<int32_t>(padDescriptor->m_PadList[it].first));
    padding.push_back(static_cast<int32_t>(padDescriptor->m_PadList[it].second));
}

// Quantize the float pad value with the input tensor's quantization parameters
// so it is also usable for quantized data types.
auto intPadValue = armnnUtils::SelectiveQuantize<int32_t>(padDescriptor->m_PadValue,
                                                          inputs[0]->GetQuantizationScale(),
                                                          inputs[0]->GetQuantizationOffset());
TosaPadAttribute padAttribute(padding, intPadValue, padDescriptor->m_PadValue);

auto* op = new TosaSerializationOperator(Op_PAD,
                                         Attribute_PadAttribute,
                                         /* ... */);

std::vector<TosaSerializationTensor*> tensors;

// Only add the input tensor if the preceding layer is an input layer.
if (inputName.find("input_") != std::string::npos)
{
    std::vector<int32_t> inputShape0 = GetTosaTensorShape(inputs[0]->GetShape());
    DType inputDType0 = ArmNNToDType(inputs[0]->GetDataType());

    tensors.push_back(new TosaSerializationTensor(inputName, inputShape0, inputDType0, {}));
}

std::vector<int32_t> outputShape0 = GetTosaTensorShape(outputs[0]->GetShape());
DType outputDType0 = ArmNNToDType(outputs[0]->GetDataType());

tensors.push_back(new TosaSerializationTensor(outputName, outputShape0, outputDType0, {}));

return new TosaSerializationBasicBlock(blockName,
                                       /* ... */);
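To illustrate the two transformations above in isolation, here is a minimal, self-contained sketch: it flattens a pad list into the interleaved {before, after, ...} vector and rounds a float pad value into the integer domain of a quantized tensor. The quantizePadValue helper is only a stand-in for armnnUtils::SelectiveQuantize (it is not the Arm NN implementation), and the pad list and quantization parameters are hypothetical.

#include <cmath>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// Stand-in for armnnUtils::SelectiveQuantize<int32_t>: maps a float pad value
// into a quantized tensor's integer domain (round(value / scale) + offset).
int32_t quantizePadValue(float value, float scale, int32_t offset)
{
    return static_cast<int32_t>(std::round(value / scale)) + offset;
}

int main()
{
    // Hypothetical pad list for an NHWC tensor: one (before, after) pair per
    // dimension, here padding H and W by 1 on each side.
    std::vector<std::pair<unsigned int, unsigned int>> padList = {{0, 0}, {1, 1}, {1, 1}, {0, 0}};

    // Flatten to the interleaved form used for the TOSA PAD attribute:
    // {before0, after0, before1, after1, ...}
    std::vector<int32_t> padding;
    padding.reserve(padList.size() * 2);
    for (const auto& p : padList)
    {
        padding.push_back(static_cast<int32_t>(p.first));
        padding.push_back(static_cast<int32_t>(p.second));
    }

    for (int32_t v : padding)
    {
        std::cout << v << ' ';                                  // prints: 0 0 1 1 1 1 0 0
    }
    std::cout << '\n';

    // Hypothetical quantization parameters for the input tensor.
    std::cout << quantizePadValue(0.0f, 0.05f, 10) << '\n';     // prints: 10
    return 0;
}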
Referenced symbols:

std::string GenerateUniqueOutputName(const Layer& layer, uint32_t layerSlot = 0)
const std::string mainName
DType ArmNNToDType(const DataType& type)
std::vector<int32_t> GetTosaTensorShape(const TensorShape& shape)
std::string GenerateUniqueInputName(const armnn::InputSlot& slot)
std::string GetUniqueTosaMappingID()
const InputSlot& GetInputSlot(unsigned int index) const override
    Get a const input slot handle by slot index.
float m_PadValue
    Optional value to use for padding, defaults to 0.
std::vector<std::pair<unsigned int, unsigned int>> m_PadList
    Specifies the padding for input dimension.
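For reference, m_PadList holds one (before, after) pair per input dimension and m_PadValue fills the padded elements, so each output dimension is the input dimension plus its pad-before and pad-after amounts. A short sketch under that reading, with a hypothetical NHWC shape and pad list:

#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

int main()
{
    // Hypothetical input shape and pad list (one pair per dimension).
    std::vector<unsigned int> inputShape = {1, 16, 16, 3};
    std::vector<std::pair<unsigned int, unsigned int>> padList = {{0, 0}, {2, 2}, {2, 2}, {0, 0}};

    // Each output dimension is input + pad-before + pad-after.
    std::vector<unsigned int> outputShape(inputShape.size());
    for (size_t d = 0; d < inputShape.size(); ++d)
    {
        outputShape[d] = inputShape[d] + padList[d].first + padList[d].second;
    }

    for (unsigned int dim : outputShape)
    {
        std::cout << dim << ' ';            // prints: 1 20 20 3
    }
    std::cout << '\n';
    return 0;
}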