23 #include <fmt/format.h> 31 using namespace armnn;
37 IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}
39 IDeserializer::~IDeserializer() =
default;
56 armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(
const std::vector<uint8_t> &binaryContent)
63 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
66 BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(
unsigned int layerId,
const std::string &name)
const 68 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
71 BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(
unsigned int layerId,
const std::string &name)
const 73 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
// Sentinel layer id for "virtual" layers that do not correspond to a real
// entry in the serialized layer table; max uint32_t can never collide with a
// genuine layer index. constexpr (was const) so it is a true compile-time
// constant with no static initialization.
constexpr uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
// CheckGraph: throws ParseException when 'graph' is null or when 'layersIndex'
// is out of range of graph->layers().
// NOTE(review): this fragment is extraction-garbled (stray embedded line
// numbers, missing braces/format-string tails); code left byte-identical.
81 void CheckGraph(
const GraphPtr& graph,
82 unsigned int layersIndex,
// Null graph: likely means the flatbuffer was never unpacked.
85 if (graph->layers() ==
nullptr)
87 throw ParseException(fmt::format(
"{0} was called with invalid (null) graph. " 88 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 94 else if (layersIndex >= graph->layers()->size())
96 throw ParseException(fmt::format(
"{0} was called with an invalid layers index. layers:{1} at {2}",
// CheckLayers: validates the graph pointer, the layers index and the layer
// index; VIRTUAL_LAYER_ID is exempt from the layer-index range check.
// NOTE(review): 'graph->layers()[layersIndex].size()' indexes the flatbuffer
// vector pointer itself rather than using ->Get(layersIndex) - confirm
// against the upstream source. (extraction-garbled fragment; comments only)
104 unsigned int layersIndex,
105 unsigned int layerIndex,
108 if (graph->layers() ==
nullptr)
110 throw ParseException(fmt::format(
"{0} was called with invalid (null) graph. " 111 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 117 else if (layersIndex >= graph->layers()->size())
119 throw ParseException(fmt::format(
"{0} was called with an invalid layers index. " 125 else if (layerIndex >= graph->layers()[layersIndex].size()
126 && layerIndex != VIRTUAL_LAYER_ID)
128 throw ParseException(fmt::format(
"{0} was called with an invalid layer index. " 129 "layers:{1} layer:{2} at {3}",
// CheckTensorPtr: rejects a null tensor pointer with a ParseException.
// (extraction-garbled fragment; comments only)
140 if (rawPtr ==
nullptr)
142 throw ParseException(fmt::format(
"{0} was called with a null tensor pointer. at {1}",
// CheckConstTensorPtr: rejects a null const-tensor pointer with a
// ParseException. (extraction-garbled fragment; comments only)
151 if (rawPtr ==
nullptr)
153 throw ParseException(fmt::format(
"{0} was called with a null const tensor pointer. at {1}",
// CheckConstTensorSize: throws unless the serialized const-tensor element
// count matches the expected tensor size.
// (extraction-garbled fragment; comments only)
159 void CheckConstTensorSize(
const unsigned int constTensorSize,
160 const unsigned int tensorSize,
163 if (constTensorSize != tensorSize)
165 throw ParseException(fmt::format(
"{0} wrong number of components supplied to tensor. at:{1}",
// Convenience macros that append CHECK_LOCATION() to the checker helpers
// above, followed by a fragment of a shape-comparison helper (element-wise
// compare of actual dimensions against expected values).
// (extraction-garbled fragment; comments only)
171 #define CHECK_TENSOR_PTR(TENSOR_PTR) \ 172 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 174 #define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \ 175 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION()) 177 #define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \ 178 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 180 #define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \ 181 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION()) 183 #define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \ 184 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION()) 190 if (actualSize != expected.size())
195 for (
unsigned int i = 0u; i < actualSize; i++)
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
// DeserializerImpl constructor fragment: m_Network starts empty
// (null pointer, null deleter); the rest of the init list is not visible.
206 IDeserializer::DeserializerImpl::DeserializerImpl()
207 : m_Network(nullptr, nullptr),
// GetBaseLayer: maps the flatbuffer union tag of the layer at 'layerIndex'
// to its LayerBase. Input/Output layers take a double ->base() hop through
// their bindable base. Unknown tags fall through to the ParseException below.
// NOTE(review): the 'case' labels of this switch were lost in extraction;
// only the return statements remain. Code left byte-identical.
281 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
286 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
288 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
290 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
292 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
294 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
296 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
298 return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
300 return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
302 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
304 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
306 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
308 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
310 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
312 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
314 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
316 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
318 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
320 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
322 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
324 return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
326 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
328 return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
330 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
332 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
334 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
// Input/Output: bindable base -> layer base.
336 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
338 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
340 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
342 return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
344 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
346 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
348 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
350 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
352 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
354 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
356 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
358 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
360 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
362 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
364 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
366 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
368 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
370 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
372 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
374 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
376 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
378 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
380 return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
382 return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
384 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
386 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
388 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
390 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
392 return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
394 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
396 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
398 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
400 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
402 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
404 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
406 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
408 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
410 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
412 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
414 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
416 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
418 return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
// Unrecognized union tag.
421 throw ParseException(fmt::format(
"Layer type {} not recognized", layerType));
// GetLayerName fragment: returns the layer's serialized name as std::string.
429 return layer->layerName()->str();
// Binding-id lookup fragment: reads layerBindingId from the bindable base of
// Input and Output layers (the switch's case labels are not visible here).
434 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
438 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
442 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
// ToTensorInfo fragment: converts a serialized tensor description - maps the
// data type (throwing on unsupported values), reads per-tensor quantization
// scale/offset, handles Scalar / NotSpecified / fully-specified
// dimensionalities, and per-axis quantization scales when present.
// (extraction-garbled fragment; comments only)
614 switch (tensorPtr->dataType())
648 throw ParseException(fmt::format(
"Unsupported data type {0} = {1}. {2}",
649 tensorPtr->dataType(),
655 float quantizationScale = tensorPtr->quantizationScale();
656 int32_t quantizationOffset = tensorPtr->quantizationOffset();
// Dimensionality::Scalar and NotSpecified get dedicated handling.
658 if (tensorPtr->dimensionality() ==
static_cast<unsigned int>(Dimensionality::Scalar))
665 else if (tensorPtr->dimensionality() ==
static_cast<unsigned int>(Dimensionality::NotSpecified))
674 auto dimensions = tensorPtr->dimensions();
675 unsigned int size = dimensions->size();
676 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
// Optional per-dimension specificity flags override the defaults.
681 if (tensorPtr->dimensionSpecificity() !=
nullptr)
683 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
684 size = dimensionSpecificity->size();
685 for (
unsigned int i = 0; i < size; ++i)
687 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
691 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
// Per-axis quantization: a vector of scales plus the quantized dimension.
693 auto quantizationScales = tensorPtr->quantizationScales();
694 if (quantizationScales)
696 unsigned int quantizationScalesSize = quantizationScales->size();
697 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
698 unsigned int quantizationDim = tensorPtr->quantizationDim();
// ToConstTensor fragment: selects the raw data blob (Byte/Short/Int/Long)
// matching the serialized data_type union, throwing on unsupported types.
// (extraction-garbled fragment; comments only)
721 switch (constTensorPtr->data_type())
725 auto byteData = constTensorPtr->data_as_ByteData()->data();
731 auto shortData = constTensorPtr->data_as_ShortData()->data();
737 auto intData = constTensorPtr->data_as_IntData()->data();
743 auto longData = constTensorPtr->data_as_LongData()->data();
750 throw ParseException(fmt::format(
"Unsupported data type {0} = {1}. {2}",
751 constTensorPtr->data_type(),
// GetInputs fragment: for each input slot, follows the connection back to the
// source layer and takes that layer's output slot 0 tensor info.
// (extraction-garbled fragment; comments only)
762 const auto& numInputs = layer->inputSlots()->size();
766 for (
unsigned int i=0; i<numInputs; ++i)
769 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
770 result[i] =
GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
// GetOutputs fragment: collects the tensor info of every output slot.
779 const auto& numOutputs = layer->outputSlots()->size();
783 for (
unsigned int i=0; i<numOutputs; ++i)
785 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
// ParseUnsupportedLayer: dispatch target for layer types the deserializer
// cannot handle - always throws with the layer's index and name.
790 void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(
GraphPtr graph,
unsigned int layerIndex)
793 const auto layerName =
GetBaseLayer(graph, layerIndex)->layerName()->c_str();
794 throw ParseException(fmt::format(
"Layer not supported. layerIndex: {0} " 795 "layerName: {1} / {2}",
// ResetParser fragment: clears cached input/output bindings between parses.
801 void IDeserializer::DeserializerImpl::ResetParser()
804 m_InputBindings.clear();
805 m_OutputBindings.clear();
// CreateNetworkFromBinary overload fragments: one operates on an already
// loaded graph; the stream overload first drains the stream into a byte
// vector. (extraction-garbled fragment; comments only)
813 return CreateNetworkFromGraph(graph);
819 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
821 return CreateNetworkFromGraph(graph);
// LoadGraphFromBinary fragment: rejects a null buffer, then runs the
// flatbuffers Verifier over the blob before treating it as a SerializedGraph.
826 if (binaryContent ==
nullptr)
831 flatbuffers::Verifier verifier(binaryContent, len);
832 if (verifier.VerifyBuffer<SerializedGraph>() ==
false)
834 throw ParseException(fmt::format(
"Buffer doesn't conform to the expected Armnn " 835 "flatbuffers format. size:{0} {1}",
// CreateNetworkFromGraph fragment: creates an empty INetwork, dispatches each
// serialized layer to its handler via the m_ParserFunctions table, sets up
// bindable input/output layers, then wires every recorded output slot to the
// input slots collected in m_GraphConnections, and finally releases ownership
// of m_Network to the caller. (extraction-garbled fragment; comments only)
844 m_Network = INetwork::Create();
846 unsigned int layerIndex = 0;
847 for (AnyLayer
const* layer : *graph->layers())
// Member-function-pointer dispatch keyed on the flatbuffer union tag.
853 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
854 (this->*parserFunction)(graph, layerIndex);
859 SetupInputLayers(graph);
860 SetupOutputLayers(graph);
// Connect each producer output slot to all consumer input slots recorded for
// the same (layer, slot) key.
863 for (
auto&& graphIt : m_GraphConnections)
865 Connections& connections = graphIt.second;
866 for (
auto&& outputIt : connections.outputSlots)
868 const unsigned int outputSlotIndex = outputIt.first;
870 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
872 for (
IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
874 outputSlot->
Connect(*inputSlot);
880 return std::move(m_Network);
// DeserializerImpl::GetNetworkInputBindingInfo fragment: linear search of
// m_InputBindings by name; throws when no binding matches.
// NOTE(review): 'auto inputBinding' copies each (string, info) pair per
// iteration; 'const auto&' would avoid the copies.
884 const std::string& name)
const 887 for (
auto inputBinding : m_InputBindings)
889 if (inputBinding.first == name)
891 return inputBinding.second;
894 throw ParseException(fmt::format(
"No input binding found for layer:{0} / {1}",
// DeserializerImpl::GetNetworkOutputBindingInfo fragment: linear search of
// m_OutputBindings by name; throws when no binding matches.
// NOTE(review): 'auto outputBinding' copies each pair; 'const auto&' would
// avoid the copies.
900 const std::string& name)
const 903 for (
auto outputBinding : m_OutputBindings)
905 if (outputBinding.first == name)
907 return outputBinding.second;
910 throw ParseException(fmt::format(
"No output binding found for layer:{0} / {1}",
// GetInputLayerInVector: scans the layer table for the InputLayer whose
// layerBindingId equals targetId; throws if none is found.
915 unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(
GraphPtr graph,
int targetId)
917 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
919 auto layer = graph->layers()->Get(i);
922 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
923 if (layerBindingId == targetId)
929 throw ParseException(
"Input layer with given layerBindingId not found");
// GetOutputLayerInVector: scans the layer table for the OutputLayer whose
// layerBindingId equals targetId; throws if none is found.
932 unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(
GraphPtr graph,
int targetId)
934 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
936 auto layer = graph->layers()->Get(i);
939 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
940 if (layerBindingId == targetId)
946 throw ParseException(
"Output layer with given layerBindingId not found");
// GetLayerIndexInVector fragment: finds the position in the layer table of
// the layer whose serialized index() equals targetIndex.
949 unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(
GraphPtr graph,
unsigned int targetIndex)
951 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
954 if (layer->index() == targetIndex)
// GetFeatureVersions: reads the optional featureVersions table; fields keep
// their default-constructed values when the table is absent.
962 IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(
GraphPtr graph)
964 IDeserializer::DeserializerImpl::FeatureVersions versions;
966 if (graph->featureVersions())
968 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
969 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
970 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
// SetupInputLayers fragment: for each serialized input id, locate its layer
// (binding scheme 0 appears to use the id as a layer index, otherwise search
// by binding id - TODO confirm), add an InputLayer to m_Network, propagate
// tensor info and record (name, BindingPointInfo) in m_InputBindings.
// (extraction-garbled fragment; comments only)
976 void IDeserializer::DeserializerImpl::SetupInputLayers(
GraphPtr graph)
979 const unsigned int numInputs = graph->inputIds()->size();
980 m_InputBindings.clear();
981 m_InputBindings.reserve(numInputs);
983 for (
unsigned int i = 0; i < numInputs; i++)
985 unsigned int inputLayerIndex = 0xFFFFFFFF;
986 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
989 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
993 const int inputId = graph->inputIds()->Get(i);
994 inputLayerIndex = GetInputLayerInVector(graph, inputId);
1004 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
1007 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
1008 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
1011 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
// SetupOutputLayers fragment: mirror of SetupInputLayers for output ids -
// adds an OutputLayer, registers its input slots, then pulls tensor info from
// the producing layer's output slot.
// NOTE(review): 'outputSlotIndex' is computed by passing the connection's
// outputSlotIndex() through GetLayerIndexInVector (a layer-index lookup) -
// looks suspicious; verify against upstream source.
// (extraction-garbled fragment; comments only)
1015 void IDeserializer::DeserializerImpl::SetupOutputLayers(
GraphPtr graph)
1018 const unsigned int numOutputs = graph->outputIds()->size();
1019 m_OutputBindings.clear();
1020 m_OutputBindings.reserve(numOutputs);
1022 for (
unsigned int i = 0; i < numOutputs; i++)
1024 unsigned int outputLayerIndex = 0xFFFFFFFF;
1025 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1027 const unsigned int outputId =
armnn::numeric_cast<
unsigned int>(graph->outputIds()->Get(i));
1028 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
1032 const int outputId = graph->outputIds()->Get(i);
1033 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
1043 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
1045 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
1046 unsigned int sourceLayerIndex =
1047 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
1048 unsigned int outputSlotIndex =
1049 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
1052 sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
1054 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
// RegisterOutputSlots fragment: validates that the serialized output slot
// count matches the constructed layer, then records each output slot in
// m_GraphConnections keyed by (layer index, slot index).
// (extraction-garbled fragment; comments only)
1058 void IDeserializer::DeserializerImpl::RegisterOutputSlots(
GraphPtr graph,
1059 uint32_t layerIndex,
1067 throw ParseException(fmt::format(
"The number of outputslots ({0}) does not match the number expected ({1})" 1068 " for layer index: {2} {3}",
1069 baseLayer->outputSlots()->size(),
1077 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1080 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
// RegisterInputSlots fragment: validates the input slot count (minus any
// slots listed in ignoreSlots), then records each remaining input slot
// against its source connection. (extraction-garbled fragment; comments only)
1084 void IDeserializer::DeserializerImpl::RegisterInputSlots(
GraphPtr graph,
1085 uint32_t layerIndex,
1087 std::vector<unsigned int> ignoreSlots)
1093 if (baseLayer->inputSlots()->size() != (layer->
GetNumInputSlots() - ignoreSlots.size()))
1095 throw ParseException(fmt::format(
"The number of inputslots ({0}) does not match the number expected ({1})" 1096 " for layer index:{2} {3}",
1097 baseLayer->inputSlots()->size(),
// Skip slots the caller asked to ignore (e.g. constant-tensor inputs).
1106 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
1108 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1109 auto fbConnection = fbInputSlot->connection();
1111 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
// RegisterInputSlotOfConnection: appends 'inputSlot' to the list of consumers
// recorded for (sourceLayerIndex, outputSlotIndex), creating map entries on
// first use.
1116 void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
1117 uint32_t outputSlotIndex,
1120 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1122 m_GraphConnections[sourceLayerIndex] = Connections();
1125 Connections& connections = m_GraphConnections[sourceLayerIndex];
1126 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1128 connections.inputSlots[outputSlotIndex] = {inputSlot};
1132 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
// RegisterOutputSlotOfConnection: records the single producer slot for
// (sourceLayerIndex, outputSlotIndex); the (garbled, not fully visible)
// duplicate check at 1146 guards against registering a producer twice.
1136 void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
1137 uint32_t outputSlotIndex,
1140 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1142 m_GraphConnections[sourceLayerIndex] = Connections();
1145 Connections& connections = m_GraphConnections[sourceLayerIndex];
1146 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1151 connections.outputSlots[outputSlotIndex] = outputSlot;
// ParseAbs fragment: deserialized Abs is realized as an ElementwiseUnary
// layer. (extraction-garbled fragment; comments only)
1154 void IDeserializer::DeserializerImpl::ParseAbs(
GraphPtr graph,
unsigned int layerIndex)
1157 auto inputs =
GetInputs(graph, layerIndex);
1161 auto outputs =
GetOutputs(graph, layerIndex);
1167 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1171 RegisterInputSlots(graph, layerIndex, layer);
1172 RegisterOutputSlots(graph, layerIndex, layer);
// ParseActivation fragment: copies a/b parameters from the serialized
// descriptor into an ActivationDescriptor and adds the layer.
// (extraction-garbled fragment; comments only)
1175 void IDeserializer::DeserializerImpl::ParseActivation(
GraphPtr graph,
unsigned int layerIndex)
1178 auto inputs =
GetInputs(graph, layerIndex);
1182 auto outputs =
GetOutputs(graph, layerIndex);
1185 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
1187 auto serializerDescriptor = serializerLayer->descriptor();
1191 descriptor.
m_A = serializerDescriptor->a();
1192 descriptor.
m_B = serializerDescriptor->b();
1199 RegisterInputSlots(graph, layerIndex, layer);
1200 RegisterOutputSlots(graph, layerIndex, layer);
// ParseAdd fragment: addition has no descriptor, so only inputs/outputs are
// resolved before registering slots. (extraction-garbled; comments only)
1203 void IDeserializer::DeserializerImpl::ParseAdd(
GraphPtr graph,
unsigned int layerIndex)
1206 auto inputs =
GetInputs(graph, layerIndex);
1210 auto outputs =
GetOutputs(graph, layerIndex);
1219 RegisterInputSlots(graph, layerIndex, layer);
1220 RegisterOutputSlots(graph, layerIndex, layer);
// ParseArgMinMax fragment: copies the axis from the serialized descriptor and
// adds an ArgMinMax layer. (extraction-garbled fragment; comments only)
1223 void IDeserializer::DeserializerImpl::ParseArgMinMax(
GraphPtr graph,
unsigned int layerIndex)
1226 auto inputs =
GetInputs(graph, layerIndex);
1230 auto outputs =
GetOutputs(graph, layerIndex);
1233 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1234 auto serializerDescriptor = serializerLayer->descriptor();
1238 descriptor.
m_Axis = serializerDescriptor->axis();
1240 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1245 RegisterInputSlots(graph, layerIndex, layer);
1246 RegisterOutputSlots(graph, layerIndex, layer);
// ParseBatchToSpaceNd fragment: crops are serialized flat, so the length must
// be even; pairs are rebuilt (begin, end) per spatial dimension, and the
// block shape is copied as-is. (extraction-garbled fragment; comments only)
1249 void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(
GraphPtr graph,
unsigned int layerIndex)
1259 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1260 auto flatBufferCrops = flatBufferDescriptor->crops();
1261 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
// Odd-length crops cannot be paired up - error path (garbled) follows.
1263 if (flatBufferCrops->Length() % 2 != 0)
1268 std::vector<std::pair<unsigned int, unsigned int>> crops;
1269 crops.reserve(flatBufferCrops->Length() / 2);
1270 for (
unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1272 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1278 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1282 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1287 RegisterInputSlots(graph, layerIndex, layer);
1288 RegisterOutputSlots(graph, layerIndex, layer);
// ParseBatchNormalization fragment: reads eps from the serialized descriptor;
// the mean/variance/beta/gamma const tensors are handled in lines not visible
// here. (extraction-garbled fragment; comments only)
1291 void IDeserializer::DeserializerImpl::ParseBatchNormalization(
GraphPtr graph,
unsigned int layerIndex)
1295 auto inputs =
GetInputs(graph, layerIndex);
1298 auto outputs =
GetOutputs(graph, layerIndex);
1304 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1305 auto serializerDescriptor = serializerLayer->descriptor();
1308 descriptor.
m_Eps = serializerDescriptor->eps();
1322 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1324 RegisterInputSlots(graph, layerIndex, layer);
1325 RegisterOutputSlots(graph, layerIndex, layer);
// ParseCast fragment: only the slot-registration tail is visible.
1328 void IDeserializer::DeserializerImpl::ParseCast(
GraphPtr graph,
unsigned int layerIndex)
1345 RegisterInputSlots(graph, layerIndex, layer);
1346 RegisterOutputSlots(graph, layerIndex, layer);
// ParseConstant fragment: converts the serialized const tensor and adds a
// Constant layer. No input slots - only outputs are registered.
1349 void IDeserializer::DeserializerImpl::ParseConstant(
GraphPtr graph,
unsigned int layerIndex)
1354 auto outputs =
GetOutputs(graph, layerIndex);
1359 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1360 auto serializerInput = serializerLayer->input();
1364 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1369 RegisterOutputSlots(graph, layerIndex, layer);
// ParseConvolution2d fragment: copies padding/stride/dilation/bias flags from
// the serialized descriptor. NOTE(review): several lines end with stray ';;'
// (harmless empty statements) - cosmetic garble, left untouched.
// (extraction-garbled fragment; comments only)
1372 void IDeserializer::DeserializerImpl::ParseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1375 auto inputs =
GetInputs(graph, layerIndex);
1379 auto outputs =
GetOutputs(graph, layerIndex);
1382 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1384 auto serializerDescriptor = serializerLayer->descriptor();
1387 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1388 descriptor.
m_PadRight = serializerDescriptor->padRight();
1389 descriptor.
m_PadTop = serializerDescriptor->padTop();
1390 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1391 descriptor.
m_StrideX = serializerDescriptor->strideX();
1392 descriptor.
m_StrideY = serializerDescriptor->strideY();;
1393 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1394 descriptor.
m_DilationY = serializerDescriptor->dilationY();;
1395 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
1412 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1414 RegisterInputSlots(graph, layerIndex, layer);
1415 RegisterOutputSlots(graph, layerIndex, layer);
// ParseConvolution3d fragment: same pattern as Convolution2d plus the
// front/back padding and Z stride/dilation of the third spatial dimension.
// (extraction-garbled fragment; comments only)
1418 void IDeserializer::DeserializerImpl::ParseConvolution3d(
GraphPtr graph,
unsigned int layerIndex)
1421 auto inputs =
GetInputs(graph, layerIndex);
1424 auto outputs =
GetOutputs(graph, layerIndex);
1427 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1429 auto serializerDescriptor = serializerLayer->descriptor();
1432 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1433 descriptor.
m_PadRight = serializerDescriptor->padRight();
1434 descriptor.
m_PadTop = serializerDescriptor->padTop();
1435 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1436 descriptor.
m_PadFront = serializerDescriptor->padFront();
1437 descriptor.
m_PadBack = serializerDescriptor->padBack();
1438 descriptor.
m_StrideX = serializerDescriptor->strideX();
1439 descriptor.
m_StrideY = serializerDescriptor->strideY();
1440 descriptor.
m_StrideZ = serializerDescriptor->strideZ();
1441 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1442 descriptor.
m_DilationY = serializerDescriptor->dilationY();
1443 descriptor.
m_DilationZ = serializerDescriptor->dilationZ();
1444 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();
1450 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1453 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1455 RegisterInputSlots(graph, layerIndex, layer);
1456 RegisterOutputSlots(graph, layerIndex, layer);
// ParseDepthToSpace fragment: copies the block size from the serialized
// descriptor and adds the layer. (extraction-garbled; comments only)
1459 void IDeserializer::DeserializerImpl::ParseDepthToSpace(
GraphPtr graph,
unsigned int layerIndex)
1463 auto inputs =
GetInputs(graph, layerIndex);
1466 auto outputs =
GetOutputs(graph, layerIndex);
1469 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1472 descriptor.
m_BlockSize = fbDescriptor->blockSize();
1476 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1481 RegisterInputSlots(graph, layerIndex, layer);
1482 RegisterOutputSlots(graph, layerIndex, layer);
// ParseDepthwiseConvolution2d fragment: copies descriptor fields, then - when
// the legacy weights layout (m_WeightsLayoutScheme <= 0) is in use - permutes
// the weights into the current layout via a temporary buffer and reshapes
// them to {1, ..., H*W} before adding the layer.
// (extraction-garbled fragment; comments only)
1485 void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1488 auto inputs =
GetInputs(graph, layerIndex);
1492 auto outputs =
GetOutputs(graph, layerIndex);
1495 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
1497 auto serializerDescriptor = serializerLayer->descriptor();
1500 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1501 descriptor.
m_PadRight = serializerDescriptor->padRight();
1502 descriptor.
m_PadTop = serializerDescriptor->padTop();
1503 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1504 descriptor.
m_StrideX = serializerDescriptor->strideX();
1505 descriptor.
m_StrideY = serializerDescriptor->strideY();
1506 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1507 descriptor.
m_DilationY = serializerDescriptor->dilationY();
1508 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
// Legacy weights layout: permute into a scratch buffer, then reshape.
1523 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1529 std::unique_ptr<unsigned char[]> permuteBuffer(
new unsigned char[weightsInfo.
GetNumBytes()]);
1532 weights.GetMemoryArea(), permuteBuffer.get(),
1536 auto weightsShape = weightsInfo.GetShape();
1537 weightsInfo.SetShape({1,
1540 weightsShape[2]*weightsShape[3]});
1544 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1551 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1558 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1560 RegisterInputSlots(graph, layerIndex, layer);
1561 RegisterOutputSlots(graph, layerIndex, layer);
// ParseDetectionPostProcess fragment: copies NMS parameters from the
// serialized descriptor; the layer has exactly four outputs, each given its
// tensor info in the loop below. (extraction-garbled; comments only)
1564 void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(
GraphPtr graph,
unsigned int layerIndex)
1567 auto inputs =
GetInputs(graph, layerIndex);
1571 auto outputs =
GetOutputs(graph, layerIndex);
1574 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1576 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1584 descriptor.
m_NumClasses = flatBufferDescriptor->numClasses();
1586 descriptor.
m_ScaleX = flatBufferDescriptor->scaleX();
1587 descriptor.
m_ScaleY = flatBufferDescriptor->scaleY();
1588 descriptor.
m_ScaleW = flatBufferDescriptor->scaleW();
1589 descriptor.
m_ScaleH = flatBufferDescriptor->scaleH();
1597 for (
unsigned int i = 0; i < 4; i++)
1599 layer->GetOutputSlot(i).SetTensorInfo(
ToTensorInfo(outputs[i]));
1602 RegisterInputSlots(graph, layerIndex, layer);
1603 RegisterOutputSlots(graph, layerIndex, layer);
// ParseDivision fragment: descriptor-less binary op; resolve slots and
// register. (extraction-garbled; comments only)
1606 void IDeserializer::DeserializerImpl::ParseDivision(
GraphPtr graph,
unsigned int layerIndex)
1609 auto inputs =
GetInputs(graph, layerIndex);
1613 auto outputs =
GetOutputs(graph, layerIndex);
1622 RegisterInputSlots(graph, layerIndex, layer);
1623 RegisterOutputSlots(graph, layerIndex, layer);
// ParseEqual fragment: deserialized Equal is realized as a Comparison layer.
// (extraction-garbled; comments only)
1626 void IDeserializer::DeserializerImpl::ParseEqual(
GraphPtr graph,
unsigned int layerIndex)
1629 auto inputs =
GetInputs(graph, layerIndex);
1633 auto outputs =
GetOutputs(graph, layerIndex);
1638 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1643 RegisterInputSlots(graph, layerIndex, layer);
1644 RegisterOutputSlots(graph, layerIndex, layer);
// ParseFill fragment: copies the fill value from the serialized descriptor.
// (extraction-garbled; comments only)
1647 void IDeserializer::DeserializerImpl::ParseFill(
GraphPtr graph,
unsigned int layerIndex)
1650 auto inputs =
GetInputs(graph, layerIndex);
1654 auto outputs =
GetOutputs(graph, layerIndex);
1659 descriptor.
m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
1660 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1665 RegisterInputSlots(graph, layerIndex, layer);
1666 RegisterOutputSlots(graph, layerIndex, layer);
// ParseGreater fragment: deserialized Greater is realized as a Comparison
// layer. (extraction-garbled; comments only)
1669 void IDeserializer::DeserializerImpl::ParseGreater(
GraphPtr graph,
unsigned int layerIndex)
1672 auto inputs =
GetInputs(graph, layerIndex);
1676 auto outputs =
GetOutputs(graph, layerIndex);
1681 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1686 RegisterInputSlots(graph, layerIndex, layer);
1687 RegisterOutputSlots(graph, layerIndex, layer);
// ParseInstanceNormalization fragment: copies gamma/beta/eps from the
// serialized descriptor. (extraction-garbled; comments only)
1690 void IDeserializer::DeserializerImpl::ParseInstanceNormalization(
GraphPtr graph,
unsigned int layerIndex)
1694 auto inputs =
GetInputs(graph, layerIndex);
1697 auto outputs =
GetOutputs(graph, layerIndex);
1700 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1701 auto fbDescriptor = fbLayer->descriptor();
1704 descriptor.
m_Gamma = fbDescriptor->gamma();
1705 descriptor.
m_Beta = fbDescriptor->beta();
1706 descriptor.
m_Eps = fbDescriptor->eps();
1709 const std::string layerName =
GetLayerName(graph, layerIndex);
1712 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1715 RegisterInputSlots(graph, layerIndex, layer);
1716 RegisterOutputSlots(graph, layerIndex, layer);
// ParseL2Normalization fragment: copies eps from the serialized descriptor.
// (extraction-garbled; comments only)
1719 void IDeserializer::DeserializerImpl::ParseL2Normalization(
GraphPtr graph,
unsigned int layerIndex)
1723 auto inputs =
GetInputs(graph, layerIndex);
1726 auto outputs =
GetOutputs(graph, layerIndex);
1730 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1731 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1736 descriptor.
m_Eps = flatBufferDescriptor->eps();
1738 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1741 RegisterInputSlots(graph, layerIndex, layer);
1742 RegisterOutputSlots(graph, layerIndex, layer);
// ParseLogicalBinary fragment: reads the binary-op descriptor and adds a
// LogicalBinary layer. (extraction-garbled; comments only)
1745 void IDeserializer::DeserializerImpl::ParseLogicalBinary(
GraphPtr graph,
unsigned int layerIndex)
1750 auto inputs =
GetInputs(graph, layerIndex);
1753 auto outputs =
GetOutputs(graph, layerIndex);
1756 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1757 auto fbDescriptor = fbLayer->descriptor();
1762 const std::string& layerName =
GetLayerName(graph, layerIndex);
1763 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1768 RegisterInputSlots(graph, layerIndex, layer);
1769 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a LogSoftmax layer: beta and axis come straight from the
// serialized descriptor.
1772 void IDeserializer::DeserializerImpl::ParseLogSoftmax(
GraphPtr graph,
unsigned int layerIndex)
1783 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1784 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1787 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1792 RegisterInputSlots(graph, layerIndex, layer);
1793 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Minimum layer. The Add*Layer call itself is missing from
// this extract; only the slot registration survives.
1796 void IDeserializer::DeserializerImpl::ParseMinimum(
GraphPtr graph,
unsigned int layerIndex)
1799 auto inputs =
GetInputs(graph, layerIndex);
1803 auto outputs =
GetOutputs(graph, layerIndex);
1812 RegisterInputSlots(graph, layerIndex, layer);
1813 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Maximum layer; structurally identical to ParseMinimum above.
1816 void IDeserializer::DeserializerImpl::ParseMaximum(
GraphPtr graph,
unsigned int layerIndex)
1819 auto inputs =
GetInputs(graph, layerIndex);
1823 auto outputs =
GetOutputs(graph, layerIndex);
1832 RegisterInputSlots(graph, layerIndex, layer);
1833 RegisterOutputSlots(graph, layerIndex, layer);
// Fragment of a helper whose signature line was lost by the extraction.
// Returns the origins descriptor for a concat-like layer, handling both the
// current ConcatLayer and the deprecated MergerLayer encodings based on the
// serialized layer_type.
1837 unsigned int layerIndex)
1839 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1844 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
// Legacy path: older models serialize concat as "Merger".
1846 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
// Deserializes a ChannelShuffle layer: axis and numGroups are read from the
// serialized descriptor.
1851 void IDeserializer::DeserializerImpl::ParseChannelShuffle(
GraphPtr graph,
unsigned int layerIndex)
1862 descriptor.
m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1863 descriptor.m_NumGroups =
1864 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1867 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Comparison layer (Equal/Greater/Less/etc. — the operation
// comes from the FlatBuffer descriptor, conversion not visible here).
1875 void IDeserializer::DeserializerImpl::ParseComparison(
GraphPtr graph,
unsigned int layerIndex)
1880 auto inputs =
GetInputs(graph, layerIndex);
1883 auto outputs =
GetOutputs(graph, layerIndex);
1886 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1887 auto fbDescriptor = fbLayer->descriptor();
1892 const std::string& layerName =
GetLayerName(graph, layerIndex);
1893 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1898 RegisterInputSlots(graph, layerIndex, layer);
1899 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an ElementwiseUnary layer (Abs/Rsqrt/Exp/etc. per the
// serialized descriptor) and adds it to m_Network.
1902 void IDeserializer::DeserializerImpl::ParseElementwiseUnary(
GraphPtr graph,
unsigned int layerIndex)
1907 auto inputs =
GetInputs(graph, layerIndex);
1910 auto outputs =
GetOutputs(graph, layerIndex);
1913 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1914 auto fbDescriptor = fbLayer->descriptor();
1919 const std::string& layerName =
GetLayerName(graph, layerIndex);
1920 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1925 RegisterInputSlots(graph, layerIndex, layer);
1926 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Concat layer: rebuilds the OriginsDescriptor view-by-view
// from the serialized viewOrigins and sets the concat axis.
1929 void IDeserializer::DeserializerImpl::ParseConcat(
GraphPtr graph,
unsigned int layerIndex)
1934 auto outputs =
GetOutputs(graph, layerIndex);
1939 unsigned int numViews = originsDescriptor->numViews();
1940 unsigned int numDimensions = originsDescriptor->numDimensions();
1943 auto inputs =
GetInputs(graph, layerIndex);
1947 auto originsPtr = originsDescriptor->viewOrigins();
// Copy every origin coordinate (numViews x numDimensions) into the descriptor.
1948 for (
unsigned int v = 0; v < numViews; ++v)
1950 auto originPtr = originsPtr->Get(v);
1951 for (
unsigned int d = 0; d < numDimensions; ++d)
1953 uint32_t value = originPtr->data()->Get(d);
1954 descriptor.SetViewOriginCoord(v, d, value);
1957 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
1959 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
1963 RegisterInputSlots(graph, layerIndex, layer);
1964 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Multiplication layer; it has no descriptor, only a name.
1967 void IDeserializer::DeserializerImpl::ParseMultiplication(
GraphPtr graph,
unsigned int layerIndex)
1970 auto inputs =
GetInputs(graph, layerIndex);
1974 auto outputs =
GetOutputs(graph, layerIndex);
1978 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
1983 RegisterInputSlots(graph, layerIndex, layer);
1984 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Floor layer; descriptor-less, added by name only.
1987 void IDeserializer::DeserializerImpl::ParseFloor(
GraphPtr graph,
unsigned int layerIndex)
1992 auto inputs =
GetInputs(graph, layerIndex);
1995 auto outputs =
GetOutputs(graph, layerIndex);
2002 layer = m_Network->AddFloorLayer(layerName.c_str());
2007 RegisterInputSlots(graph, layerIndex, layer);
2008 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a FullyConnected layer. Two formats exist:
//  - legacy (m_ConstTensorsAsInputs <= 0): weights/bias are embedded in the
//    layer; they are re-created here as ConstantLayers connected to input
//    slots 1 and 2, and those slots are excluded from normal registration
//    via ignoreSlots.
//  - current: weights/bias arrive as real graph inputs, so the layer is
//    added with the descriptor's own input count.
2011 void IDeserializer::DeserializerImpl::ParseFullyConnected(
GraphPtr graph,
unsigned int layerIndex)
2014 auto inputs =
GetInputs(graph, layerIndex);
2017 auto outputs =
GetOutputs(graph, layerIndex);
2020 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
2022 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2025 fullyConnectedDescriptor.
m_BiasEnabled = flatBufferDescriptor->biasEnabled();
2027 fullyConnectedDescriptor.
m_ConstantWeights = flatBufferDescriptor->constantWeights();
// Slots satisfied by synthesized ConstantLayers, skipped during registration.
2030 std::vector<unsigned int> ignoreSlots {};
2034 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
2039 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
// Re-create the embedded weights as a ConstantLayer feeding slot 1.
2043 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
2044 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
2045 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.
GetInfo());
2046 ignoreSlots.emplace_back(1u);
// Same for the optional bias on slot 2.
2051 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
2052 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
2053 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.
GetInfo());
2054 ignoreSlots.emplace_back(2u);
2059 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
2061 uint32_t numInputs = fullyConnectedDescriptor.
GetNumInputs();
2068 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
2069 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Pad layer: the serialized pad list is a flat array of
// (before, after) pairs, so its length must be even; it is unpacked into a
// vector of pairs for the PadDescriptor.
2072 void IDeserializer::DeserializerImpl::ParsePad(
GraphPtr graph,
unsigned int layerIndex)
2082 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2083 auto flatBufferPadList = flatBufferDescriptor->padList();
2084 auto paddingMode = flatBufferDescriptor->paddingMode();
2085 float padValue = flatBufferDescriptor->padValue();
// Reject malformed models: pads must come in before/after pairs.
2087 if (flatBufferPadList->Length() % 2 != 0)
2089 throw ParseException(fmt::format(
"The size of the pad list must be divisible by 2 {}",
2093 std::vector<std::pair<unsigned int, unsigned int>> padList;
2094 padList.reserve(flatBufferPadList->Length() / 2);
// Consume the flat list two entries at a time.
2095 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2097 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2103 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2108 RegisterInputSlots(graph, layerIndex, layer);
2109 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Permute layer from its serialized dimension mappings.
2112 void IDeserializer::DeserializerImpl::ParsePermute(
GraphPtr graph,
unsigned int layerIndex)
2117 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
2119 auto inputs =
GetInputs(graph, layerIndex);
2122 auto outputs =
GetOutputs(graph, layerIndex);
2129 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
2132 RegisterInputSlots(graph, layerIndex, layer);
2133 RegisterOutputSlots(graph, layerIndex, layer);
// Fragment of a Pooling2d descriptor-conversion helper (signature line lost
// by the extraction). Translates each serialized enum (pool type, rounding,
// padding method, data layout) via a switch, then copies the scalar fields.
2137 unsigned int layerIndex)
2142 switch (pooling2dDesc->poolType())
2165 switch (pooling2dDesc->outputShapeRounding())
2183 switch (pooling2dDesc->paddingMethod())
2201 switch (pooling2dDesc->dataLayout())
2220 desc.
m_PadLeft = pooling2dDesc->padLeft();
2222 desc.
m_PadTop = pooling2dDesc->padTop();
2223 desc.
m_StrideX = pooling2dDesc->strideX();
2224 desc.
m_StrideY = pooling2dDesc->strideY();
// Fragment of the Pooling3d descriptor-conversion helper, mirroring the 2d
// variant above with the additional depth (Z/back) fields.
2232 unsigned int layerIndex)
2237 switch (pooling3dDesc->poolType())
2260 switch (pooling3dDesc->outputShapeRounding())
2278 switch (pooling3dDesc->paddingMethod())
2296 switch (pooling3dDesc->dataLayout())
2315 desc.
m_PadLeft = pooling3dDesc->padLeft();
2317 desc.
m_PadTop = pooling3dDesc->padTop();
2319 desc.
m_PadBack = pooling3dDesc->padBack();
2320 desc.
m_StrideX = pooling3dDesc->strideX();
2321 desc.
m_StrideY = pooling3dDesc->strideY();
2322 desc.
m_StrideZ = pooling3dDesc->strideZ();
// Deserializes a Pooling2d layer using the converted descriptor.
2330 void IDeserializer::DeserializerImpl::ParsePooling2d(
GraphPtr graph,
unsigned int layerIndex)
2334 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
2335 auto inputs =
GetInputs(graph, layerIndex);
2338 auto outputs =
GetOutputs(graph, layerIndex);
2344 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
2347 RegisterInputSlots(graph, layerIndex, layer);
2348 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Pooling3d layer; structurally mirrors ParsePooling2d.
2351 void IDeserializer::DeserializerImpl::ParsePooling3d(
GraphPtr graph,
unsigned int layerIndex)
2355 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2356 auto inputs =
GetInputs(graph, layerIndex);
2359 auto outputs =
GetOutputs(graph, layerIndex);
2365 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2368 RegisterInputSlots(graph, layerIndex, layer);
2369 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Quantize layer (descriptor-less; the AddQuantizeLayer call
// is missing from this extract).
2372 void IDeserializer::DeserializerImpl::ParseQuantize(
GraphPtr graph,
unsigned int layerIndex)
2376 auto inputs =
GetInputs(graph, layerIndex);
2379 auto outputs =
GetOutputs(graph, layerIndex);
2387 RegisterInputSlots(graph, layerIndex, layer);
2388 RegisterOutputSlots(graph, layerIndex, layer);
// Fragment of the reshape output-shape helper (signature line lost). Resolves
// a single -1 wildcard dimension: at most one -1 is allowed; its value is
// inferred as totalInputElements / product(other dims).
2392 const std::vector<uint32_t>& targetDimsIn)
2394 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
// NOTE(review): comparing uint32_t elements against int -1 relies on the
// usual arithmetic conversions (-1 -> 0xFFFFFFFF) to locate the wildcard.
2395 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2397 if (stretchDim != targetDimsIn.end())
// A second -1 makes the shape ambiguous — reject it.
2399 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2401 throw ParseException(fmt::format(
"At most one component of shape can be -1 {}",
// The -1 initial value cancels the wildcard's -1 factor, leaving the
// (positive) product of the known dimensions.
2405 auto targetNumElements =
2407 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2409 auto stretchIndex =
static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2410 outputDims[stretchIndex] = inputTensorInfo.
GetNumElements() / targetNumElements;
// Deserializes a Rank layer (body largely missing from this extract; only
// the slot registration survives).
2421 void IDeserializer::DeserializerImpl::ParseRank(
GraphPtr graph,
unsigned int layerIndex)
2437 RegisterInputSlots(graph, layerIndex, layer);
2438 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Reduce layer: copies keepDims and the axis vector from the
// FlatBuffer descriptor.
2441 void IDeserializer::DeserializerImpl::ParseReduce(
GraphPtr graph,
unsigned int layerIndex)
2446 auto inputs =
GetInputs(graph, layerIndex);
2449 auto outputs =
GetOutputs(graph, layerIndex);
2452 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2453 auto fbDescriptor = fbLayer->descriptor();
2454 auto flatBufferAxis = fbDescriptor->axis();
2457 descriptor.
m_KeepDims = fbDescriptor->keepDims();
2458 descriptor.
m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2461 const std::string& layerName =
GetLayerName(graph, layerIndex);
2462 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2467 RegisterInputSlots(graph, layerIndex, layer);
2468 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Reshape layer: computes the concrete output shape from the
// serialized target shape (which may contain a -1 wildcard, resolved by the
// helper above) and validates it against the serialized output dimensions.
2471 void IDeserializer::DeserializerImpl::ParseReshape(
GraphPtr graph,
unsigned int layerIndex)
2474 auto inputs =
GetInputs(graph, layerIndex);
2476 auto outputs =
GetOutputs(graph, layerIndex);
2482 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
2483 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2486 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2488 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2489 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
// Mismatch between the computed shape and the recorded output shape is a
// model error — report both shapes in the exception text.
2491 if (inputs.size() > 1 && !
CheckShape(reshapeOutputTensorShape, expectedDims))
2493 std::stringstream ss;
2494 ss <<
"New shape defined in reshape parameters " 2495 << reshapeOutputTensorShape
2496 <<
" does not equal output shape " 2497 << actualOutputTensorInfo.
GetShape()
2507 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2510 RegisterInputSlots(graph, layerIndex, layer);
2511 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Resize layer: target size and alignCorners come from the
// serialized descriptor.
2514 void IDeserializer::DeserializerImpl::ParseResize(
GraphPtr graph,
unsigned int layerIndex)
2524 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2527 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2528 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2531 descriptor.
m_AlignCorners = flatBufferDescriptor->alignCorners();
2535 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2540 RegisterInputSlots(graph, layerIndex, layer);
2541 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes the deprecated ResizeBilinear layer; kept for backward
// compatibility with older serialized models, mapped onto AddResizeLayer.
2547 void IDeserializer::DeserializerImpl::ParseResizeBilinear(
GraphPtr graph,
unsigned int layerIndex)
2557 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2560 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2561 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2564 descriptor.
m_AlignCorners = flatBufferDescriptor->alignCorners();
2568 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2573 RegisterInputSlots(graph, layerIndex, layer);
2574 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Shape layer (body largely missing from this extract).
2577 void IDeserializer::DeserializerImpl::ParseShape(
GraphPtr graph,
unsigned int layerIndex)
2593 RegisterInputSlots(graph, layerIndex, layer);
2594 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Softmax layer: beta and axis from the serialized descriptor.
2597 void IDeserializer::DeserializerImpl::ParseSoftmax(
GraphPtr graph,
unsigned int layerIndex)
2608 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
2609 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
2612 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2617 RegisterInputSlots(graph, layerIndex, layer);
2618 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a SpaceToBatchNd layer: unpacks the flat (before, after) pad
// list into pairs and copies the block shape.
2621 void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(
GraphPtr graph,
unsigned int layerIndex)
2631 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2632 auto flatBufferPadList = flatBufferDescriptor->padList();
2633 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
// Same even-length invariant as ParsePad: pads come in before/after pairs.
2635 if (flatBufferPadList->Length() % 2 != 0)
2637 throw ParseException(fmt::format(
"The size of the pad list must be divisible by 2 {}",
2641 std::vector<std::pair<unsigned int, unsigned int>> padList;
2642 padList.reserve(flatBufferPadList->Length() / 2);
2643 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2645 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2651 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2655 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2660 RegisterInputSlots(graph, layerIndex, layer);
2661 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a SpaceToDepth layer; only blockSize is read in this extract.
2664 void IDeserializer::DeserializerImpl::ParseSpaceToDepth(
GraphPtr graph,
unsigned int layerIndex)
2674 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2677 descriptor.
m_BlockSize = flatBufferDescriptor->blockSize();
2681 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2686 RegisterInputSlots(graph, layerIndex, layer);
2687 RegisterOutputSlots(graph, layerIndex, layer);
// Fragment of the normalization descriptor-conversion helper (signature line
// lost; a detached copy of it appears at the very end of this extract).
// Translates the serialized channel type / method / data layout enums and
// copies the scalar parameters alpha, beta, k, and normSize.
2692 unsigned int layerIndex)
2697 switch (normalizationDescriptor->normChannelType())
2715 switch (normalizationDescriptor->normMethodType())
2733 switch (normalizationDescriptor->dataLayout())
2751 desc.
m_Alpha = normalizationDescriptor->alpha();
2752 desc.
m_Beta = normalizationDescriptor->beta();
2753 desc.
m_K = normalizationDescriptor->k();
2754 desc.
m_NormSize = normalizationDescriptor->normSize();
// Deserializes a Normalization layer using the descriptor helper above.
2759 void IDeserializer::DeserializerImpl::ParseNormalization(
GraphPtr graph,
unsigned int layerIndex)
2763 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2776 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2779 RegisterInputSlots(graph, layerIndex, layer);
2780 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a legacy Rsqrt layer, mapped onto an ElementwiseUnary layer.
2783 void IDeserializer::DeserializerImpl::ParseRsqrt(
GraphPtr graph,
unsigned int layerIndex)
2786 auto inputs =
GetInputs(graph, layerIndex);
2790 auto outputs =
GetOutputs(graph, layerIndex);
2796 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2800 RegisterInputSlots(graph, layerIndex, layer);
2801 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Slice layer: begin and size vectors must have equal length,
// then both are appended into the SliceDescriptor.
2804 void IDeserializer::DeserializerImpl::ParseSlice(
GraphPtr graph,
unsigned int layerIndex)
2808 auto inputs =
GetInputs(graph, layerIndex);
2811 auto outputs =
GetOutputs(graph, layerIndex);
2814 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2816 auto fbBegin = fbDescriptor->begin();
2817 auto fbSize = fbDescriptor->size();
// One size per begin coordinate — anything else is a malformed model.
2819 if (fbBegin->Length() != fbSize->Length())
2821 throw ParseException(fmt::format(
"Begin and size descriptors must have the same length {}",
2826 descriptor.
m_Begin.insert(descriptor.
m_Begin.end(), fbBegin->begin(), fbBegin->end());
2827 descriptor.
m_Size.insert(descriptor.
m_Size.end(), fbSize->begin(), fbSize->end());
2830 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2835 RegisterInputSlots(graph, layerIndex, layer);
2836 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a StridedSlice layer: begin/end/stride vectors (validated to
// be the same length) plus the five bit-mask fields and data layout.
2839 void IDeserializer::DeserializerImpl::ParseStridedSlice(
GraphPtr graph,
unsigned int layerIndex)
2849 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2851 auto flatBufferBegin = flatBufferDescriptor->begin();
2852 auto flatBufferEnd = flatBufferDescriptor->end();
2853 auto flatBufferStride = flatBufferDescriptor->stride();
// All three index vectors must agree in length.
2855 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2856 flatBufferBegin->Length() == flatBufferStride->Length()))
2858 throw ParseException(fmt::format(
"The size of the begin, end, and stride must be equal {}",
2862 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2863 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2864 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2867 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2868 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2869 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2870 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2871 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2872 descriptor.m_DataLayout =
ToDataLayout(flatBufferDescriptor->dataLayout());
2875 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2880 RegisterInputSlots(graph, layerIndex, layer);
2881 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Subtraction layer (Add*Layer call missing from extract).
2884 void IDeserializer::DeserializerImpl::ParseSubtraction(
GraphPtr graph,
unsigned int layerIndex)
2887 auto inputs =
GetInputs(graph, layerIndex);
2891 auto outputs =
GetOutputs(graph, layerIndex);
2900 RegisterInputSlots(graph, layerIndex, layer);
2901 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Gather layer; only the axis is read from the descriptor.
2904 void IDeserializer::DeserializerImpl::ParseGather(
GraphPtr graph,
unsigned int layerIndex)
2915 descriptor.
m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2918 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
2923 RegisterInputSlots(graph, layerIndex, layer);
2924 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Mean layer: axis vector and keepDims from the descriptor.
2927 void IDeserializer::DeserializerImpl::ParseMean(
GraphPtr graph,
unsigned int layerIndex)
2937 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2938 auto flatBufferAxis = flatBufferDescriptor->axis();
2939 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2942 descriptor.
m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2946 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2951 RegisterInputSlots(graph, layerIndex, layer);
2952 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Splitter layer: rebuilds the ViewsDescriptor by copying
// every view's size and origin coordinate (numViews x numDimensions), then
// iterates the views again (body cut) before registering slots.
2955 void IDeserializer::DeserializerImpl::ParseSplitter(
GraphPtr graph,
unsigned int layerIndex)
2964 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2965 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2966 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2967 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2968 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2969 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2976 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2978 for (
unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2980 viewsDescriptor.
SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx))
2981 viewsDescriptor.
SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2986 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2989 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2995 RegisterInputSlots(graph, layerIndex, layer);
2996 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an LSTM layer. Optional weight groups are only read when the
// corresponding descriptor flag enables them:
//  - CIFG disabled  -> input gate weights/bias present
//  - projection     -> projection weights/bias
//  - peephole       -> cell-to-forget / cell-to-output weights
//  - layer norm     -> per-gate layer-norm weights (input one only sans CIFG)
3014 void IDeserializer::DeserializerImpl::ParseLstm(
GraphPtr graph,
unsigned int layerIndex)
3018 auto inputs =
GetInputs(graph, layerIndex);
3021 auto outputs =
GetOutputs(graph, layerIndex);
3024 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
3026 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3027 auto flatBufferInputParams = flatBufferLayer->inputParams();
// Input-gate parameters exist only when CIFG is off.
3057 if (!lstmDescriptor.m_CifgEnabled)
3059 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3060 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3061 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3062 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3072 if (lstmDescriptor.m_ProjectionEnabled)
3074 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3075 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3083 if (lstmDescriptor.m_PeepholeEnabled)
3085 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3086 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3096 if (lstmDescriptor.m_LayerNormEnabled)
3098 if (!lstmDescriptor.m_CifgEnabled)
3100 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3103 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3104 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3105 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3112 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
3126 RegisterInputSlots(graph, layerIndex, layer);
3127 RegisterOutputSlots(graph, layerIndex, layer);
3139 desc.
m_CellClip = qLstmDescriptor->cellClip();
// Deserializes a QLstm (quantized LSTM) layer. Same conditional weight-group
// scheme as ParseLstm; note cell-to-input peephole weights additionally
// require CIFG to be disabled.
3153 void IDeserializer::DeserializerImpl::ParseQLstm(
GraphPtr graph,
unsigned int layerIndex)
3157 auto inputs =
GetInputs(graph, layerIndex);
3160 auto outputs =
GetOutputs(graph, layerIndex);
3163 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
3165 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3166 auto flatBufferInputParams = flatBufferLayer->inputParams();
3197 if (!qLstmDescriptor.m_CifgEnabled)
3199 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3200 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3201 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3212 if (qLstmDescriptor.m_ProjectionEnabled)
3214 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3215 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3226 if (qLstmDescriptor.m_PeepholeEnabled)
// Cell-to-input peephole weights only exist when CIFG is also off.
3228 if (!qLstmDescriptor.m_CifgEnabled)
3230 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3234 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3235 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3247 if (qLstmDescriptor.m_LayerNormEnabled)
3249 if (!qLstmDescriptor.m_CifgEnabled)
3251 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3255 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3256 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3257 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3264 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
3275 RegisterInputSlots(graph, layerIndex, layer);
3276 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes the older QuantizedLstm layer; it has no descriptor, only
// input parameters (tensor extraction lines cut from this extract).
3279 void IDeserializer::DeserializerImpl::ParseQuantizedLstm(
GraphPtr graph,
unsigned int layerIndex)
3283 auto inputs =
GetInputs(graph, layerIndex);
3286 auto outputs =
GetOutputs(graph, layerIndex);
3289 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3291 auto flatBufferInputParams = flatBufferLayer->inputParams();
3321 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3329 RegisterInputSlots(graph, layerIndex, layer);
3330 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Dequantize layer (descriptor-less; Add call cut).
3333 void IDeserializer::DeserializerImpl::ParseDequantize(
GraphPtr graph,
unsigned int layerIndex)
3343 const std::string layerName =
GetLayerName(graph, layerIndex);
3349 RegisterInputSlots(graph, layerIndex, layer);
3350 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Merge layer (descriptor-less; Add call cut).
3353 void IDeserializer::DeserializerImpl::ParseMerge(
GraphPtr graph,
unsigned int layerIndex)
3363 const std::string layerName =
GetLayerName(graph, layerIndex);
3369 RegisterInputSlots(graph, layerIndex, layer);
3370 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Switch layer (two inputs, two outputs; Add call cut).
3373 void IDeserializer::DeserializerImpl::ParseSwitch(
GraphPtr graph,
unsigned int layerIndex)
3376 auto inputs =
GetInputs(graph, layerIndex);
3380 auto outputs =
GetOutputs(graph, layerIndex);
3392 RegisterInputSlots(graph, layerIndex, layer);
3393 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Prelu layer (input + alpha tensor; Add call cut).
3396 void IDeserializer::DeserializerImpl::ParsePrelu(
GraphPtr graph,
unsigned int layerIndex)
3399 auto inputs =
GetInputs(graph, layerIndex);
3403 auto outputs =
GetOutputs(graph, layerIndex);
3412 RegisterInputSlots(graph, layerIndex, layer);
3413 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Transpose layer from its serialized dimension mappings.
3416 void IDeserializer::DeserializerImpl::ParseTranspose(
GraphPtr graph,
unsigned int layerIndex)
3420 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3422 auto inputs =
GetInputs(graph, layerIndex);
3425 auto outputs =
GetOutputs(graph, layerIndex);
3432 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3435 RegisterInputSlots(graph, layerIndex, layer);
3436 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a TransposeConvolution2d layer: copies padding/stride/bias
// settings from the descriptor, optionally wraps the bias in an Optional,
// and sets the output slot's tensor info explicitly.
3439 void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
3443 auto inputs =
GetInputs(graph, layerIndex);
3446 auto outputs =
GetOutputs(graph, layerIndex);
3449 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3451 auto serializerDescriptor = serializerLayer->descriptor();
3454 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
3455 descriptor.
m_PadRight = serializerDescriptor->padRight();
3456 descriptor.
m_PadTop = serializerDescriptor->padTop();
3457 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
3458 descriptor.
m_StrideX = serializerDescriptor->strideX();
// NOTE(review): stray double ';' on the next two statements — harmless,
// but worth cleaning up upstream.
3459 descriptor.
m_StrideY = serializerDescriptor->strideY();;
3460 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
3469 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3472 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3478 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3480 RegisterInputSlots(graph, layerIndex, layer);
3481 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Stack layer: reads axis, numInputs, and the common input
// shape, then verifies every actual input matches that declared shape.
3484 void IDeserializer::DeserializerImpl::ParseStack(
GraphPtr graph,
unsigned int layerIndex)
3487 auto inputs =
GetInputs(graph, layerIndex);
3489 auto outputs =
GetOutputs(graph, layerIndex);
3492 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3493 unsigned int axis = flatBufferDescriptor->axis();
3494 unsigned int numInputs = flatBufferDescriptor->numInputs();
3497 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3498 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3499 flatBufferInputShape->begin() + flatBufferInputShape->size());
3501 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
// All stacked inputs must share the descriptor's declared shape.
3504 for (
unsigned int i=0; i<inputs.size(); ++i)
3507 if (descriptor.m_InputShape != inputShape)
3509 std::stringstream ss;
3510 ss <<
"Shape of input " 3514 <<
" does not equal defined input shape " 3515 << descriptor.m_InputShape
3523 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3528 RegisterInputSlots(graph, layerIndex, layer);
3529 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a StandIn layer (placeholder for unsupported ops): reads the
// input count and loops over the declared outputs (loop body cut).
3532 void IDeserializer::DeserializerImpl::ParseStandIn(
GraphPtr graph,
unsigned int layerIndex)
3536 auto inputs =
GetInputs(graph, layerIndex);
3537 auto outputs =
GetOutputs(graph, layerIndex);
3539 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3540 auto fbDescriptor = fbLayer->descriptor();
3543 descriptor.
m_NumInputs = fbDescriptor->numInputs();
3549 const std::string layerName =
GetLayerName(graph, layerIndex);
3552 for (
unsigned int i = 0u; i < descriptor.
m_NumOutputs; ++i)
3558 RegisterInputSlots(graph, layerIndex, layer);
3559 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a UnidirectionalSequenceLstm layer. Optional weight groups
// follow the same descriptor-flag scheme as ParseLstm/ParseQLstm; note the
// peephole flag is checked twice (cell-to-input first, then forget/output).
3579 void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(
GraphPtr graph,
unsigned int layerIndex)
3583 auto inputs =
GetInputs(graph, layerIndex);
3586 auto outputs =
GetOutputs(graph, layerIndex);
3589 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
3591 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3592 auto flatBufferInputParams = flatBufferLayer->inputParams();
3622 if (!descriptor.m_CifgEnabled)
3624 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3625 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3626 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3632 if (descriptor.m_PeepholeEnabled)
3634 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3641 if (descriptor.m_ProjectionEnabled)
3643 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3644 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3652 if (descriptor.m_PeepholeEnabled)
3654 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3655 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3665 if (descriptor.m_LayerNormEnabled)
3667 if (!descriptor.m_CifgEnabled)
3669 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3672 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3673 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3674 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3681 IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
3688 RegisterInputSlots(graph, layerIndex, layer);
3689 RegisterOutputSlots(graph, layerIndex, layer);
static armnn::NormalizationDescriptor GetNormalizationDescriptor(NormalizationDescriptorPtr normalizationDescriptor, unsigned int layerIndex)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max, Average, L2).
float m_Eps
Used to avoid dividing by zero.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
bool m_ProjectionEnabled
Enable/disable the projection layer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
UnaryOperation m_Operation
Specifies the elementwiseUnary operation to execute.
static TensorRawPtrVector GetOutputs(const GraphPtr &graph, unsigned int layerIndex)
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
float m_ScaleW
Center size encoding scale weight.
#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
float m_K
Kappa value used for the across channel normalization equation.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
const TensorShape & GetShape() const
uint32_t m_PoolWidth
Pooling width value.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
float m_ClippingThresProj
Clipping threshold value for the projection.
uint32_t m_PoolDepth
Pooling depth value.
std::string AsString() const
static LayerBaseRawPtr GetBaseLayer(const GraphPtr &graphPtr, unsigned int layerIndex)
A ReshapeDescriptor for the ReshapeLayer.
const armnnSerializer::ConstTensor * ConstTensorRawPtr
uint32_t m_PadBack
Padding back value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
const armnnSerializer::NormalizationDescriptor * NormalizationDescriptorPtr
A ComparisonDescriptor for the ComparisonLayer.
static GraphPtr LoadGraphFromBinary(const uint8_t *binaryContent, size_t len)
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_PoolWidth
Pooling width value.
bool m_PeepholeEnabled
Enable/disable peephole.
#define CHECK_TENSOR_PTR(TENSOR_PTR)
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A Convolution2dDescriptor for the Convolution2dLayer.
float m_Alpha
Alpha value for the normalization equation.
uint32_t m_PadLeft
Padding left value in the width dimension.
const armnnSerializer::QLstmDescriptor * QLstmDescriptorPtr
static armnn::UnidirectionalSequenceLstmDescriptor GetUnidirectionalSequenceLstmDescriptor(UnidirectionalSequenceLstmDescriptorPtr descriptor)
bool m_KeepDims
if true then output shape has no change.
float m_HiddenStateScale
Hidden State quantization scale.
const char * EnumNameConstTensorData(ConstTensorData e)
bool m_BiasEnabled
Enable/disable bias.
unsigned int GetNumBytes() const
float m_OutputIntermediateScale
Output intermediate quantization scale.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
BindingPointInfo GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
Retrieve binding info (layer id and tensor info) for the network input identified by the given layer ...
std::vector< unsigned int > m_Size
Size of the slice in each dimension.
armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent)
Create an input network from binary file contents.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
void CheckLayers(Graph &graph)
uint32_t m_PadRight
Padding right value in the width dimension.
const armnnSerializer::SerializedGraph * GetSerializedGraph(const void *buf)
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
ReduceOperation m_ReduceOperation
Specifies the reduction operation to execute.
bool m_TimeMajor
Enable/disable time major.
Copyright (c) 2021 ARM Limited and Contributors.
DataLayout m_DataLayout
The data layout to be used (NCDHW, NDHWC).
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadFront
Padding front value in the depth dimension.
#define CHECK_GRAPH(GRAPH, LAYERS_INDEX)
uint32_t m_DilationY
Dilation along y axis.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
const armnnSerializer::SerializedGraph * GraphPtr
std::vector< std::pair< unsigned int, unsigned int > > m_PadList
Specifies the padding values for the input dimension: heightPad{top, bottom} widthPad{left, right}.
uint32_t m_PoolHeight
Pooling height value.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_DilationY
Dilation factor value for height dimension.
const armnnSerializer::Pooling2dDescriptor * Pooling2dDescriptor
LogicalBinaryOperation m_Operation
Specifies the logical operation to execute.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static int32_t GetBindingLayerInfo(const GraphPtr &graphPtr, unsigned int layerIndex)
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_NumOutputs
Number of output tensors.
NormalizationAlgorithmMethod m_NormMethodType
Normalization method algorithm to use (LocalBrightness, LocalContrast).
void SetShape(const TensorShape &newShape)
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
const armnnSerializer::LayerBase * LayerBaseRawPtr
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
TensorShape m_TargetShape
Target shape value.
uint32_t m_PoolHeight
Pooling height value.
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
std::vector< TensorRawPtr > TensorRawPtrVector
void Permute(const armnn::TensorShape &dstShape, const armnn::PermutationVector &mappings, const void *src, void *dst, size_t dataTypeSize)
#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE)
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
const armnnSerializer::UnidirectionalSequenceLstmDescriptor * UnidirectionalSequenceLstmDescriptorPtr
uint32_t m_PadBack
Padding back value in the depth dimension.
armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent)
Create an input network from binary file contents.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
bool m_LayerNormEnabled
Enable/disable layer normalization.
const armnnSerializer::LstmDescriptor * LstmDescriptorPtr
armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
bool CheckShape(const armnn::TensorShape &actual, const std::vector< uint32_t > &expected)
float m_NmsIouThreshold
Intersection over union threshold.
static armnn::LstmDescriptor GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
An LstmDescriptor for the LstmLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
std::string FileLine() const
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
#define ARMNN_ASSERT_MSG(COND, MSG)
std::vector< unsigned int > m_Begin
Beginning indices of the slice in each dimension.
bool m_KeepDims
Enable/disable keep dimensions. If true, then the reduced dimensions that are of length 1 are kept...
std::vector< unsigned int > m_BlockShape
Block shape values.
float m_Eps
Epsilon, small scalar value added to variance to avoid dividing by zero. Defaults to 1e-12f...
An output connection slot for a layer.
A L2NormalizationDescriptor for the L2NormalizationLayer.
static TensorRawPtrVector GetInputs(const GraphPtr &graph, unsigned int layerIndex)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
float m_ProjectionClip
Clipping threshold value for the projection.
A FullyConnectedDescriptor for the FullyConnectedLayer.
BindingPointInfo GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
Retrieve binding info (layer id and tensor info) for the network output identified by the given layer...
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
float m_InputIntermediateScale
Input intermediate quantization scale.
OutputShapeRounding m_OutputShapeRounding
The rounding method for the output shape. (Floor, Ceiling).
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
#define CHECK_VALID_SIZE(ACTUAL,...)
bool m_PeepholeEnabled
Enable/disable peephole.
uint32_t m_NumClasses
Number of classes.
#define CHECKED_NON_NEGATIVE(VALUE)
bool m_HalfPixelCenters
Half Pixel Centers.
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> IDeserializerPtr
armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
uint32_t m_PadTop
Padding top value in the height dimension.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
#define ARMNN_ASSERT(COND)
A StandInDescriptor for the StandIn layer.
A QLstmDescriptor for the QLstmLayer.
#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR)
bool m_UseRegularNms
Use Regular NMS.
uint32_t m_PadFront
Padding front value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
std::vector< unsigned int > m_BlockShape
Block shape value.
PaddingMode
The padding mode controls whether the padding should be filled with constant values (Constant)...
An ActivationDescriptor for the ActivationLayer.
const TensorInfo & GetInfo() const
min(a, max(b, input)) ReLu1 & ReLu6.
uint32_t m_PadLeft
Padding left value in the width dimension.
uint32_t m_TargetHeight
Target height value.
uint32_t m_ActivationFunc
The activation function to use.
A SliceDescriptor for the SliceLayer.
static armnn::Pooling3dDescriptor GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDescriptor, unsigned int layerIndex)
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A Convolution3dDescriptor for the Convolution3dLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
float m_ClippingThresCell
Clipping threshold value for the cell state.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_ForgetIntermediateScale
Forget intermediate quantization scale.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_Beta
Beta, the offset scalar value applied for the normalized tensor. Defaults to 0.0. ...
armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
A Pooling3dDescriptor for the Pooling3dLayer.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
static armnn::Pooling2dDescriptor GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDescriptor, unsigned int layerIndex)
ComparisonOperation m_Operation
Specifies the comparison operation to execute.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
DataLayout m_DataLayout
The data layout to be used (NDHWC, NCDHW).
NormalizationAlgorithmChannel m_NormChannelType
Normalization channel algorithm to use (Across, Within).
float m_CellClip
Clipping threshold value for the cell state.
float m_A
Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu).
uint32_t m_DilationX
Dilation along x axis.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
const armnnSerializer::TensorInfo * TensorRawPtr
bool m_CifgEnabled
Enable/disable cifg (coupled input & forget gate).
armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
uint32_t GetNumInputs() const
Get the number of views/inputs.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max, Average, L2).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
The padding fields count, but are ignored.
std::vector< std::pair< unsigned int, unsigned int > > m_Crops
The values to crop from the input dimension.
Base class for all ArmNN exceptions so that users can filter to just those.
static std::string GetLayerName(const GraphPtr &graph, unsigned int index)
uint32_t m_PadTop
Padding top value in the height dimension.
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
uint32_t m_PadTop
Padding top value in the height dimension.
bool m_ProjectionEnabled
Enable/disable the projection layer.
Jarrett 2009: Local Contrast Normalization.
OutputShapeRounding m_OutputShapeRounding
The rounding method for the output shape. (Floor, Ceiling).
uint32_t m_NumInputs
Number of input tensors.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
static armnn::QLstmDescriptor GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptorPtr)
static armnn::TensorInfo OutputShapeOfReshape(const armnn::TensorInfo &inputTensorInfo, const std::vector< uint32_t > &targetDimsIn)
bool m_LayerNormEnabled
Enable/disable layer normalization.
std::enable_if_t< std::is_unsigned< Source >::value &&std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
uint32_t m_Axis
Axis to apply channel shuffle operation on.
uint32_t GetNumInputs() const
Get the number of views/inputs.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
int m_Axis
Axis to reduce across the input tensor.
float m_ScaleY
Center size encoding scale y.
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
virtual int Connect(IInputSlot &destination)=0
Krizhevsky 2012: Local Brightness Normalization.
const char * EnumNameDataType(DataType e)
A Pooling2dDescriptor for the Pooling2dLayer.
A NormalizationDescriptor for the NormalizationLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
A ChannelShuffleDescriptor for the ChannelShuffle operator.
float m_CellIntermediateScale
Cell intermediate quantization scale.
uint32_t m_DilationZ
Dilation along z axis.
float m_B
Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
armnn::TensorShape Permuted(const armnn::TensorShape &srcShape, const armnn::PermutationVector &mappings)
A SoftmaxDescriptor for the SoftmaxLayer.
float m_Beta
Beta value for the normalization equation.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
const armnnSerializer::OriginsDescriptor * GetOriginsDescriptor(const armnnSerializer::SerializedGraph *graph, unsigned int layerIndex)
bool m_CifgEnabled
Enable/disable CIFG (coupled input & forget gate).
uint32_t m_NormSize
Depth radius value.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
constexpr unsigned int MaxNumOfTensorDimensions
uint32_t m_DilationY
Dilation along y axis.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
unsigned int GetNumElements() const
constexpr unsigned int GetDataTypeSize(DataType dataType)
A PermuteDescriptor for the PermuteLayer.
const armnnSerializer::Pooling3dDescriptor * Pooling3dDescriptor
uint32_t m_PadRight
Padding right value in the width dimension.
int32_t m_HiddenStateZeroPoint
Hidden State zero point.
bool m_ConstantWeights
Enable/disable constant weights and biases.