ArmNN
 24.08
ArmnnConverter.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017, 2023-2024 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include <armnn/Logging.hpp>
6 
7 #if defined(ARMNN_ONNX_PARSER)
9 #endif
10 #if defined(ARMNN_SERIALIZER)
12 #endif
13 #if defined(ARMNN_TF_LITE_PARSER)
15 #endif
16 
17 #include <HeapProfiling.hpp>
20 
21 /*
22  * Historically we use the ',' character to separate dimensions in a tensor shape. However, cxxopts will read this
23  * as an array of values which is fine until we have multiple tensors specified. This lumps the values of all shapes
24  * together in a single array and we cannot break it up again. We'll change the vector delimiter to a '.'. We do this
25  * as close as possible to the usage of cxxopts to avoid polluting other possible uses.
26  */
27 #define CXXOPTS_VECTOR_DELIMITER '.'
28 #include <cxxopts/cxxopts.hpp>
29 
30 #include <fmt/format.h>
31 
32 #include <cstdlib>
33 #include <fstream>
34 #include <iostream>
35 
36 namespace
37 {
38 
39 armnn::TensorShape ParseTensorShape(std::istream& stream)
40 {
41  std::vector<unsigned int> result;
42  std::string line;
43 
44  while (std::getline(stream, line))
45  {
46  std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, ",");
47  for (const std::string& token : tokens)
48  {
49  if (!token.empty())
50  {
51  try
52  {
53  result.push_back(armnn::numeric_cast<unsigned int>(std::stoi((token))));
54  }
55  catch (const std::exception&)
56  {
57  ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
58  }
59  }
60  }
61  }
62 
63  return armnn::TensorShape(armnn::numeric_cast<unsigned int>(result.size()), result.data());
64 }
65 
66 int ParseCommandLineArgs(int argc, char* argv[],
67  std::string& modelFormat,
68  std::string& modelPath,
69  std::vector<std::string>& inputNames,
70  std::vector<std::string>& inputTensorShapeStrs,
71  std::vector<std::string>& outputNames,
72  std::string& outputPath, bool& isModelBinary)
73 {
74  cxxopts::Options options("ArmNNConverter", "Convert a neural network model from provided file to ArmNN format.");
75  try
76  {
77  std::string modelFormatDescription("Format of the model file");
78 #if defined(ARMNN_ONNX_PARSER)
79  modelFormatDescription += ", onnx-binary, onnx-text";
80 #endif
81 #if defined(ARMNN_TF_PARSER)
82  modelFormatDescription += ", tensorflow-binary, tensorflow-text";
83 #endif
84 #if defined(ARMNN_TF_LITE_PARSER)
85  modelFormatDescription += ", tflite-binary";
86 #endif
87  modelFormatDescription += ".";
88  options.add_options()
89  ("help", "Display usage information")
90  ("f,model-format", modelFormatDescription, cxxopts::value<std::string>(modelFormat))
91  ("m,model-path", "Path to model file.", cxxopts::value<std::string>(modelPath))
92 
93  ("i,input-name", "Identifier of the input tensors in the network. "
94  "Each input must be specified separately.",
95  cxxopts::value<std::vector<std::string>>(inputNames))
96  ("s,input-tensor-shape",
97  "The shape of the input tensor in the network as a flat array of integers, "
98  "separated by comma. Each input shape must be specified separately after the input name. "
99  "This parameter is optional, depending on the network.",
100  cxxopts::value<std::vector<std::string>>(inputTensorShapeStrs))
101 
102  ("o,output-name", "Identifier of the output tensor in the network.",
103  cxxopts::value<std::vector<std::string>>(outputNames))
104  ("p,output-path",
105  "Path to serialize the network to.", cxxopts::value<std::string>(outputPath));
106  }
107  catch (const std::exception& e)
108  {
109  std::cerr << e.what() << std::endl << options.help() << std::endl;
110  return EXIT_FAILURE;
111  }
112  try
113  {
114  cxxopts::ParseResult result = options.parse(argc, argv);
115  if (result.count("help"))
116  {
117  std::cerr << options.help() << std::endl;
118  return EXIT_SUCCESS;
119  }
120  // Check for mandatory single options.
121  std::string mandatorySingleParameters[] = { "model-format", "model-path", "output-name", "output-path" };
122  bool somethingsMissing = false;
123  for (auto param : mandatorySingleParameters)
124  {
125  if (result.count(param) != 1)
126  {
127  std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl;
128  somethingsMissing = true;
129  }
130  }
131  // Check at least one "input-name" option.
132  if (result.count("input-name") == 0)
133  {
134  std::cerr << "Parameter \'--" << "input-name" << "\' must be specified at least once." << std::endl;
135  somethingsMissing = true;
136  }
137  // If input-tensor-shape is specified then there must be a 1:1 match with input-name.
138  if (result.count("input-tensor-shape") > 0)
139  {
140  if (result.count("input-tensor-shape") != result.count("input-name"))
141  {
142  std::cerr << "When specifying \'input-tensor-shape\' a matching number of \'input-name\' parameters "
143  "must be specified." << std::endl;
144  somethingsMissing = true;
145  }
146  }
147 
148  if (somethingsMissing)
149  {
150  std::cerr << options.help() << std::endl;
151  return EXIT_FAILURE;
152  }
153  }
154  catch (const cxxopts::exceptions::exception& e)
155  {
156  std::cerr << e.what() << std::endl << std::endl;
157  return EXIT_FAILURE;
158  }
159 
160  if (modelFormat.find("bin") != std::string::npos)
161  {
162  isModelBinary = true;
163  }
164  else if (modelFormat.find("text") != std::string::npos)
165  {
166  isModelBinary = false;
167  }
168  else
169  {
170  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
171  return EXIT_FAILURE;
172  }
173 
174  return EXIT_SUCCESS;
175 }
176 
// Tag type used to select the matching CreateNetwork overload for a parser.
template<typename T>
struct ParserType
{
    using parserType = T;
};
182 
/*
 * Wraps the load-parse-serialize pipeline: a parser selected via the
 * CreateNetwork<IParser>() template argument builds an armnn::INetwork from
 * the model file, and Serialize() writes that network to the output path.
 */
183 class ArmnnConverter
184 {
185 public:
 // Stores the conversion parameters; no parsing happens until CreateNetwork().
 // m_NetworkPtr starts as a null INetworkPtr with a no-op deleter.
186  ArmnnConverter(const std::string& modelPath,
187  const std::vector<std::string>& inputNames,
188  const std::vector<armnn::TensorShape>& inputShapes,
189  const std::vector<std::string>& outputNames,
190  const std::string& outputPath,
191  bool isModelBinary)
192  : m_NetworkPtr(armnn::INetworkPtr(nullptr, [](armnn::INetwork *){})),
193  m_ModelPath(modelPath),
194  m_InputNames(inputNames),
195  m_InputShapes(inputShapes),
196  m_OutputNames(outputNames),
197  m_OutputPath(outputPath),
198  m_IsModelBinary(isModelBinary) {}
199 
 // Writes the parsed network to m_OutputPath as a binary serialized stream.
 // Returns false when no network has been created or the stream write fails.
200  bool Serialize()
201  {
202  if (m_NetworkPtr.get() == nullptr)
203  {
204  return false;
205  }
206 
 // NOTE(review): source line 207 (creation of 'serializer') is elided from
 // this listing — presumably armnnSerializer::ISerializer::Create(); confirm
 // against the repository source.
208 
209  serializer->Serialize(*m_NetworkPtr);
210 
211  std::ofstream file(m_OutputPath, std::ios::out | std::ios::binary);
212 
213  bool retVal = serializer->SaveSerializedToStream(file);
214 
215  return retVal;
216  }
217 
 // Parses the model file into m_NetworkPtr. IParser picks which private
 // tag-dispatched overload below actually runs.
218  template <typename IParser>
219  bool CreateNetwork ()
220  {
221  return CreateNetwork (ParserType<IParser>());
222  }
223 
224 private:
225  armnn::INetworkPtr m_NetworkPtr;
226  std::string m_ModelPath;
227  std::vector<std::string> m_InputNames;
228  std::vector<armnn::TensorShape> m_InputShapes;
229  std::vector<std::string> m_OutputNames;
230  std::string m_OutputPath;
231  bool m_IsModelBinary;
232 
 // Generic path: parsers whose Create*File overloads accept explicit input
 // shapes and requested output names. Throws armnn::Exception when fewer
 // shapes than input names were supplied.
233  template <typename IParser>
234  bool CreateNetwork (ParserType<IParser>)
235  {
236  // Create a network from a file on disk
237  auto parser(IParser::Create());
238 
239  std::map<std::string, armnn::TensorShape> inputShapes;
240  if (!m_InputShapes.empty())
241  {
242  const size_t numInputShapes = m_InputShapes.size();
243  const size_t numInputBindings = m_InputNames.size();
244  if (numInputShapes < numInputBindings)
245  {
246  throw armnn::Exception(fmt::format(
247  "Not every input has its tensor shape specified: expected={0}, got={1}",
248  numInputBindings, numInputShapes));
249  }
250 
251  for (size_t i = 0; i < numInputShapes; i++)
252  {
253  inputShapes[m_InputNames[i]] = m_InputShapes[i];
254  }
255  }
256 
257  {
258  ARMNN_SCOPED_HEAP_PROFILING("Parsing");
259  m_NetworkPtr = (m_IsModelBinary ?
260  parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str(), inputShapes, m_OutputNames) :
261  parser->CreateNetworkFromTextFile(m_ModelPath.c_str(), inputShapes, m_OutputNames));
262  }
263 
264  return m_NetworkPtr.get() != nullptr;
265  }
266 
265 // removed-line marker note does not apply here
 // TfLite path: shape counts are validated, but the shapes themselves are not
 // passed on — CreateNetworkFromBinaryFile takes only the file path.
267 #if defined(ARMNN_TF_LITE_PARSER)
268  bool CreateNetwork (ParserType<armnnTfLiteParser::ITfLiteParser>)
269  {
270  // Create a network from a file on disk
 // NOTE(review): source line 271 (creation of 'parser') is elided from this
 // listing — presumably armnnTfLiteParser::ITfLiteParser::Create(); confirm.
272 
273  if (!m_InputShapes.empty())
274  {
275  const size_t numInputShapes = m_InputShapes.size();
276  const size_t numInputBindings = m_InputNames.size();
277  if (numInputShapes < numInputBindings)
278  {
279  throw armnn::Exception(fmt::format(
280  "Not every input has its tensor shape specified: expected={0}, got={1}",
281  numInputBindings, numInputShapes));
282  }
283  }
284 
285  {
286  ARMNN_SCOPED_HEAP_PROFILING("Parsing");
287  m_NetworkPtr = parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str());
288  }
289 
290  return m_NetworkPtr.get() != nullptr;
291  }
292 #endif
293 
 // Onnx path: like TfLite, shape counts are validated but only the file path
 // is handed to the parser; binary vs text selects the Create* overload.
 // NOTE(review): source lines 295/322 are elided here — the trailer references
 // ARMNN_NO_DEPRECATE_WARN_BEGIN/END, which likely bracket this overload; confirm.
294 #if defined(ARMNN_ONNX_PARSER)
296  bool CreateNetwork (ParserType<armnnOnnxParser::IOnnxParser>)
297  {
298  // Create a network from a file on disk
 // NOTE(review): source line 299 (creation of 'parser') is elided from this
 // listing — presumably armnnOnnxParser::IOnnxParser::Create(); confirm.
300 
301  if (!m_InputShapes.empty())
302  {
303  const size_t numInputShapes = m_InputShapes.size();
304  const size_t numInputBindings = m_InputNames.size();
305  if (numInputShapes < numInputBindings)
306  {
307  throw armnn::Exception(fmt::format(
308  "Not every input has its tensor shape specified: expected={0}, got={1}",
309  numInputBindings, numInputShapes));
310  }
311  }
312 
313  {
314  ARMNN_SCOPED_HEAP_PROFILING("Parsing");
315  m_NetworkPtr = (m_IsModelBinary ?
316  parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str()) :
317  parser->CreateNetworkFromTextFile(m_ModelPath.c_str()));
318  }
319 
320  return m_NetworkPtr.get() != nullptr;
321  }
323 #endif
324 
325 };
326 
327 } // anonymous namespace
328 
// Entry point: configure logging, parse the command line, build the network
// with the parser matching the requested model format, and serialize it.
329 int main(int argc, char* argv[])
330 {
331 
 // Refuse to run if no parser backend was compiled in.
332 #if (!defined(ARMNN_ONNX_PARSER) \
333  && !defined(ARMNN_TF_PARSER) \
334  && !defined(ARMNN_TF_LITE_PARSER))
335  ARMNN_LOG(fatal) << "Not built with any of the supported parsers Onnx, Tensorflow, or TfLite.";
336  return EXIT_FAILURE;
337 #endif
338 
339 #if !defined(ARMNN_SERIALIZER)
340  ARMNN_LOG(fatal) << "Not built with Serializer support.";
341  return EXIT_FAILURE;
342 #endif
343 
 // NOTE(review): the declarations of 'level' (armnn::LogSeverity) on the two
 // branches (source lines 345/347) are elided from this listing — presumably
 // Info for NDEBUG builds and Debug otherwise; confirm against the repository.
344 #ifdef NDEBUG
346 #else
348 #endif
349 
350  armnn::ConfigureLogging(true, true, level);
351 
352  std::string modelFormat;
353  std::string modelPath;
354 
355  std::vector<std::string> inputNames;
356  std::vector<std::string> inputTensorShapeStrs;
357  std::vector<armnn::TensorShape> inputTensorShapes;
358 
359  std::vector<std::string> outputNames;
360  std::string outputPath;
361 
362  bool isModelBinary = true;
363 
364  if (ParseCommandLineArgs(
365  argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
366  != EXIT_SUCCESS)
367  {
368  return EXIT_FAILURE;
369  }
370 
 // Convert each "--input-tensor-shape" string into an armnn::TensorShape.
371  for (const std::string& shapeStr : inputTensorShapeStrs)
372  {
373  if (!shapeStr.empty())
374  {
375  std::stringstream ss(shapeStr);
376 
377  try
378  {
379  armnn::TensorShape shape = ParseTensorShape(ss);
380  inputTensorShapes.push_back(shape);
381  }
382  catch (const armnn::InvalidArgumentException& e)
383  {
384  ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
385  return EXIT_FAILURE;
386  }
387  }
388  }
389 
390  ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);
391 
 // Dispatch on the format string; each branch is compiled out when the
 // matching parser support was not built in.
392  try
393  {
394  if (modelFormat.find("onnx") != std::string::npos)
395  {
396 #if defined(ARMNN_ONNX_PARSER)
397  if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
398  {
399  ARMNN_LOG(fatal) << "Failed to load model from file";
400  return EXIT_FAILURE;
401  }
402 #else
403  ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
404  return EXIT_FAILURE;
405 #endif
406  }
407  else if (modelFormat.find("tflite") != std::string::npos)
408  {
409 #if defined(ARMNN_TF_LITE_PARSER)
 // The TfLite parser has no text-mode API, so reject non-binary formats.
410  if (!isModelBinary)
411  {
412  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
413  for tflite files";
414  return EXIT_FAILURE;
415  }
416 
417  if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
418  {
419  ARMNN_LOG(fatal) << "Failed to load model from file";
420  return EXIT_FAILURE;
421  }
422 #else
423  ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
424  return EXIT_FAILURE;
425 #endif
426  }
427  else
428  {
429  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
430  return EXIT_FAILURE;
431  }
432  }
433  catch(armnn::Exception& e)
434  {
435  ARMNN_LOG(fatal) << "Failed to load model from file: " << e.what();
436  return EXIT_FAILURE;
437  }
438 
439  if (!converter.Serialize())
440  {
441  ARMNN_LOG(fatal) << "Failed to serialize model";
442  return EXIT_FAILURE;
443  }
444 
445  return EXIT_SUCCESS;
446 }
armnn::INetworkPtr
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:339
armnnTfLiteParser::ITfLiteParser::Create
static ITfLiteParserPtr Create(const armnn::Optional< TfLiteParserOptions > &options=armnn::EmptyOptional())
Definition: TfLiteParser.cpp:67
armnn::ConfigureLogging
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
Configures the logging behaviour of the ARMNN library.
Definition: Utils.cpp:20
ARMNN_NO_DEPRECATE_WARN_BEGIN
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
armnnSerializer
Definition: ISerializer.hpp:11
armnn::LogSeverity::Info
@ Info
NumericCast.hpp
StringUtils.hpp
armnn::Exception::what
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
ISerializer.hpp
armnn::TensorShape
Definition: Tensor.hpp:20
Logging.hpp
IOnnxParser.hpp
armnn::InvalidArgumentException
Definition: Exceptions.hpp:80
armnnOnnxParser::IOnnxParser::Create
static IOnnxParserPtr Create()
Definition: OnnxParser.cpp:38
ARMNN_SCOPED_HEAP_PROFILING
#define ARMNN_SCOPED_HEAP_PROFILING(TAG)
Definition: HeapProfiling.hpp:45
armnnSerializer::ISerializer::Create
static ISerializerPtr Create()
Definition: Serializer.cpp:35
armnn::Exception
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
HeapProfiling.hpp
ITfLiteParser.hpp
main
int main(int argc, char *argv[])
Definition: ArmnnConverter.cpp:329
ARMNN_NO_DEPRECATE_WARN_END
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
armnnOnnxParser::IOnnxParser
Definition: IOnnxParser.hpp:23
armnn::LogSeverity::Debug
@ Debug
armnn::LogSeverity
LogSeverity
Definition: Utils.hpp:13
armnn::stringUtils::StringTokenizer
std::vector< std::string > StringTokenizer(const std::string &str, const char *delimiters, bool tokenCompression=true)
Function to take a string and a list of delimiters and split the string into tokens based on those de...
Definition: StringUtils.hpp:23
armnnTfLiteParser::ITfLiteParser
Definition: ITfLiteParser.hpp:26
armnn::INetwork
Main network class which provides the interface for building up a neural network.
Definition: INetwork.hpp:347