ArmNN
 24.08
ParserPrototxtFixture.hpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017-2024 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #pragma once
7 
8 #include <armnn/IRuntime.hpp>
9 #include <armnnTestUtils/TensorHelpers.hpp>
10 
11 #include <Network.hpp>
12 #include <VerificationHelpers.hpp>
13 
14 #include <doctest/doctest.h>
15 #include <fmt/format.h>
16 
17 #include <iomanip>
18 #include <string>
19 
20 namespace armnnUtils
21 {
22 
23 template<typename TParser>
25 {
28  : m_Parser(TParser::Create())
29  , m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions()))
31  {
32  }
34 
35  /// Parses and loads the network defined by the m_Prototext string.
36  /// @{
37  void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName);
38  void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
39  const std::string& inputName,
40  const std::string& outputName);
41  void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
42  const armnn::TensorShape& outputTensorShape,
43  const std::string& inputName,
44  const std::string& outputName);
45  void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
46  const std::vector<std::string>& requestedOutputs);
47  void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes);
48  void Setup();
50  const std::map<std::string,armnn::TensorShape>& inputShapes,
51  const std::vector<std::string>& requestedOutputs);
52  /// @}
53 
54  /// Executes the network with the given input tensor and checks the result against the given output tensor.
55  /// This overload assumes that the network has a single input and a single output.
56  template <std::size_t NumOutputDimensions>
57  void RunTest(const std::vector<float>& inputData, const std::vector<float>& expectedOutputData);
58 
59  /// Executes the network with the given input tensor and checks the result against the given output tensor.
60  /// Calls RunTest with output type of uint8_t for checking comparison operators.
61  template <std::size_t NumOutputDimensions>
62  void RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
63  const std::map<std::string, std::vector<uint8_t>>& expectedOutputData);
64 
65  /// Executes the network with the given input tensors and checks the results against the given output tensors.
66  /// This overload supports multiple inputs and multiple outputs, identified by name.
67  template <std::size_t NumOutputDimensions, typename T = float>
68  void RunTest(const std::map<std::string, std::vector<float>>& inputData,
69  const std::map<std::string, std::vector<T>>& expectedOutputData);
70 
71  std::string m_Prototext;
72  std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser;
75 
76  /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
77  /// so they don't need to be passed to the single-input-single-output overload of RunTest().
78  /// @{
79  std::string m_SingleInputName;
80  std::string m_SingleOutputName;
81  /// @}
82 
83  /// This will store the output shape so it don't need to be passed to the single-input-single-output overload
84  /// of RunTest().
86 };
87 
88 template<typename TParser>
90  const std::string& outputName)
91 {
92  // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
93  m_SingleInputName = inputName;
94  m_SingleOutputName = outputName;
95  Setup({ }, { outputName });
96 }
97 
98 template<typename TParser>
100  const std::string& inputName,
101  const std::string& outputName)
102 {
103  // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
104  m_SingleInputName = inputName;
105  m_SingleOutputName = outputName;
106  Setup({ { inputName, inputTensorShape } }, { outputName });
107 }
108 
109 template<typename TParser>
111  const armnn::TensorShape& outputTensorShape,
112  const std::string& inputName,
113  const std::string& outputName)
114 {
115  // Stores the input name, the output name and the output tensor shape
116  // so they don't need to be passed to the single-input-single-output RunTest().
117  m_SingleInputName = inputName;
118  m_SingleOutputName = outputName;
119  m_SingleOutputShape = outputTensorShape;
120  Setup({ { inputName, inputTensorShape } }, { outputName });
121 }
122 
123 template<typename TParser>
124 void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
125  const std::vector<std::string>& requestedOutputs)
126 {
127  std::string errorMessage;
128 
129  armnn::INetworkPtr network =
130  m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
131  auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
132  armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
133  if (ret != armnn::Status::Success)
134  {
135  throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
136  errorMessage,
137  CHECK_LOCATION().AsString()));
138  }
139 }
140 
141 template<typename TParser>
142 void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes)
143 {
144  std::string errorMessage;
146  armnn::INetworkPtr network =
147  m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes);
148  auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
149  armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
150  if (ret != armnn::Status::Success)
151  {
152  throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
153  errorMessage,
154  CHECK_LOCATION().AsString()));
155  }
157 }
158 
159 template<typename TParser>
161 {
162  std::string errorMessage;
164  armnn::INetworkPtr network =
165  m_Parser->CreateNetworkFromString(m_Prototext.c_str());
166  auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
167  armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
168  if (ret != armnn::Status::Success)
169  {
170  throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
171  errorMessage,
172  CHECK_LOCATION().AsString()));
173  }
175 }
176 
177 template<typename TParser>
179  const std::map<std::string,armnn::TensorShape>& inputShapes,
180  const std::vector<std::string>& requestedOutputs)
181 {
182  armnn::INetworkPtr network =
183  m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
184  auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
185  return optimized;
186 }
187 
188 template<typename TParser>
189 template <std::size_t NumOutputDimensions>
190 void ParserPrototxtFixture<TParser>::RunTest(const std::vector<float>& inputData,
191  const std::vector<float>& expectedOutputData)
192 {
193  RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
194 }
195 
196 template<typename TParser>
197 template <std::size_t NumOutputDimensions>
198 void ParserPrototxtFixture<TParser>::RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
199  const std::map<std::string, std::vector<uint8_t>>&
200  expectedOutputData)
201 {
202  RunTest<NumOutputDimensions, uint8_t>(inputData, expectedOutputData);
203 }
204 
205 template<typename TParser>
206 template <std::size_t NumOutputDimensions, typename T>
207 void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData,
208  const std::map<std::string, std::vector<T>>& expectedOutputData)
209 {
210  // Sets up the armnn input tensors from the given vectors.
211  armnn::InputTensors inputTensors;
212  for (auto&& it : inputData)
213  {
214  armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
215  bindingInfo.second.SetConstant(true);
216  inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
217  if (bindingInfo.second.GetNumElements() != it.second.size())
218  {
219  throw armnn::Exception(fmt::format("Input tensor {0} is expected to have {1} elements. "
220  "{2} elements supplied. {3}",
221  it.first,
222  bindingInfo.second.GetNumElements(),
223  it.second.size(),
224  CHECK_LOCATION().AsString()));
225  }
226  }
227 
228  // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
229  std::map<std::string, std::vector<T>> outputStorage;
230  armnn::OutputTensors outputTensors;
231  for (auto&& it : expectedOutputData)
232  {
233  armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
234  outputStorage.emplace(it.first, std::vector<T>(bindingInfo.second.GetNumElements()));
235  outputTensors.push_back(
236  { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
237  }
238 
239  m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
240 
241  // Compares each output tensor to the expected values.
242  for (auto&& it : expectedOutputData)
243  {
244  armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
245  if (bindingInfo.second.GetNumElements() != it.second.size())
246  {
247  throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} elements. "
248  "{2} elements supplied. {3}",
249  it.first,
250  bindingInfo.second.GetNumElements(),
251  it.second.size(),
252  CHECK_LOCATION().AsString()));
253  }
254 
255  // If the expected output shape is set, the output tensor checks will be carried out.
256  if (m_SingleOutputShape.GetNumDimensions() != 0)
257  {
258 
259  if (bindingInfo.second.GetShape().GetNumDimensions() == NumOutputDimensions &&
260  bindingInfo.second.GetShape().GetNumDimensions() == m_SingleOutputShape.GetNumDimensions())
261  {
262  for (unsigned int i = 0; i < m_SingleOutputShape.GetNumDimensions(); ++i)
263  {
264  if (m_SingleOutputShape[i] != bindingInfo.second.GetShape()[i])
265  {
266  // This exception message could not be created by fmt:format because of an oddity in
267  // the operator << of TensorShape.
268  std::stringstream message;
269  message << "Output tensor " << it.first << " is expected to have "
270  << bindingInfo.second.GetShape() << "shape. "
271  << m_SingleOutputShape << " shape supplied. "
272  << CHECK_LOCATION().AsString();
273  throw armnn::Exception(message.str());
274  }
275  }
276  }
277  else
278  {
279  throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} dimensions. "
280  "{2} dimensions supplied. {3}",
281  it.first,
282  bindingInfo.second.GetShape().GetNumDimensions(),
283  NumOutputDimensions,
284  CHECK_LOCATION().AsString()));
285  }
286  }
287 
288  auto outputExpected = it.second;
289  auto shape = bindingInfo.second.GetShape();
290  if (std::is_same<T, uint8_t>::value)
291  {
292  auto result = CompareTensors(outputExpected, outputStorage[it.first], shape, shape, true);
293  CHECK_MESSAGE(result.m_Result, result.m_Message.str());
294  }
295  else
296  {
297  auto result = CompareTensors(outputExpected, outputStorage[it.first], shape, shape);
298  CHECK_MESSAGE(result.m_Result, result.m_Message.str());
299  }
300  }
301 }
302 
303 } // namespace armnnUtils
armnn::INetworkPtr
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:339
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
armnn::BindingPointInfo
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
Definition: Tensor.hpp:276
armnn::Tensor
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:321
IRuntime.hpp
armnn::InputTensors
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:394
armnn::Compute::CpuRef
@ CpuRef
CPU Execution: Reference C++ kernels.
armnnUtils::ParserPrototxtFixture
Definition: ParserPrototxtFixture.hpp:24
CHECK_LOCATION
#define CHECK_LOCATION()
Definition: Exceptions.hpp:203
armnnUtils::ParserPrototxtFixture::RunComparisonTest
void RunComparisonTest(const std::map< std::string, std::vector< float >> &inputData, const std::map< std::string, std::vector< uint8_t >> &expectedOutputData)
Executes the network with the given input tensor and checks the result against the given output tenso...
Definition: ParserPrototxtFixture.hpp:198
ARMNN_NO_DEPRECATE_WARN_BEGIN
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
Definition: Deprecated.hpp:33
armnn::OutputTensors
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:395
armnn::IRuntime
Definition: IRuntime.hpp:75
armnn::TensorShape
Definition: Tensor.hpp:20
VerificationHelpers.hpp
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
armnnUtils::ParserPrototxtFixture::ParserPrototxtFixture
ARMNN_NO_DEPRECATE_WARN_BEGIN ParserPrototxtFixture()
Definition: ParserPrototxtFixture.hpp:27
armnnUtils::ParserPrototxtFixture::m_Prototext
std::string m_Prototext
Definition: ParserPrototxtFixture.hpp:71
armnn::IRuntimePtr
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:41
armnnUtils::ParserPrototxtFixture::m_SingleOutputShape
armnn::TensorShape m_SingleOutputShape
This will store the output shape so it doesn't need to be passed to the single-input-single-output overload of RunTest().
Definition: ParserPrototxtFixture.hpp:85
armnnUtils::ParserPrototxtFixture::SetupSingleInputSingleOutput
ARMNN_NO_DEPRECATE_WARN_END void SetupSingleInputSingleOutput(const std::string &inputName, const std::string &outputName)
Parses and loads the network defined by the m_Prototext string.
Definition: ParserPrototxtFixture.hpp:89
armnnUtils::ParserPrototxtFixture::m_SingleOutputName
std::string m_SingleOutputName
Definition: ParserPrototxtFixture.hpp:80
armnnUtils
Definition: CompatibleTypes.hpp:10
armnnUtils::ParserPrototxtFixture::m_SingleInputName
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to RunTest().
Definition: ParserPrototxtFixture.hpp:79
armnn::Status::Success
@ Success
armnn::Exception
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
armnnUtils::ParserPrototxtFixture::m_Runtime
armnn::IRuntimePtr m_Runtime
Definition: ParserPrototxtFixture.hpp:73
armnnUtils::ParserPrototxtFixture::Setup
void Setup()
Definition: ParserPrototxtFixture.hpp:160
armnn::Status
Status
Definition: Types.hpp:42
Network.hpp
ARMNN_NO_DEPRECATE_WARN_END
#define ARMNN_NO_DEPRECATE_WARN_END
Definition: Deprecated.hpp:34
armnn
Copyright (c) 2021 ARM Limited and Contributors.
Definition: 01_00_quick_start.dox:6
armnn::ConstTensor
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:329
armnnUtils::ParserPrototxtFixture::RunTest
void RunTest(const std::vector< float > &inputData, const std::vector< float > &expectedOutputData)
Executes the network with the given input tensor and checks the result against the given output tenso...
Definition: ParserPrototxtFixture.hpp:190
armnnUtils::ParserPrototxtFixture::m_NetworkIdentifier
armnn::NetworkId m_NetworkIdentifier
Definition: ParserPrototxtFixture.hpp:74
armnn::Optimize
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options=OptimizerOptionsOpaque(), Optional< std::vector< std::string > & > messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:2145
armnnUtils::ParserPrototxtFixture::SetupOptimizedNetwork
armnn::IOptimizedNetworkPtr SetupOptimizedNetwork(const std::map< std::string, armnn::TensorShape > &inputShapes, const std::vector< std::string > &requestedOutputs)
Definition: ParserPrototxtFixture.hpp:178
armnnUtils::ParserPrototxtFixture::m_Parser
std::unique_ptr< TParser, void(*)(TParser *parser)> m_Parser
Definition: ParserPrototxtFixture.hpp:72