ArmNN
 26.01
Loading...
Searching...
No Matches
ParserPrototxtFixture< TParser > Struct Template Reference

#include <ParserPrototxtFixture.hpp>

Collaboration diagram for ParserPrototxtFixture< TParser >:
[legend]

Public Member Functions

ARMNN_NO_DEPRECATE_WARN_BEGIN ParserPrototxtFixture ()
 
template<std::size_t NumOutputDimensions>
void RunTest (const std::vector< float > &inputData, const std::vector< float > &expectedOutputData)
 Executes the network with the given input tensor and checks the result against the given output tensor.
 
template<std::size_t NumOutputDimensions>
void RunComparisonTest (const std::map< std::string, std::vector< float > > &inputData, const std::map< std::string, std::vector< uint8_t > > &expectedOutputData)
 Executes the network with the given input tensor and checks the result against the given output tensor.
 
template<std::size_t NumOutputDimensions, typename T = float>
void RunTest (const std::map< std::string, std::vector< float > > &inputData, const std::map< std::string, std::vector< T > > &expectedOutputData)
 Executes the network with the given input tensors and checks the results against the given output tensors.
 
ARMNN_NO_DEPRECATE_WARN_END void SetupSingleInputSingleOutput (const std::string &inputName, const std::string &outputName)
 Parses and loads the network defined by the m_Prototext string.
 
void SetupSingleInputSingleOutput (const armnn::TensorShape &inputTensorShape, const std::string &inputName, const std::string &outputName)
 
void SetupSingleInputSingleOutput (const armnn::TensorShape &inputTensorShape, const armnn::TensorShape &outputTensorShape, const std::string &inputName, const std::string &outputName)
 
void Setup (const std::map< std::string, armnn::TensorShape > &inputShapes, const std::vector< std::string > &requestedOutputs)
 
void Setup (const std::map< std::string, armnn::TensorShape > &inputShapes)
 
void Setup ()
 
armnn::IOptimizedNetworkPtr SetupOptimizedNetwork (const std::map< std::string, armnn::TensorShape > &inputShapes, const std::vector< std::string > &requestedOutputs)
 

Public Attributes

std::string m_Prototext
 
std::unique_ptr< TParser, void(*)(TParser *parser)> m_Parser
 
armnn::IRuntimePtr m_Runtime
 
armnn::NetworkId m_NetworkIdentifier
 
armnn::TensorShape m_SingleOutputShape
 This will store the output shape so it doesn't need to be passed to the single-input-single-output overload of RunTest().
 
std::string m_SingleInputName
 If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().
 
std::string m_SingleOutputName
 

Detailed Description

template<typename TParser>
struct armnnUtils::ParserPrototxtFixture< TParser >

Definition at line 24 of file ParserPrototxtFixture.hpp.

Constructor & Destructor Documentation

◆ ParserPrototxtFixture()

template<typename TParser >
ARMNN_NO_DEPRECATE_WARN_BEGIN ParserPrototxtFixture ( )
inline

Definition at line 27 of file ParserPrototxtFixture.hpp.

28 : m_Parser(TParser::Create())
31 {
32 }
static IRuntimePtr Create(const CreationOptions &options)
Definition Runtime.cpp:52
std::unique_ptr< TParser, void(*)(TParser *parser)> m_Parser

Member Function Documentation

◆ RunComparisonTest()

template<typename TParser >
template<std::size_t NumOutputDimensions>
void RunComparisonTest ( const std::map< std::string, std::vector< float > > & inputData,
const std::map< std::string, std::vector< uint8_t > > & expectedOutputData )

Executes the network with the given input tensor and checks the result against the given output tensor.

Calls RunTest with output type of uint8_t for checking comparison operators.

Definition at line 198 of file ParserPrototxtFixture.hpp.

201{
202 RunTest<NumOutputDimensions, uint8_t>(inputData, expectedOutputData);
203}

◆ RunTest() [1/2]

template<typename TParser >
template<std::size_t NumOutputDimensions, typename T >
void RunTest ( const std::map< std::string, std::vector< float > > & inputData,
const std::map< std::string, std::vector< T > > & expectedOutputData )

Executes the network with the given input tensors and checks the results against the given output tensors.

This overload supports multiple inputs and multiple outputs, identified by name.

Definition at line 207 of file ParserPrototxtFixture.hpp.

209{
210 // Sets up the armnn input tensors from the given vectors.
211 armnn::InputTensors inputTensors;
212 for (auto&& it : inputData)
213 {
214 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
215 bindingInfo.second.SetConstant(true);
216 inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
217 if (bindingInfo.second.GetNumElements() != it.second.size())
218 {
219 throw armnn::Exception(fmt::format("Input tensor {0} is expected to have {1} elements. "
220 "{2} elements supplied. {3}",
221 it.first,
222 bindingInfo.second.GetNumElements(),
223 it.second.size(),
224 CHECK_LOCATION().AsString()));
225 }
226 }
227
228 // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
229 std::map<std::string, std::vector<T>> outputStorage;
230 armnn::OutputTensors outputTensors;
231 for (auto&& it : expectedOutputData)
232 {
233 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
234 outputStorage.emplace(it.first, std::vector<T>(bindingInfo.second.GetNumElements()));
235 outputTensors.push_back(
236 { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
237 }
238
239 m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
240
241 // Compares each output tensor to the expected values.
242 for (auto&& it : expectedOutputData)
243 {
244 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
245 if (bindingInfo.second.GetNumElements() != it.second.size())
246 {
247 throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} elements. "
248 "{2} elements supplied. {3}",
249 it.first,
250 bindingInfo.second.GetNumElements(),
251 it.second.size(),
252 CHECK_LOCATION().AsString()));
253 }
254
255 // If the expected output shape is set, the output tensor checks will be carried out.
257 {
258
259 if (bindingInfo.second.GetShape().GetNumDimensions() == NumOutputDimensions &&
260 bindingInfo.second.GetShape().GetNumDimensions() == m_SingleOutputShape.GetNumDimensions())
261 {
262 for (unsigned int i = 0; i < m_SingleOutputShape.GetNumDimensions(); ++i)
263 {
264 if (m_SingleOutputShape[i] != bindingInfo.second.GetShape()[i])
265 {
266 // This exception message could not be created by fmt:format because of an oddity in
267 // the operator << of TensorShape.
268 std::stringstream message;
269 message << "Output tensor " << it.first << " is expected to have "
270 << bindingInfo.second.GetShape() << "shape. "
271 << m_SingleOutputShape << " shape supplied. "
272 << CHECK_LOCATION().AsString();
273 throw armnn::Exception(message.str());
274 }
275 }
276 }
277 else
278 {
279 throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} dimensions. "
280 "{2} dimensions supplied. {3}",
281 it.first,
282 bindingInfo.second.GetShape().GetNumDimensions(),
283 NumOutputDimensions,
284 CHECK_LOCATION().AsString()));
285 }
286 }
287
288 auto outputExpected = it.second;
289 auto shape = bindingInfo.second.GetShape();
290 if (std::is_same<T, uint8_t>::value)
291 {
292 auto result = CompareTensors(outputExpected, outputStorage[it.first], shape, shape, true);
293 CHECK_MESSAGE(result.m_Result, result.m_Message.str());
294 }
295 else
296 {
297 auto result = CompareTensors(outputExpected, outputStorage[it.first], shape, shape);
298 CHECK_MESSAGE(result.m_Result, result.m_Message.str());
299 }
300 }
301}
#define CHECK_LOCATION()
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition Tensor.hpp:330
Base class for all ArmNN exceptions so that users can filter to just those.
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition Tensor.hpp:322
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
Definition Tensor.cpp:174
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
Definition Tensor.hpp:276
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition Tensor.hpp:394
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition Tensor.hpp:395
armnn::TensorShape m_SingleOutputShape
This will store the output shape so it doesn't need to be passed to the single-input-single-output over...

References CHECK_LOCATION.

◆ RunTest() [2/2]

template<typename TParser >
template<std::size_t NumOutputDimensions>
void RunTest ( const std::vector< float > & inputData,
const std::vector< float > & expectedOutputData )

Executes the network with the given input tensor and checks the result against the given output tensor.

This overload assumes that the network has a single input and a single output.

Definition at line 190 of file ParserPrototxtFixture.hpp.

192{
193 RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
194}
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and outpu...

◆ Setup() [1/3]

template<typename TParser >
void Setup ( )

Definition at line 160 of file ParserPrototxtFixture.hpp.

161{
162 std::string errorMessage;
164 armnn::INetworkPtr network =
165 m_Parser->CreateNetworkFromString(m_Prototext.c_str());
166 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
167 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
168 if (ret != armnn::Status::Success)
169 {
170 throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
171 errorMessage,
172 CHECK_LOCATION().AsString()));
173 }
175}
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
#define ARMNN_NO_DEPRECATE_WARN_END
Status
enumeration
Definition Types.hpp:43
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options=OptimizerOptionsOpaque(), Optional< std::vector< std::string > & > messages=EmptyOptional())
Create an optimized version of the network.
Definition Network.cpp:2287
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition INetwork.hpp:339
@ CpuRef
CPU Execution: Reference C++ kernels.

References ARMNN_NO_DEPRECATE_WARN_BEGIN, ARMNN_NO_DEPRECATE_WARN_END, CHECK_LOCATION, armnn::CpuRef, armnn::Optimize(), and armnn::Success.

◆ Setup() [2/3]

template<typename TParser >
void Setup ( const std::map< std::string, armnn::TensorShape > & inputShapes)

Definition at line 142 of file ParserPrototxtFixture.hpp.

143{
144 std::string errorMessage;
146 armnn::INetworkPtr network =
147 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes);
148 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
149 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
150 if (ret != armnn::Status::Success)
151 {
152 throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
153 errorMessage,
154 CHECK_LOCATION().AsString()));
155 }
157}

References ARMNN_NO_DEPRECATE_WARN_BEGIN, ARMNN_NO_DEPRECATE_WARN_END, CHECK_LOCATION, armnn::CpuRef, armnn::Optimize(), and armnn::Success.

◆ Setup() [3/3]

template<typename TParser >
void Setup ( const std::map< std::string, armnn::TensorShape > & inputShapes,
const std::vector< std::string > & requestedOutputs )

Definition at line 124 of file ParserPrototxtFixture.hpp.

126{
127 std::string errorMessage;
128
129 armnn::INetworkPtr network =
130 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
131 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
132 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
133 if (ret != armnn::Status::Success)
134 {
135 throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
136 errorMessage,
137 CHECK_LOCATION().AsString()));
138 }
139}

References CHECK_LOCATION, armnn::CpuRef, armnn::Optimize(), and armnn::Success.

◆ SetupOptimizedNetwork()

template<typename TParser >
armnn::IOptimizedNetworkPtr SetupOptimizedNetwork ( const std::map< std::string, armnn::TensorShape > & inputShapes,
const std::vector< std::string > & requestedOutputs )

Definition at line 178 of file ParserPrototxtFixture.hpp.

181{
182 armnn::INetworkPtr network =
183 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
184 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
185 return optimized;
186}

References armnn::CpuRef, and armnn::Optimize().

◆ SetupSingleInputSingleOutput() [1/3]

template<typename TParser >
void SetupSingleInputSingleOutput ( const armnn::TensorShape & inputTensorShape,
const armnn::TensorShape & outputTensorShape,
const std::string & inputName,
const std::string & outputName )

Definition at line 110 of file ParserPrototxtFixture.hpp.

114{
115 // Stores the input name, the output name and the output tensor shape
116 // so they don't need to be passed to the single-input-single-output RunTest().
117 m_SingleInputName = inputName;
118 m_SingleOutputName = outputName;
119 m_SingleOutputShape = outputTensorShape;
120 Setup({ { inputName, inputTensorShape } }, { outputName });
121}

◆ SetupSingleInputSingleOutput() [2/3]

template<typename TParser >
void SetupSingleInputSingleOutput ( const armnn::TensorShape & inputTensorShape,
const std::string & inputName,
const std::string & outputName )

Definition at line 99 of file ParserPrototxtFixture.hpp.

102{
103 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
104 m_SingleInputName = inputName;
105 m_SingleOutputName = outputName;
106 Setup({ { inputName, inputTensorShape } }, { outputName });
107}

◆ SetupSingleInputSingleOutput() [3/3]

template<typename TParser >
void SetupSingleInputSingleOutput ( const std::string & inputName,
const std::string & outputName )

Parses and loads the network defined by the m_Prototext string.

Definition at line 89 of file ParserPrototxtFixture.hpp.

91{
92 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
93 m_SingleInputName = inputName;
94 m_SingleOutputName = outputName;
95 Setup({ }, { outputName });
96}

Member Data Documentation

◆ m_NetworkIdentifier

template<typename TParser >
armnn::NetworkId m_NetworkIdentifier

Definition at line 74 of file ParserPrototxtFixture.hpp.

◆ m_Parser

template<typename TParser >
std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser

Definition at line 72 of file ParserPrototxtFixture.hpp.

◆ m_Prototext

template<typename TParser >
std::string m_Prototext

Definition at line 71 of file ParserPrototxtFixture.hpp.

◆ m_Runtime

template<typename TParser >
armnn::IRuntimePtr m_Runtime

Definition at line 73 of file ParserPrototxtFixture.hpp.

◆ m_SingleInputName

template<typename TParser >
std::string m_SingleInputName

If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().

Definition at line 79 of file ParserPrototxtFixture.hpp.

◆ m_SingleOutputName

template<typename TParser >
std::string m_SingleOutputName

Definition at line 80 of file ParserPrototxtFixture.hpp.

◆ m_SingleOutputShape

template<typename TParser >
armnn::TensorShape m_SingleOutputShape

This will store the output shape so it doesn't need to be passed to the single-input-single-output overload of RunTest().

Definition at line 85 of file ParserPrototxtFixture.hpp.


The documentation for this struct was generated from the following file: