ArmNN
 24.08
OptimizerOptionsOpaque Class Reference

#include <INetwork.hpp>

Public Member Functions

 OptimizerOptionsOpaque ()
 
 OptimizerOptionsOpaque (const OptimizerOptionsOpaque &other)
 
 ~OptimizerOptionsOpaque ()
 
 OptimizerOptionsOpaque (const OptimizerOptions &OptimizerStruct)
 
OptimizerOptionsOpaque & operator= (OptimizerOptionsOpaque other)
 
 OptimizerOptionsOpaque (bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16, bool importEnabled, ModelOptions modelOptions={}, bool exportEnabled=false, bool debugToFile=false)
 
 OptimizerOptionsOpaque (bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16=false, ShapeInferenceMethod shapeInferenceMethod=armnn::ShapeInferenceMethod::ValidateOnly, bool importEnabled=false, ModelOptions modelOptions={}, bool exportEnabled=false, bool debugToFile=false, bool allowExpandedDims=false)
 
const std::string ToString () const
 
bool GetProfilingEnabled () const
 
bool GetImportEnabled () const
 
bool GetExportEnabled () const
 
bool GetReduceFp32ToFp16 () const
 
bool GetReduceFp32ToBf16 () const
 
bool GetDebugEnabled () const
 
bool GetDebugToFileEnabled () const
 
bool GetAllowExpandedDims () const
 
armnn::ModelOptions GetModelOptions () const
 
armnn::ShapeInferenceMethod GetShapeInferenceMethod () const
 
void SetImportEnabled (bool ImportState)
 
void SetExportEnabled (bool ExportState)
 
void SetProfilingEnabled (bool ProfilingState)
 
void SetDebugEnabled (bool DebugState)
 
void SetDebugToFileEnabled (bool DebugFileState)
 
void SetReduceFp32ToFp16 (bool ReduceFp32ToFp16State)
 
void SetShapeInferenceMethod (armnn::ShapeInferenceMethod ShapeInferenceMethodType)
 
void AddModelOption (armnn::BackendOptions)
 
void SetAllowExpandedDims (bool ExpandedDimsAllowed)
 

Detailed Description

OptimizerOptionsOpaque is an opaque (pimpl-style) container for the options consumed by armnn::Optimize(). It exposes the same settings as the struct-based OptimizerOptions, from which it can also be constructed, through getters and setters backed by a private implementation.

Examples
CustomMemoryAllocatorSample.cpp.

Definition at line 272 of file INetwork.hpp.
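
A minimal usage sketch (not taken from the ArmNN samples; the backend names and the omitted layer construction are placeholders): create the options, adjust a few of them with the setters documented below, and pass the object to armnn::Optimize().

// Sketch only: backend choice and network contents are illustrative.
#include <armnn/ArmNN.hpp>

int main()
{
    using namespace armnn;

    IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    INetworkPtr network = INetwork::Create();
    // ... add input, workload and output layers to 'network' here ...

    OptimizerOptionsOpaque options;
    options.SetReduceFp32ToFp16(true);    // run FP32 layers in FP16 where the backend supports it
    options.SetProfilingEnabled(false);

    IOptimizedNetworkPtr optNet = Optimize(*network,
                                           {"CpuAcc", "CpuRef"},
                                           runtime->GetDeviceSpec(),
                                           options);
    return optNet ? 0 : 1;
}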

Constructor & Destructor Documentation

◆ OptimizerOptionsOpaque() [1/5]

OptimizerOptionsOpaque ( )

Definition at line 49 of file Network.cpp.

OptimizerOptionsOpaque::OptimizerOptionsOpaque()
    : p_OptimizerOptionsImpl(std::make_unique<OptimizerOptionsOpaqueImpl>())
{
}

◆ OptimizerOptionsOpaque() [2/5]

OptimizerOptionsOpaque ( const OptimizerOptionsOpaque &  other)

Definition at line 54 of file Network.cpp.

OptimizerOptionsOpaque::OptimizerOptionsOpaque(const OptimizerOptionsOpaque& other)
    : p_OptimizerOptionsImpl(std::make_unique<OptimizerOptionsOpaqueImpl>(*other.p_OptimizerOptionsImpl))
{
}

◆ ~OptimizerOptionsOpaque()

~OptimizerOptionsOpaque ( )
default

◆ OptimizerOptionsOpaque() [3/5]

OptimizerOptionsOpaque ( const OptimizerOptions &  OptimizerStruct)

Definition at line 81 of file Network.cpp.

OptimizerOptionsOpaque::OptimizerOptionsOpaque(const OptimizerOptions& OptimizerStruct)
    : p_OptimizerOptionsImpl(std::make_unique<OptimizerOptionsOpaqueImpl>())
{
    p_OptimizerOptionsImpl->m_ImportEnabled = OptimizerStruct.m_ImportEnabled;
    p_OptimizerOptionsImpl->m_shapeInferenceMethod = OptimizerStruct.m_shapeInferenceMethod;
    p_OptimizerOptionsImpl->m_ModelOptions = OptimizerStruct.m_ModelOptions;
    p_OptimizerOptionsImpl->m_ProfilingEnabled = OptimizerStruct.m_ProfilingEnabled;
    p_OptimizerOptionsImpl->m_DebugToFile = OptimizerStruct.m_DebugToFile;
    p_OptimizerOptionsImpl->m_Debug = OptimizerStruct.m_Debug;
    p_OptimizerOptionsImpl->m_ReduceFp32ToFp16 = OptimizerStruct.m_ReduceFp32ToFp16;
    p_OptimizerOptionsImpl->m_ExportEnabled = OptimizerStruct.m_ExportEnabled;
    p_OptimizerOptionsImpl->m_AllowExpandedDims = OptimizerStruct.m_AllowExpandedDims;
    p_OptimizerOptionsImpl->m_ReduceFp32ToBf16 = OptimizerStruct.m_ReduceFp32ToBf16;
}

References OptimizerOptions::m_AllowExpandedDims, OptimizerOptions::m_Debug, OptimizerOptions::m_DebugToFile, OptimizerOptions::m_ExportEnabled, OptimizerOptions::m_ImportEnabled, OptimizerOptions::m_ModelOptions, OptimizerOptions::m_ProfilingEnabled, OptimizerOptions::m_ReduceFp32ToBf16, OptimizerOptions::m_ReduceFp32ToFp16, and OptimizerOptions::m_shapeInferenceMethod.
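
A brief sketch (the helper name is illustrative, not part of the API): this converting constructor lets code that still fills in the struct-based OptimizerOptions hand those settings to interfaces that expect the opaque type.

#include <armnn/INetwork.hpp>

// Hypothetical helper: builds opaque options from the older struct-based options.
armnn::OptimizerOptionsOpaque FromLegacyOptions()
{
    armnn::OptimizerOptions legacyOptions;       // older struct-based options
    legacyOptions.m_ReduceFp32ToFp16 = true;
    legacyOptions.m_ImportEnabled    = true;

    // Every field listed above is copied into the opaque object.
    return armnn::OptimizerOptionsOpaque(legacyOptions);
}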

◆ OptimizerOptionsOpaque() [4/5]

OptimizerOptionsOpaque ( bool  reduceFp32ToFp16,
bool  debug,
bool  reduceFp32ToBf16,
bool  importEnabled,
ModelOptions  modelOptions = {},
bool  exportEnabled = false,
bool  debugToFile = false 
)

Definition at line 61 of file Network.cpp.

OptimizerOptionsOpaque::OptimizerOptionsOpaque(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16,
                                               bool importEnabled, ModelOptions modelOptions,
                                               bool exportEnabled, bool debugToFile)
    : p_OptimizerOptionsImpl(std::make_unique<OptimizerOptionsOpaqueImpl>(reduceFp32ToFp16, debug, reduceFp32ToBf16,
                                                                          importEnabled, modelOptions,
                                                                          exportEnabled, debugToFile))
{
}


◆ OptimizerOptionsOpaque() [5/5]

OptimizerOptionsOpaque ( bool  reduceFp32ToFp16,
bool  debug,
bool  reduceFp32ToBf16 = false,
ShapeInferenceMethod  shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly,
bool  importEnabled = false,
ModelOptions  modelOptions = {},
bool  exportEnabled = false,
bool  debugToFile = false,
bool  allowExpandedDims = false 
)

Definition at line 70 of file Network.cpp.

OptimizerOptionsOpaque::OptimizerOptionsOpaque(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16,
                                               ShapeInferenceMethod shapeInferenceMethod, bool importEnabled,
                                               ModelOptions modelOptions, bool exportEnabled,
                                               bool debugToFile, bool allowExpandedDims)
    : p_OptimizerOptionsImpl(std::make_unique<OptimizerOptionsOpaqueImpl>(reduceFp32ToFp16, debug, reduceFp32ToBf16,
                                                                          shapeInferenceMethod, importEnabled,
                                                                          modelOptions, exportEnabled,
                                                                          debugToFile, allowExpandedDims))
{
}

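A brief sketch of direct construction using this overload; the argument values and the helper name are illustrative, and the parameter-name comments only mirror the signature above.

#include <armnn/INetwork.hpp>

// Hypothetical helper: builds options in one call instead of using the setters.
armnn::OptimizerOptionsOpaque MakeOptions()
{
    // modelOptions, exportEnabled, debugToFile and allowExpandedDims keep their defaults.
    return armnn::OptimizerOptionsOpaque(
        /*reduceFp32ToFp16=*/ true,
        /*debug=*/            false,
        /*reduceFp32ToBf16=*/ false,
        armnn::ShapeInferenceMethod::InferAndValidate,
        /*importEnabled=*/    true);
}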

Member Function Documentation

◆ AddModelOption()

void AddModelOption ( armnn::BackendOptions  NewModelOption)

Definition at line 151 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_ModelOptions.push_back(NewModelOption);
}

Referenced by ArmnnDriverImpl::PrepareArmnnModel(), and ArmnnDriverImpl::PrepareArmnnModelFromCache().
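
A brief sketch of adding a backend-specific option. The helper name is illustrative, and "GpuAcc"/"FastMathEnabled" are just one commonly documented backend/option pair; which option names are accepted is defined by each backend.

#include <armnn/BackendOptions.hpp>
#include <armnn/INetwork.hpp>

// Hypothetical helper: attaches a GpuAcc fast-math request to the optimizer options.
armnn::OptimizerOptionsOpaque WithGpuFastMath()
{
    armnn::BackendOptions gpuFastMath("GpuAcc", {{"FastMathEnabled", true}});

    armnn::OptimizerOptionsOpaque options;
    options.AddModelOption(gpuFastMath);   // appended to the list returned by GetModelOptions()
    return options;
}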

◆ GetAllowExpandedDims()

bool GetAllowExpandedDims ( ) const

Definition at line 191 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_AllowExpandedDims;
}

Referenced by OptimizerOptionsOpaque::operator=().

◆ GetDebugEnabled()

bool GetDebugEnabled ( ) const

Definition at line 181 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_Debug;
}

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetDebugToFileEnabled()

bool GetDebugToFileEnabled ( ) const

Definition at line 186 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_DebugToFile;
}

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetExportEnabled()

bool GetExportEnabled ( ) const

Definition at line 166 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ExportEnabled;
};

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetImportEnabled()

bool GetImportEnabled ( ) const

Definition at line 161 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ImportEnabled;
};

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetModelOptions()

armnn::ModelOptions GetModelOptions ( ) const

Definition at line 196 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ModelOptions;
}

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetProfilingEnabled()

bool GetProfilingEnabled ( ) const

Definition at line 156 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ProfilingEnabled;
};

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetReduceFp32ToBf16()

bool GetReduceFp32ToBf16 ( ) const

Definition at line 176 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ReduceFp32ToBf16;
}

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetReduceFp32ToFp16()

bool GetReduceFp32ToFp16 ( ) const

Definition at line 171 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_ReduceFp32ToFp16;
};

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ GetShapeInferenceMethod()

armnn::ShapeInferenceMethod GetShapeInferenceMethod ( ) const

Definition at line 201 of file Network.cpp.

{
    return p_OptimizerOptionsImpl->m_shapeInferenceMethod;
}

Referenced by OptimizerOptionsOpaque::operator=(), and armnn::Optimize().

◆ operator=()

OptimizerOptionsOpaque & operator= ( OptimizerOptionsOpaque  other)

Definition at line 96 of file Network.cpp.

OptimizerOptionsOpaque& OptimizerOptionsOpaque::operator=(OptimizerOptionsOpaque other)
{
    p_OptimizerOptionsImpl->m_ImportEnabled = other.GetImportEnabled();
    p_OptimizerOptionsImpl->m_shapeInferenceMethod = other.GetShapeInferenceMethod();
    p_OptimizerOptionsImpl->m_ModelOptions = other.GetModelOptions();
    p_OptimizerOptionsImpl->m_ProfilingEnabled = other.GetProfilingEnabled();
    p_OptimizerOptionsImpl->m_DebugToFile = other.GetDebugToFileEnabled();
    p_OptimizerOptionsImpl->m_Debug = other.GetDebugEnabled();
    p_OptimizerOptionsImpl->m_ReduceFp32ToFp16 = other.GetReduceFp32ToFp16();
    p_OptimizerOptionsImpl->m_ExportEnabled = other.GetExportEnabled();
    p_OptimizerOptionsImpl->m_AllowExpandedDims = other.GetAllowExpandedDims();
    p_OptimizerOptionsImpl->m_ReduceFp32ToBf16 = other.GetReduceFp32ToBf16();
    return *this;
}

References OptimizerOptionsOpaque::GetAllowExpandedDims(), OptimizerOptionsOpaque::GetDebugEnabled(), OptimizerOptionsOpaque::GetDebugToFileEnabled(), OptimizerOptionsOpaque::GetExportEnabled(), OptimizerOptionsOpaque::GetImportEnabled(), OptimizerOptionsOpaque::GetModelOptions(), OptimizerOptionsOpaque::GetProfilingEnabled(), OptimizerOptionsOpaque::GetReduceFp32ToBf16(), OptimizerOptionsOpaque::GetReduceFp32ToFp16(), and OptimizerOptionsOpaque::GetShapeInferenceMethod().

◆ SetAllowExpandedDims()

void SetAllowExpandedDims ( bool  ExpandedDimsAllowed)

Definition at line 146 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_AllowExpandedDims = ExpandedDimsAllowed;
}

◆ SetDebugEnabled()

void SetDebugEnabled ( bool  DebugState)

Definition at line 126 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_Debug = DebugState;
}

◆ SetDebugToFileEnabled()

void SetDebugToFileEnabled ( bool  DebugFileState)

Definition at line 131 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_DebugToFile = DebugFileState;
}

◆ SetExportEnabled()

void SetExportEnabled ( bool  ExportState)

Definition at line 116 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_ExportEnabled = ExportState;
}

◆ SetImportEnabled()

void SetImportEnabled ( bool  ImportState)

Examples
CustomMemoryAllocatorSample.cpp.

Definition at line 111 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_ImportEnabled = ImportState;
}
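
A brief sketch of enabling zero-copy input/output, as CustomMemoryAllocatorSample.cpp does (the helper name is illustrative). Imported or exported buffers must meet the backend's alignment requirements, and matching MemorySource values normally also have to be supplied to IRuntime::LoadNetwork via INetworkProperties, which is not shown here.

#include <armnn/INetwork.hpp>

// Hypothetical helper: requests import of input buffers and export of output buffers.
armnn::OptimizerOptionsOpaque WithZeroCopyIo()
{
    armnn::OptimizerOptionsOpaque options;
    options.SetImportEnabled(true);   // inputs may be imported rather than copied
    options.SetExportEnabled(true);   // outputs may be exported rather than copied
    return options;
}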

◆ SetProfilingEnabled()

void SetProfilingEnabled ( bool  ProfilingState)

Definition at line 121 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_ProfilingEnabled = ProfilingState;
}

Referenced by ArmnnDriverImpl::PrepareArmnnModel(), and ArmnnDriverImpl::PrepareArmnnModelFromCache().

◆ SetReduceFp32ToFp16()

void SetReduceFp32ToFp16 ( bool  ReduceFp32ToFp16State)

Definition at line 136 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_ReduceFp32ToFp16 = ReduceFp32ToFp16State;
}

Referenced by ArmnnDriverImpl::PrepareArmnnModel(), and ArmnnDriverImpl::PrepareArmnnModelFromCache().

◆ SetShapeInferenceMethod()

void SetShapeInferenceMethod ( armnn::ShapeInferenceMethod  ShapeInferenceMethodType)

Definition at line 141 of file Network.cpp.

{
    p_OptimizerOptionsImpl->m_shapeInferenceMethod = ShapeInferenceMethodType;
}
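
A brief sketch of switching from the default ValidateOnly behaviour to InferAndValidate, which asks the optimizer to infer missing output shapes instead of only checking the ones already specified (the helper name is illustrative).

#include <armnn/INetwork.hpp>

// Hypothetical helper: lets the optimizer infer output shapes that are not fully specified.
armnn::OptimizerOptionsOpaque WithShapeInference()
{
    armnn::OptimizerOptionsOpaque options;
    options.SetShapeInferenceMethod(armnn::ShapeInferenceMethod::InferAndValidate);
    return options;
}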

◆ ToString()

const std::string ToString ( ) const

Definition at line 206 of file Network.cpp.

{
    std::stringstream stream;
    stream << "OptimizerOptions: \n";
    stream << "\tReduceFp32ToFp16: " << p_OptimizerOptionsImpl->m_ReduceFp32ToFp16 << "\n";
    stream << "\tReduceFp32ToBf16: " << p_OptimizerOptionsImpl->m_ReduceFp32ToBf16 << "\n";
    stream << "\tDebug: " << p_OptimizerOptionsImpl->m_Debug << "\n";
    stream << "\tDebug to file: " << p_OptimizerOptionsImpl->m_DebugToFile << "\n";
    stream << "\tShapeInferenceMethod: " <<
           (p_OptimizerOptionsImpl->m_shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly ?
            "ValidateOnly" : "InferAndValidate") << "\n";
    stream << "\tImportEnabled: " << p_OptimizerOptionsImpl->m_ImportEnabled << "\n";
    stream << "\tExportEnabled: " << p_OptimizerOptionsImpl->m_ExportEnabled << "\n";
    stream << "\tProfilingEnabled: " << p_OptimizerOptionsImpl->m_ProfilingEnabled << "\n";
    stream << "\tAllowExpandedDims: " << p_OptimizerOptionsImpl->m_AllowExpandedDims << "\n";

    stream << "\tModelOptions: \n";
    for (auto optionsGroup : p_OptimizerOptionsImpl->m_ModelOptions)
    {
        for (size_t i = 0; i < optionsGroup.GetOptionCount(); i++)
        {
            const armnn::BackendOptions::BackendOption option = optionsGroup.GetOption(i);
            stream << "\t\tBackend: " << optionsGroup.GetBackendId() << "\n"
                   << "\t\t\tOption: " << option.GetName() << "\n"
                   << "\t\t\tValue: " << std::string(option.GetValue().ToString()) << "\n";
        }
    }

    return stream.str();
}

References BackendOptions::BackendOption::GetName(), BackendOptions::BackendOption::GetValue(), BackendOptions::Var::ToString(), and armnn::ValidateOnly.

Referenced by armnn::Optimize().
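
A brief sketch of using ToString() for logging, e.g. just before calling armnn::Optimize() (the helper name is illustrative).

#include <armnn/INetwork.hpp>
#include <iostream>

// Hypothetical helper: prints the multi-line, human-readable summary of every option.
void LogOptimizerOptions(const armnn::OptimizerOptionsOpaque& options)
{
    std::cout << options.ToString();
}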


The documentation for this class was generated from the following files:
INetwork.hpp
Network.cpp