24.02
virtual const char* GetName() const = 0;

virtual std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const = 0;

using ConstantTensors = std::vector<std::reference_wrapper<std::shared_ptr<ConstTensorHandle>>>;
OptimizerOptions(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16, bool importEnabled,
                 ModelOptions modelOptions = {}, bool exportEnabled = false, bool debugToFile = false)

                 bool importEnabled = false, ModelOptions modelOptions = {}, bool exportEnabled = false,
                 bool debugToFile = false, bool allowExpandedDims = false)
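A minimal sketch of constructing the legacy options object with the parameter list shown above; the argument values are illustrative, and ToString(), whose body appears in the fragments that follow, prints the resulting settings. (OptimizerOptionsOpaque, further down this page, is the newer equivalent.)

    #include <armnn/INetwork.hpp>
    #include <iostream>

    int main()
    {
        // reduceFp32ToFp16 = false, debug = true, reduceFp32ToBf16 = false, importEnabled = false;
        // the remaining parameters keep the defaults shown in the constructor above.
        armnn::OptimizerOptions options(false, true, false, false);

        // ToString() emits one "\t<name>: <value>" line per option, as built up in the
        // stream statements below.
        std::cout << options.ToString() << std::endl;
        return 0;
    }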
std::stringstream stream;
stream << "OptimizerOptions: \n";
stream << "\tDebug: " << m_Debug << "\n";
stream << "\tShapeInferenceMethod: "
       << (m_shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly ? "ValidateOnly" : "InferAndValidate")
       << "\n";
stream << "\tModelOptions: \n";
for (size_t i = 0; i < optionsGroup.GetOptionCount(); i++)
{
    stream << "\t\tBackend: " << optionsGroup.GetBackendId() << "\n"
           << "\t\t\tOption: " << option.GetName() << "\n"
                       ModelOptions modelOptions = {}, bool exportEnabled = false, bool debugToFile = false);

                       bool importEnabled = false, ModelOptions modelOptions = {}, bool exportEnabled = false,
                       bool debugToFile = false, bool allowExpandedDims = false);
std::unique_ptr<armnn::OptimizerOptionsOpaqueImpl> p_OptimizerOptionsImpl;
// The layer-creation declarations in this part of the listing all end with the
// same optional argument:
                       const char* name = nullptr);
              const std::vector<BackendId>& backendPreferences,
              Optional<std::vector<std::string>&> messages);

              const std::vector<BackendId>& backendPreferences,
              Optional<std::vector<std::string>&> messages);
namespace experimental
{
    class AsyncNetworkImpl;
}

struct BackendSettings;
struct OptimizationResult;
class OptimizedNetworkImpl;
arm::pipe::ProfilingGuid GetGuid() const;

const std::shared_ptr<IProfiler>& GetProfiler() const;
              const std::vector<BackendId>& backendPreferences,
              Optional<std::vector<std::string>&> messages);

              const std::vector<BackendId>& backendPreferences,
              Optional<std::vector<std::string>&> messages);

// Further Optimize() overloads in the listing repeat the same backendPreferences parameter;
// the full signature appears below.
IConnectableLayer * AddReshapeLayer(const ReshapeDescriptor &reshapeDescriptor, const char *name=nullptr)
Adds a reshape layer to the network.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
IConnectableLayer * AddReverseV2Layer(const char *name=nullptr)
Add a ReverseV2 layer to the network.
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
void ExecuteStrategy(IStrategy &strategy) const
virtual void SetBackendId(const BackendId &id)=0
Set the backend of the IConnectableLayer.
void SetExportEnabled(bool ExportState)
IConnectableLayer * AddConstantLayer(const ConstTensor &input, const char *name=nullptr)
Adds a layer with no inputs and a single output, which always corresponds to the passed in constant tensor.
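As a sketch of the constant-layer call above; the tensor shape, values, and the SetConstant() marking are illustrative assumptions rather than taken from this page:

    #include <armnn/ArmNN.hpp>
    #include <vector>

    // Adds a 1x3 Float32 constant to an existing network and publishes its TensorInfo.
    void AddExampleConstant(armnn::INetwork& network)
    {
        armnn::TensorInfo info({1, 3}, armnn::DataType::Float32);
        info.SetConstant(true);                                   // immutable backing store

        std::vector<float> values = {1.0f, 2.0f, 3.0f};
        armnn::ConstTensor constant(info, values);

        armnn::IConnectableLayer* layer = network.AddConstantLayer(constant, "const");
        layer->GetOutputSlot(0).SetTensorInfo(info);              // single output, no inputs
    }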
void ExecuteStrategy(IStrategy &strategy) const
A ViewsDescriptor for the SplitterLayer.
const std::shared_ptr< IProfiler > & GetProfiler() const
An ActivationDescriptor for the ActivationLayer.
IConnectableLayer * AddQLstmLayer(const QLstmDescriptor &descriptor, const LstmInputParams &params, const char *name=nullptr)
Add a QLstm layer to the network.
A FullyConnectedDescriptor for the FullyConnectedLayer.
const std::string ToString() const
IConnectableLayer * AddCastLayer(const char *name=nullptr)
Adds a cast layer to the network.
IConnectableLayer * AddReduceLayer(const ReduceDescriptor &reduceDescriptor, const char *name=nullptr)
Adds a reduce layer to the network.
IConnectableLayer * AddMaximumLayer(const char *name=nullptr)
Add a Maximum layer to the network.
bool m_ImportEnabled
Enable Import.
A QLstmDescriptor for the QLstmLayer.
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
IConnectableLayer * AddAdditionLayer(const char *name=nullptr)
Adds an addition layer to the network.
std::string GetName() const
OptimizerOptionsOpaque & operator=(OptimizerOptionsOpaque other)
IConnectableLayer * AddSliceLayer(const SliceDescriptor &sliceDescriptor, const char *name=nullptr)
Adds a slice layer to the network.
A Pooling3dDescriptor for the Pooling3dLayer.
A ResizeDescriptor for the ResizeLayer.
An ArgMinMaxDescriptor for ArgMinMaxLayer.
virtual const char * GetName() const =0
Returns the name of the layer.
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
virtual const BaseDescriptor & GetParameters() const =0
If the layer has a descriptor, return it.
A GatherDescriptor for the GatherLayer.
IConnectableLayer * AddMeanLayer(const MeanDescriptor &meanDescriptor, const char *name=nullptr)
Add a Mean layer to the network.
IConnectableLayer * AddDequantizeLayer(const char *name=nullptr)
Adds a Dequantize layer to the network.
size_t GetNumInputs() const
IConnectableLayer * AddTileLayer(const TileDescriptor &descriptor, const char *name=nullptr)
Add a Tile layer to the network.
A L2NormalizationDescriptor for the L2NormalizationLayer.
void SetReduceFp32ToFp16(bool ReduceFp32ToFp16State)
IConnectableLayer * AddSpaceToBatchNdLayer(const SpaceToBatchNdDescriptor &spaceToBatchNdDescriptor, const char *name=nullptr)
Adds a space to batch layer to the network.
IConnectableLayer * AddL2NormalizationLayer(const L2NormalizationDescriptor &desc, const char *name=nullptr)
Adds an L2 normalization layer to the network.
virtual void Disconnect(IInputSlot &slot)=0
Status SerializeToDot(std::ostream &stream) const
A NormalizationDescriptor for the NormalizationLayer.
arm::pipe::ProfilingGuid GetGuid() const
bool GetExportEnabled() const
virtual LayerGuid GetGuid() const =0
Returns the unique id of the layer.
IConnectableLayer * AddComparisonLayer(const ComparisonDescriptor &comparisonDescriptor, const char *name=nullptr)
Add a Comparison layer to the network.
~OptimizerOptionsOpaque()
A ChannelShuffleDescriptor for the ChannelShuffle operator.
IConnectableLayer * AddFullyConnectedLayer(const FullyConnectedDescriptor &fullyConnectedDescriptor, const char *name=nullptr)
Adds a fully connected layer to the network.
std::vector< std::reference_wrapper< std::shared_ptr< ConstTensorHandle > >> ConstantTensors
IConnectableLayer * AddBatchMatMulLayer(const BatchMatMulDescriptor &descriptor, const char *name=nullptr)
Add a BatchMatMul layer to the network.
armnn::ModelOptions GetModelOptions() const
IConnectableLayer * AddLogSoftmaxLayer(const LogSoftmaxDescriptor &logSoftmaxDescriptor, const char *name=nullptr)
Adds a log softmax layer to the network.
friend class TestConnectionPreservation
bool GetImportEnabled() const
A StackDescriptor for the StackLayer.
virtual std::vector< TensorShape > InferOutputShapes(const std::vector< TensorShape > &inputShapes) const =0
Infer the shape of the output(s) based on the provided input shape(s)
virtual const TensorInfo & GetTensorInfo() const =0
bool GetReduceFp32ToBf16() const
static INetwork * CreateRaw(const NetworkOptions &networkOptions={})
bool m_ReduceFp32ToFp16
Reduces all Fp32 operators in the model to Fp16 for faster processing.
IConnectableLayer * AddPreluLayer(const char *name=nullptr)
Adds a PReLU layer to the network.
bool GetReduceFp32ToFp16() const
friend Graph & GetGraphForTesting(IOptimizedNetwork *optNetPtr)
IConnectableLayer * AddOutputLayer(LayerBindingId id, const char *name=nullptr)
Adds an output layer to the network.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
IConnectableLayer * AddStridedSliceLayer(const StridedSliceDescriptor &stridedSliceDescriptor, const char *name=nullptr)
Adds a strided slice layer to the network.
IConnectableLayer * AddFusedLayer(const FusedDescriptor &fusedDescriptor, const char *name=nullptr)
Adds a Fused layer to the network.
IConnectableLayer * AddSoftmaxLayer(const SoftmaxDescriptor &softmaxDescriptor, const char *name=nullptr)
Adds a softmax layer to the network.
IConnectableLayer * AddPermuteLayer(const PermuteDescriptor &permuteDescriptor, const char *name=nullptr)
Adds a permute layer to the network.
friend void VisitLayersTopologically(const INetwork *inputNetwork, IStrategy &strategy)
IConnectableLayer * AddTransposeLayer(const TransposeDescriptor &transposeDescriptor, const char *name=nullptr)
Adds a transpose layer to the network.
armnn::ShapeInferenceMethod GetShapeInferenceMethod() const
std::vector< std::reference_wrapper< const std::shared_ptr< ConstTensorHandle > >> ImmutableConstantTensors
void SetShapeInferenceMethod(armnn::ShapeInferenceMethod ShapeInferenceMethodType)
A ElementwiseBinaryDescriptor for the ElementwiseBinaryLayer.
IConnectableLayer * AddStackLayer(const StackDescriptor &descriptor, const char *name=nullptr)
Adds a stack layer to the network.
ShapeInferenceMethod m_shapeInferenceMethod
Infer output size when not available.
IConnectableLayer * AddPooling3dLayer(const Pooling3dDescriptor &pooling3dDescriptor, const char *name=nullptr)
Adds a 3D pooling layer to the network.
std::vector< BackendOptions > NetworkOptions
An output connection slot for a layer.
A FusedDescriptor for the FusedLayer.
IConnectableLayer * AddUnidirectionalSequenceLstmLayer(const UnidirectionalSequenceLstmDescriptor &descriptor, const LstmInputParams &params, const char *name=nullptr)
Add a UnidirectionalSequenceLstm layer to the network.
IConnectableLayer * AddDepthToSpaceLayer(const DepthToSpaceDescriptor &depthToSpaceDescriptor, const char *name=nullptr)
Adds a depth to space layer to the network.
IConnectableLayer * AddMergeLayer(const char *name=nullptr)
Adds a merge layer to the network.
virtual const IInputSlot * GetConnection(unsigned int index) const =0
IConnectableLayer * AddSpaceToDepthLayer(const SpaceToDepthDescriptor &spaceToDepthDescriptor, const char *name=nullptr)
Adds a space to depth layer to the network.
void AddModelOption(armnn::BackendOptions)
IConnectableLayer * AddPrecompiledLayer(const PreCompiledDescriptor &preCompiledDescriptor, CompiledBlobPtr compiledBlobPtr, const Optional< BackendId > &backend, const char *name=nullptr)
Adds a Precompiled layer to the network.
A PadDescriptor for the PadLayer.
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
A TransposeDescriptor for the TransposeLayer.
EmptyOptional is used to initialize the Optional class in case we want to have a default value for an Optional.
A SliceDescriptor for the SliceLayer.
IConnectableLayer * AddSplitterLayer(const ViewsDescriptor &splitterDescriptor, const char *name=nullptr)
Adds a splitter layer to the network.
IConnectableLayer * AddShapeLayer(const char *name=nullptr)
Adds a shape layer to the network.
IConnectableLayer * AddBatchNormalizationLayer(const BatchNormalizationDescriptor &desc, const ConstTensor &mean, const ConstTensor &variance, const ConstTensor &beta, const ConstTensor &gamma, const char *name=nullptr)
Adds a batch normalization layer to the network.
A ReshapeDescriptor for the ReshapeLayer.
bool m_Debug
Add debug data for easier troubleshooting.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
bool m_ExportEnabled
Enable Export.
IConnectableLayer * AddMinimumLayer(const char *name=nullptr)
Add a Minimum layer to the network.
IConnectableLayer * AddConvolution2dLayer(const Convolution2dDescriptor &convolution2dDescriptor, const char *name=nullptr)
Adds a 2D convolution layer to the network.
IConnectableLayer * AddDetectionPostProcessLayer(const DetectionPostProcessDescriptor &descriptor, const ConstTensor &anchors, const char *name=nullptr)
Adds a Detection PostProcess layer to the network.
A PermuteDescriptor for the PermuteLayer.
A BatchMatMulDescriptor for the BatchMatMul operator.
IConnectableLayer * AddLogicalBinaryLayer(const LogicalBinaryDescriptor &descriptor, const char *name=nullptr)
Adds a Logical Binary layer to the network.
virtual LayerGuid GetOwningLayerGuid() const =0
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
IConnectableLayer * AddDivisionLayer(const char *name=nullptr)
Adds a division layer to the network.
bool GetDebugToFileEnabled() const
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
IConnectableLayer * AddRankLayer(const char *name=nullptr)
Adds a rank layer to the network.
IConnectableLayer * AddPadLayer(const PadDescriptor &padDescriptor, const char *name=nullptr)
Adds a pad layer to the network.
A Convolution3dDescriptor for the Convolution3dLayer.
Base class for all descriptors.
IConnectableLayer * AddLstmLayer(const LstmDescriptor &descriptor, const LstmInputParams &params, const char *name=nullptr)
Add a Lstm layer to the network.
virtual void BackendSelectionHint(Optional< BackendId > backend)=0
Provide a hint for the optimizer as to which backend to prefer for this layer.
Private implementation of INetwork.
friend class experimental::AsyncNetworkImpl
size_t GetNumOutputs() const
bool m_ProfilingEnabled
Enable profiling dump of the optimizer phase.
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
void SetAllowExpandedDims(bool ExpandedDimsAllowed)
IConnectableLayer * AddStandInLayer(const StandInDescriptor &descriptor, const char *name=nullptr)
Add a stand-in layer for a type unknown to the Arm NN framework.
arm::pipe::ProfilingGuid LayerGuid
Define LayerGuid type.
bool GetDebugEnabled() const
IConnectableLayer * AddSwitchLayer(const char *name=nullptr)
Adds a switch layer to the network.
IConnectableLayer * AddElementwiseBinaryLayer(const ElementwiseBinaryDescriptor &elementwiseBinaryDescriptor, const char *name=nullptr)
Add an ElementwiseBinary layer to the network.
@ ValidateOnly
Validate all output shapes.
IConnectableLayer * AddFillLayer(const FillDescriptor &fillDescriptor, const char *name=nullptr)
Add a Fill layer to the network.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
A Convolution2dDescriptor for the Convolution2dLayer.
A ComparisonDescriptor for the ComparisonLayer.
A FillDescriptor for the FillLayer.
IConnectableLayer * AddSubtractionLayer(const char *name=nullptr)
Adds a subtraction layer to the network.
bool m_ReduceFp32ToBf16
Note: This feature has been replaced by enabling Fast Math in compute library backend options.
friend TensorInfo GetInputTensorInfo(const INetwork *network)
A StandInDescriptor for the StandIn layer.
ModelOptions m_ModelOptions
Enable Model Options.
IConnectableLayer * AddQuantizedLstmLayer(const QuantizedLstmInputParams &params, const char *name=nullptr)
Add a QuantizedLstm layer to the network.
virtual ConstantTensors GetConstantTensorsByRef()=0
virtual const IConnectableLayer & GetOwningIConnectableLayer() const =0
virtual void ExecuteStrategy(IStrategy &strategy) const =0
Apply a visitor to this layer.
Struct for the users to pass backend specific options.
An LstmDescriptor for the LstmLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
IConnectableLayer * AddTransposeConvolution2dLayer(const TransposeConvolution2dDescriptor &descriptor, const ConstTensor &weights, const Optional< ConstTensor > &biases, const char *name=nullptr)
Adds a 2D transpose convolution layer to the network.
INetwork(NetworkOptions networkOptions={})
virtual int Connect(IInputSlot &destination)=0
virtual bool IsTensorInfoSet() const =0
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
bool GetAllowExpandedDims() const
IConnectableLayer * AddDepthwiseConvolution2dLayer(const DepthwiseConvolution2dDescriptor &convolution2dDescriptor, const char *name=nullptr)
Adds a 2D depthwise convolution layer to the network.
IConnectableLayer * AddPooling2dLayer(const Pooling2dDescriptor &pooling2dDescriptor, const char *name=nullptr)
Adds a 2D pooling layer to the network.
friend IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options, Optional< std::vector< std::string > & > messages)
Accept legacy OptimizerOptions.
IConnectableLayer * AddNormalizationLayer(const NormalizationDescriptor &normalizationDescriptor, const char *name=nullptr)
Adds a normalization layer to the network.
const std::string ToString() const
#define ARMNN_DEPRECATED_MSG_REMOVAL_DATE(message, removed_in_release)
IConnectableLayer * AddQuantizeLayer(const char *name=nullptr)
Add a quantize layer to the network.
static void Destroy(INetwork *network)
std::unique_ptr< NetworkImpl > pNetworkImpl
static void Destroy(IOptimizedNetwork *network)
virtual unsigned int CalculateIndexOnOwner() const =0
An OriginsDescriptor for the ConcatLayer.
virtual unsigned int GetNumConnections() const =0
bool GetProfilingEnabled() const
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
Copyright (c) 2021 ARM Limited and Contributors.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
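The slot accessors above are how layers get wired together. A minimal sketch, assuming the non-const GetInputSlot/GetOutputSlot overloads that are declared alongside the const ones shown here:

    #include <armnn/INetwork.hpp>

    // Connects the producer's first output to the consumer's first input and
    // publishes the tensor metadata on the producing slot.
    void Wire(armnn::IConnectableLayer& producer,
              armnn::IConnectableLayer& consumer,
              const armnn::TensorInfo& info)
    {
        producer.GetOutputSlot(0).Connect(consumer.GetInputSlot(0));
        producer.GetOutputSlot(0).SetTensorInfo(info);
    }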
void SetProfilingEnabled(bool ProfilingState)
~IOutputSlot()
Not user deletable.
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
IConnectableLayer * AddConcatLayer(const ConcatDescriptor &concatDescriptor, const char *name=nullptr)
Adds a concatenation layer to the network.
void SetDebugToFileEnabled(bool DebugFileState)
bool m_DebugToFile
Pass debug data to separate output files for easier troubleshooting.
IConnectableLayer * AddMultiplicationLayer(const char *name=nullptr)
Adds a multiplication layer to the network.
IConnectableLayer * AddConvolution3dLayer(const Convolution3dDescriptor &convolution3dDescriptor, const char *name=nullptr)
Adds a 3D convolution layer to the network.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Device specific knowledge to be passed to the optimizer.
std::vector< BackendOptions > ModelOptions
IConnectableLayer * AddElementwiseUnaryLayer(const ElementwiseUnaryDescriptor &elementwiseUnaryDescriptor, const char *name=nullptr)
Add an ElementwiseUnary layer to the network.
A PreCompiledDescriptor for the PreCompiledLayer.
IConnectableLayer * AddChannelShuffleLayer(const ChannelShuffleDescriptor &descriptor, const char *name=nullptr)
Add a ChannelShuffle layer to the network.
IConnectableLayer * AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor &desc, const char *name=nullptr)
Adds an instance normalization layer to the network.
IConnectableLayer * AddFloorLayer(const char *name=nullptr)
Adds a floor layer to the network.
A Pooling2dDescriptor for the Pooling2dLayer.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options=OptimizerOptionsOpaque(), Optional< std::vector< std::string > & > messages=EmptyOptional())
Create an optimized version of the network.
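A compact end-to-end sketch of the flow this signature implies: build an INetwork, connect the layers, then hand it to Optimize(). The IRuntime usage (to obtain the IDeviceSpec) and the CpuRef backend choice are assumptions drawn from the wider Arm NN API rather than this page:

    #include <armnn/ArmNN.hpp>
    #include <string>
    #include <vector>

    int main()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        IConnectableLayer* input  = network->AddInputLayer(0, "input");
        ActivationDescriptor reluDesc;
        reluDesc.m_Function = ActivationFunction::ReLu;
        IConnectableLayer* relu   = network->AddActivationLayer(reluDesc, "relu");
        IConnectableLayer* output = network->AddOutputLayer(0, "output");

        // Wire input -> relu -> output and publish the tensor metadata.
        TensorInfo info({1, 4}, DataType::Float32);
        input->GetOutputSlot(0).Connect(relu->GetInputSlot(0));
        relu->GetOutputSlot(0).Connect(output->GetInputSlot(0));
        input->GetOutputSlot(0).SetTensorInfo(info);
        relu->GetOutputSlot(0).SetTensorInfo(info);

        // The device spec comes from the runtime (declared in IRuntime.hpp).
        IRuntime::CreationOptions rtOptions;
        IRuntimePtr runtime = IRuntime::Create(rtOptions);

        std::vector<BackendId> backends = { BackendId("CpuRef") };
        std::vector<std::string> messages;

        IOptimizedNetworkPtr optNet = Optimize(*network,
                                               backends,
                                               runtime->GetDeviceSpec(),
                                               OptimizerOptionsOpaque(),
                                               Optional<std::vector<std::string>&>(messages));
        return optNet ? 0 : 1;
    }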
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
ShapeInferenceMethod
The ShapeInferenceMethod modifies how the output shapes are treated.
bool m_AllowExpandedDims
When calculating tensor sizes, dimensions of size == 1 will be ignored.
IConnectableLayer * AddGatherLayer(const GatherDescriptor &descriptor, const char *name=nullptr)
Add Gather layer to the network.
A ReduceDescriptor for the REDUCE operators.
std::function< void(const void *)> CompiledBlobDeleter
IConnectableLayer * AddBroadcastToLayer(const BroadcastToDescriptor &descriptor, const char *name=nullptr)
Add a BroadcastTo layer to the network.
static INetworkPtr Create(const NetworkOptions &networkOptions={})
LayerType
When adding a new layer, also adapt the LastLayer enum value in the enum class LayerType below.
A MeanDescriptor for the MeanLayer.
std::unique_ptr< void, CompiledBlobDeleter > CompiledBlobPtr
void SetDebugEnabled(bool DebugState)
IConnectableLayer * AddBatchToSpaceNdLayer(const BatchToSpaceNdDescriptor &batchToSpaceNdDescriptor, const char *name=nullptr)
Adds a batch to space ND layer to the network.
A SoftmaxDescriptor for the SoftmaxLayer.
IConnectableLayer * AddResizeLayer(const ResizeDescriptor &resizeDescriptor, const char *name=nullptr)
Adds a resize layer to the network.
IConnectableLayer * AddInputLayer(LayerBindingId id, const char *name=nullptr)
Adds an input layer to the network.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
IConnectableLayer * AddArgMinMaxLayer(const ArgMinMaxDescriptor &desc, const char *name=nullptr)
Adds an ArgMinMax layer to the network.
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
IConnectableLayer * AddGatherNdLayer(const char *name=nullptr)
Add GatherNd layer to the network.
IConnectableLayer * AddActivationLayer(const ActivationDescriptor &activationDescriptor, const char *name=nullptr)
Adds an activation layer to the network.
~IConnectableLayer()
Objects are not deletable via the handle.
Main network class which provides the interface for building up a neural network.
friend ModelOptions & GetModelOptionsForTesting(IOptimizedNetwork *optNetPtr)
void SetImportEnabled(bool ImportState)
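Taken together, the setters listed on this page configure an OptimizerOptionsOpaque instance before it is passed to Optimize(). A short sketch; the "GpuAcc"/"FastMathEnabled" backend option is an illustrative assumption:

    #include <armnn/BackendOptions.hpp>
    #include <armnn/INetwork.hpp>

    armnn::OptimizerOptionsOpaque MakeOptimizerOptions()
    {
        armnn::OptimizerOptionsOpaque options;
        options.SetReduceFp32ToFp16(true);
        options.SetDebugEnabled(false);
        options.SetProfilingEnabled(true);
        options.SetShapeInferenceMethod(armnn::ShapeInferenceMethod::InferAndValidate);

        // Backend-specific model option; the name/value pair is illustrative only.
        armnn::BackendOptions gpuAcc("GpuAcc", { { "FastMathEnabled", true } });
        options.AddModelOption(gpuAcc);

        return options;
    }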