ArmNN
 24.08
IOptimizedNetwork Class Reference

#include <INetwork.hpp>

Public Member Functions

Status PrintGraph ()
 
Status SerializeToDot (std::ostream &stream) const
 
arm::pipe::ProfilingGuid GetGuid () const
 
size_t GetNumInputs () const
 
size_t GetNumOutputs () const
 
void ExecuteStrategy (IStrategy &strategy) const
 
 IOptimizedNetwork (const IOptimizedNetwork &other, const ModelOptions &modelOptions)
 Creates a copy of the IOptimizedNetwork. More...
 
 IOptimizedNetwork (std::unique_ptr< Graph > graph)
 
 IOptimizedNetwork (std::unique_ptr< OptimizedNetworkImpl > impl)
 
 ~IOptimizedNetwork ()
 
const std::shared_ptr< IProfiler > & GetProfiler () const
 

Static Public Member Functions

static void Destroy (IOptimizedNetwork *network)
 

Protected Member Functions

 IOptimizedNetwork (std::unique_ptr< Graph > graph, const ModelOptions &modelOptions)
 

Protected Attributes

std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
 

Friends

class LoadedNetwork
 
class experimental::AsyncNetworkImpl
 
class experimental::WorkingMemHandle
 
Graph & GetGraphForTesting (IOptimizedNetwork *optNetPtr)
 
ModelOptions & GetModelOptionsForTesting (IOptimizedNetwork *optNetPtr)
 
IOptimizedNetworkPtr Optimize (const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 
IOptimizedNetworkPtr Optimize (const Graph &inGraph, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 

Detailed Description

Definition at line 908 of file INetwork.hpp.

Constructor & Destructor Documentation

◆ IOptimizedNetwork() [1/4]

IOptimizedNetwork ( const IOptimizedNetwork &  other,
const ModelOptions &  modelOptions 
)

Creates a copy of the IOptimizedNetwork.

The IOptimizedNetwork will not be reoptimized, the provided ModelOptions will only be used when creating a LoadedNetwork.

Definition at line 692 of file Network.cpp.

693  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(*other.pOptimizedNetworkImpl.get(), modelOptions)) {}

◆ IOptimizedNetwork() [2/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph)

Definition at line 695 of file Network.cpp.

696  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph))) {}

◆ IOptimizedNetwork() [3/4]

IOptimizedNetwork ( std::unique_ptr< OptimizedNetworkImpl >  impl)

Definition at line 698 of file Network.cpp.

699  : pOptimizedNetworkImpl(std::move(impl)) {}

◆ ~IOptimizedNetwork()

~IOptimizedNetwork ( )
default

◆ IOptimizedNetwork() [4/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph,
const ModelOptions &  modelOptions 
)
protected

Definition at line 701 of file Network.cpp.

702  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph), modelOptions)) {}

Member Function Documentation

◆ Destroy()

void Destroy ( IOptimizedNetwork *  network)
static

Definition at line 706 of file Network.cpp.

707 {
708  delete network;
709 }

◆ ExecuteStrategy()

void ExecuteStrategy ( IStrategy &  strategy) const

Definition at line 3135 of file Network.cpp.

3136 {
3137  pOptimizedNetworkImpl->ExecuteStrategy(strategy);
3138 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetGuid()

arm::pipe::ProfilingGuid GetGuid ( ) const

Definition at line 726 of file Network.cpp.

727 {
728  return pOptimizedNetworkImpl->GetGuid();
729 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumInputs()

size_t GetNumInputs ( ) const

Definition at line 731 of file Network.cpp.

732 {
733  return pOptimizedNetworkImpl->GetNumInputs();
734 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumOutputs()

size_t GetNumOutputs ( ) const

Definition at line 736 of file Network.cpp.

737 {
738  return pOptimizedNetworkImpl->GetNumOutputs();
739 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetProfiler()

const std::shared_ptr< IProfiler > & GetProfiler ( ) const

Definition at line 721 of file Network.cpp.

722 {
723  return pOptimizedNetworkImpl->GetGraph().GetProfiler();
724 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ PrintGraph()

Status PrintGraph ( )

Definition at line 711 of file Network.cpp.

712 {
713  return pOptimizedNetworkImpl->PrintGraph();
714 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ SerializeToDot()

Status SerializeToDot ( std::ostream &  stream) const

Definition at line 716 of file Network.cpp.

717 {
718  return pOptimizedNetworkImpl->SerializeToDot(stream);
719 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

Referenced by armnn_driver::ExportNetworkGraphToDotFile().

Friends And Related Function Documentation

◆ experimental::AsyncNetworkImpl

friend class experimental::AsyncNetworkImpl
friend

Definition at line 935 of file INetwork.hpp.

◆ experimental::WorkingMemHandle

friend class experimental::WorkingMemHandle
friend

Definition at line 936 of file INetwork.hpp.

◆ GetGraphForTesting

Graph& GetGraphForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 49 of file TestUtils.cpp.

50 {
51  return optNet->pOptimizedNetworkImpl->GetGraph();
52 }

◆ GetModelOptionsForTesting

ModelOptions& GetModelOptionsForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 54 of file TestUtils.cpp.

55 {
56  return optNet->pOptimizedNetworkImpl->GetModelOptions();
57 }

◆ LoadedNetwork

friend class LoadedNetwork
friend

Definition at line 933 of file INetwork.hpp.

◆ Optimize [1/2]

IOptimizedNetworkPtr Optimize ( const Graph &  inGraph,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptionsOpaque &  options,
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inGraph — Graph to be optimized.
backendPreferences — The choice of the backend ordered by user preferences.
deviceSpec — DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages — If there are failures or warnings a string describing same will be added to the vector
options — OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 1906 of file Network.cpp.

1911 {
1912  ARMNN_LOG(debug) << options.ToString();
1913 
1914  // Enable profiling
1915  auto profiler = inGraph.GetProfiler();
1917  profiler->EnableProfiling(options.GetProfilingEnabled());
1918 
1919  // Some backends don't play well together. Check here before continuing.
1920  {
1921  std::set<BackendId> backendSet(backendPreferences.begin(), backendPreferences.end());
1922  // GpuFsa cannot co-exist with GpuAcc.
1923  if (backendSet.find("GpuFsa") != backendSet.end() &&
1924  backendSet.find("GpuAcc") != backendSet.end())
1925  {
1926  throw InvalidArgumentException("The backends \"GpuAcc\" and \"GpuFsa\" cannot be specified "
1927  "for the same optimized network.");
1928  }
1929  }
1930 
1932  if (backendPreferences.empty())
1933  {
1934  throw InvalidArgumentException("Invoked Optimize with no backends specified");
1935  }
1936 
1937  if (options.GetReduceFp32ToBf16())
1938  {
1939  throw InvalidArgumentException("BFloat16 optimization is currently ignored. In order to use Bf16 optimization "
1940  "Please use the FastMathEnabled backend option for CpuAcc or GpuAcc.");
1941  }
1942 
1943  if (options.GetReduceFp32ToFp16() && options.GetReduceFp32ToBf16())
1944  {
1945  throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
1946  }
1947 
1948  // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
1949  inGraph.VerifyConstantLayerSetTensorInfo();
1950 
1951  std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
1952 
1953  // We need to pass on the information about whether import and export is enabled to the LoadNetwork phase.
1954  // The mechanism to do that is to add model options to the optimized network.
1955  armnn::BackendOptions importExport("Global",
1956  {{"ImportEnabled", options.GetImportEnabled()},
1957  {"ExportEnabled", options.GetExportEnabled()}});
1958  ModelOptions optimizedOptions(options.GetModelOptions());
1959  optimizedOptions.push_back(importExport);
1960 
1961  auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), optimizedOptions),
1963 
1964  IOptimizedNetwork* optNetObjPtr = optNet.get();
1965 
1966  // Get the optimized graph
1967  Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
1968 
1969  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::InferAndValidate)
1970  {
1971  // Infer the tensor infos for all output slots. Throws an exception on failure
1972  optGraph.InferTensorInfos();
1973  }
1974 
1975  using namespace optimizations;
1976  // Substitute Max + Min with Bounded Relu before AddBroadcastReshapeLayer optimisation,
1977  // as Bounded ReLu needs the constants to be 1D size 1
1979 
1980  // Perform BroadcastToOptimizationLayer before AddBroadcastReshapeLayer optimisation
1982 
1984 
1985  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::ValidateOnly)
1986  {
1987  // Validate the tensor infos for all output slots. Throws an exception on failure
1988  optGraph.InferTensorInfos();
1989  }
1990 
1991  // Group Constant Layer optimizations together where possible.
1992  // This is important as:
1993  // FusePermuteIntoConstantLayer must happen before FoldPadIntoDepthwiseConvolution2d and
1994  // FuseBatchNormIntoDepthwiseConvolution2D.
1995  // ConvertConstDequantisationLayersToConstLayers must happen before FoldPadIntoConvolution2d
1998  // Perform optimisation passes
2004  MovePermuteUp(),
2005  MoveTransposeUp(),
2006  PermuteAsReshape(),
2019 
2020  // Initialize backend settings
2021  BackendSettings backendSettings(backendPreferences, deviceSpec);
2022  auto availablePreferredBackends = backendSettings.GetAvailablePreferredBackends();
2023  if (availablePreferredBackends.empty())
2024  {
2025  std::stringstream failureMsg;
2026  failureMsg << "None of the preferred backends " << backendPreferences
2027  << " are supported. Current platform provides " << backendSettings.m_SupportedBackends;
2028  ReportError(failureMsg.str(), messages);
2029  throw InvalidArgumentException(failureMsg.str());
2030  }
2031 
2032  // Create a map to temporarily hold initialized backend objects
2033  TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
2034  BackendsMap backends = CreateSupportedBackends(tensorHandleFactoryRegistry, backendSettings);
2035 
2036  if (options.GetReduceFp32ToFp16())
2037  {
2038  bool hasFp16 = CheckFp16Support(backends, availablePreferredBackends);
2039  if (hasFp16)
2040  {
2041  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ReduceFp32ToFp16");
2044  }
2045  }
2046 
2047  // Assign an available backend to each layer
2048  Graph::Iterator firstLayer = optGraph.begin();
2049  Graph::Iterator lastLayer = optGraph.end();
2050  OptimizationResult assignBackendsResult = AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
2051  backendSettings,
2052  firstLayer,
2053  lastLayer,
2054  messages);
2055  if (assignBackendsResult.m_Error)
2056  {
2057  // Failed to assign a backend to each layer
2058  throw InvalidArgumentException("Failed to assign a backend to each layer");
2059  }
2060 
2063 
2064  // Apply the backend-specific optimizations
2065  OptimizationResult backendOptimizationResult = ApplyBackendOptimizations(optNetObjPtr->pOptimizedNetworkImpl.get(),
2066  backendSettings,
2067  backends,
2068  options.GetModelOptions(),
2069  messages);
2070  if (backendOptimizationResult.m_Error)
2071  {
2072  // Failed to apply the backend-specific optimizations
2073  throw InvalidArgumentException("Failed to apply the backend-specific optimizations");
2074  }
2075 
2076  // Convert constants
2077  {
2078  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ConvertConstants");
2081  }
2082 
2083  // This must occur after all topological changes to the graph and any redirection of variables
2084  // If the debug flag is set, then insert a DebugLayer after each layer
2085  // Doing this after applying the backend optimizations as they might have changed some layers
2086  if (options.GetDebugEnabled() && !options.GetDebugToFileEnabled())
2087  {
2089  }
2090  else if (options.GetDebugToFileEnabled())
2091  {
2092  // Setup the output file path
2093  try
2094  {
2095 #if !defined(ARMNN_DISABLE_FILESYSTEM)
2096  auto result = armnnUtils::Filesystem::CreateDirectory("/ArmNNIntermediateLayerOutputs");
2097  ARMNN_LOG(info) << "Intermediate tensors will be written to: " << result;
2098 #endif
2100  }
2101  catch (const armnn::RuntimeException& e)
2102  {
2103  // If we cannot create the output directory then we'll issue a warning and continue.
2104  ARMNN_LOG(warning) << "Unable to print intermediate layer outputs : " << e.what();
2105  }
2106  }
2107 
2108  // Calculate the compatibility strategies for tensor handles
2109  OptimizationResult strategyResult = SelectTensorHandleStrategy(optGraph,
2110  backends,
2111  tensorHandleFactoryRegistry,
2112  options.GetImportEnabled(),
2113  options.GetExportEnabled(),
2114  messages);
2115 
2116  if (strategyResult.m_Error)
2117  {
2118  // Failed to apply the backend-specific optimizations
2120  }
2121 
2122  // Based on the tensor handle strategy determined above, insert copy layers where required.
2123  {
2124  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_AddCompatibilityLayers");
2125  optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
2126  }
2127 
2128  return optNet;
2129 }

◆ Optimize [2/2]

IOptimizedNetworkPtr Optimize ( const INetwork &  inNetwork,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptionsOpaque &  options = OptimizerOptionsOpaque(),
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
network — INetwork description of the network to be optimized.
backendPreferences — The choice of the backend ordered by user preferences.
deviceSpec — DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages — If there are failures or warnings a string describing same will be added to the vector
options — OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 2145 of file Network.cpp.

2150 {
2151  return Optimize(inNetwork.pNetworkImpl->GetGraph(),
2152  backendPreferences,
2153  deviceSpec,
2154  options,
2155  messages);
2156 }

Member Data Documentation

◆ pOptimizedNetworkImpl


The documentation for this class was generated from the following files:
armnn::optimizations::InsertDebugToFileLayer
OptimizeForType< Layer, AddDebugToFileImpl > InsertDebugToFileLayer
Definition: AddDebug.hpp:54
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
armnn::ApplyBackendOptimizations
OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, BackendsMap &backends, const ModelOptions &modelOptions, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1320
armnn::Compute::Undefined
@ Undefined
armnn::optimizations::InsertDebugLayer
OptimizeForType< Layer, AddDebugImpl > InsertDebugLayer
Definition: AddDebug.hpp:53
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat32
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoConvolution2DFloat32
Definition: FuseBatchNorm.hpp:222
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:609
armnn::optimizations::OptimizeInversePermutes
OptimizeForConnection< PermuteLayer, PermuteLayer, OptimizeInversePermutesImpl< PermuteLayer > > OptimizeInversePermutes
Definition: OptimizeInversePermutes.hpp:43
armnn::optimizations::TransposeAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< TransposeLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< TransposeLayer > > TransposeAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:104
armnn::optimizations::FoldPadIntoPooling2d
OptimizeForExclusiveConnection< PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl > FoldPadIntoPooling2d
Definition: FoldPadIntoLayer2d.hpp:283
armnn::optimizations::Fp32NetworkToFp16Converter
OptimizeForType< Layer, ConvertFp32NetworkToFp16Impl > Fp32NetworkToFp16Converter
Definition: ConvertFp32NetworkToFp16.hpp:87
armnn::optimizations::FoldPadIntoConvolution2d
OptimizeForExclusiveConnection< PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl > FoldPadIntoConvolution2d
Definition: FoldPadIntoLayer2d.hpp:277
armnn::optimizations::ConvertConstDequantisationLayersToConstLayers
OptimizeForConnection< ConstantLayer, DequantizeLayer, ConvertConstDequantisationLayersToConstLayersImpl > ConvertConstDequantisationLayersToConstLayers
Definition: ConvertConstDequantisationLayersToConstLayers.hpp:173
armnn::optimizations::MoveTransposeUp
OptimizeForConnection< Layer, TransposeLayer, MoveTransposeUpImpl > MoveTransposeUp
Definition: MoveTransposeUp.hpp:83
armnn::AssignBackends
OptimizationResult AssignBackends(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, Graph::Iterator &firstLayer, Graph::Iterator &lastLayer, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1179
armnn::optimizations::BroadcastToOptimizationLayer
OptimizeForType< BroadcastToLayer, DeleteBroadcastToImpl > BroadcastToOptimizationLayer
Definition: DeleteBroadcastTo.hpp:38
armnn::Graph::Iterator
LayerList::const_iterator Iterator
Definition: Graph.hpp:53
armnn::optimizations::PermuteAsReshape
OptimizeForType< PermuteLayer, PermuteAsReshapeImpl > PermuteAsReshape
Definition: PermuteAsReshape.hpp:66
armnn::optimizations::PermuteAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< PermuteLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< PermuteLayer > > PermuteAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:102
armnn::SelectTensorHandleStrategy
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry &registry, bool importEnabled, bool exportEnabled, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1821
armnn::optimizations::MovePermuteUp
OptimizeForConnection< Layer, PermuteLayer, MovePermuteUpImpl > MovePermuteUp
Definition: MovePermuteUp.hpp:83
armnn::optimizations::OptimizeInverseConversionsFp16
OptimizeForConnection< ConvertFp16ToFp32Layer, ConvertFp32ToFp16Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp16
Definition: OptimizeInverseConversions.hpp:42
armnn::Exception::what
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::CreateSupportedBackends
BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry &handleFactoryRegistry, BackendSettings &backendSettings)
Definition: Network.cpp:1302
armnn::optimizations::ConvertConstantsFloatToHalf
ConvertConstants< Float32ToFloat16, IsFloat16Layer > ConvertConstantsFloatToHalf
Definition: ConvertConstants.hpp:99
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::ReportError
void ReportError(const std::string &errorMessage, Optional< std::vector< std::string > & > errorMessages)
Definition: Network.cpp:762
armnn::CheckFp16Support
bool CheckFp16Support(BackendsMap &backends, const std::vector< BackendId > &availablePreferredBackends)
Definition: Network.cpp:1026
armnn::optimizations::FusePermuteIntoConstLayer
OptimizeForConnection< ConstantLayer, PermuteLayer, ConvertConstPermuteLayersToConstLayers > FusePermuteIntoConstLayer
Definition: ConvertConstPermuteLayersToConstLayers.hpp:124
armnn::MakeOptimizations
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::IOptimizedNetwork::Optimize
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
Definition: Network.cpp:2145
armnn::ShapeInferenceMethod::ValidateOnly
@ ValidateOnly
Validate all output shapes.
armnn::ShapeInferenceMethod::InferAndValidate
@ InferAndValidate
Infer missing output shapes and validate all output shapes.
armnn::optimizations::OptimizeInverseConversionsFp32
OptimizeForConnection< ConvertFp32ToFp16Layer, ConvertFp16ToFp32Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp32
Definition: OptimizeInverseConversions.hpp:44
armnn::BackendOptions
Struct for the users to pass backend specific options.
Definition: BackendOptions.hpp:22
armnn::optimizations::TransposeAsReshape
OptimizeForType< TransposeLayer, TransposeAsReshapeImpl > TransposeAsReshape
Definition: TransposeAsReshape.hpp:77
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:602
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat16
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoConvolution2DFloat16
Definition: FuseBatchNorm.hpp:227
armnn::IOptimizedNetwork::Destroy
static void Destroy(IOptimizedNetwork *network)
Definition: Network.cpp:706
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat16
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoDepthwiseConvolution2DFloat16
Definition: FuseBatchNorm.hpp:237
armnn::BackendsMap
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
Definition: Network.hpp:285
armnn::optimizations::SquashEqualTransposeSiblings
OptimizeForConnection< Layer, TransposeLayer, SquashEqualSiblingsImpl< TransposeLayer > > SquashEqualTransposeSiblings
Definition: SquashEqualSiblings.hpp:69
armnn::optimizations::ConvertConstantsHalfToFloat
ConvertConstants< Float16ToFloat32, IsFloat32Layer > ConvertConstantsHalfToFloat
Definition: ConvertConstants.hpp:98
armnn::IOptimizedNetwork::IOptimizedNetwork
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
Definition: Network.cpp:692
armnn::optimizations::SquashEqualPermuteSiblings
OptimizeForConnection< Layer, PermuteLayer, SquashEqualSiblingsImpl< PermuteLayer > > SquashEqualPermuteSiblings
Definition: SquashEqualSiblings.hpp:67
armnn::Optimizer::Pass
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
armnn::ModelOptions
std::vector< BackendOptions > ModelOptions
Definition: BackendOptions.hpp:18
armnn::optimizations::FoldPadIntoDepthwiseConvolution2d
OptimizeForExclusiveConnection< PadLayer, DepthwiseConvolution2dLayer, pad_fold::FoldPadIntoDepthwiseConvolution2dImpl > FoldPadIntoDepthwiseConvolution2d
Definition: FoldPadIntoLayer2d.hpp:281
armnnUtils::Filesystem::CreateDirectory
std::string CreateDirectory(std::string sPath)
Returns full path to temporary folder.
Definition: Filesystem.cpp:47
armnn::optimizations::MaxMinIntoBoundedRelu
OptimizeForExclusiveConnection< ElementwiseBinaryLayer, ElementwiseBinaryLayer, MaxMinIntoBoundedReluImpl > MaxMinIntoBoundedRelu
Definition: MaxMinIntoBoundedRelu.hpp:134
armnn::optimizations::SquashEqualReshapeSiblings
OptimizeForConnection< Layer, ReshapeLayer, SquashEqualSiblingsImpl< ReshapeLayer > > SquashEqualReshapeSiblings
Definition: SquashEqualSiblings.hpp:70
armnn::optimizations::OptimizeInverseTransposes
OptimizeForConnection< TransposeLayer, TransposeLayer, OptimizeInversePermutesImpl< TransposeLayer > > OptimizeInverseTransposes
Definition: OptimizeInversePermutes.hpp:45
armnn::optimizations::AddBroadcastReshapeLayer
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
Definition: AddBroadcastReshapeLayer.hpp:94
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat32
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoDepthwiseConvolution2DFloat32
Definition: FuseBatchNorm.hpp:232
armnn::optimizations::OptimizeConsecutiveReshapes
OptimizeForConnection< ReshapeLayer, ReshapeLayer, OptimizeConsecutiveReshapesImpl > OptimizeConsecutiveReshapes
Definition: OptimizeConsecutiveReshapes.hpp:61
armnn::IOptimizedNetwork::pOptimizedNetworkImpl
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
Definition: INetwork.hpp:953