ArmNN 24.08
CanonicalUtils.hpp File Reference
#include <armnn/ArmNN.hpp>
#include <CpuExecutor.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <vector>
#include <string>
#include <fstream>
#include <iomanip>

Classes

class  UnsupportedOperand< OperandType >
 

Namespaces

 armnn_driver
 Helper classes.
 

Functions

void SwizzleAndroidNn4dTensorToArmNn (armnn::TensorInfo &tensor, const void *input, void *output, const armnn::PermutationVector &mappings)
 Swizzles tensor data in input according to the dimension mappings.
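
 A minimal sketch of how SwizzleAndroidNn4dTensorToArmNn might be called. The NHWC-to-NCHW style mapping, the assumption that the non-const TensorInfo is rewritten to the permuted shape, and the surrounding helper function are illustrative only, not taken from the driver sources.

 #include <armnn/ArmNN.hpp>
 #include "CanonicalUtils.hpp"
 #include <vector>

 // Illustrative helper: permute a 4-D NHWC-described tensor before handing it to Arm NN.
 void PermuteToArmNnLayout(const armnn::TensorInfo& nhwcInfo, const float* srcData)
 {
     armnn::TensorInfo info = nhwcInfo;            // copy; assumed to be updated to the permuted shape
     std::vector<float> permuted(info.GetNumElements());

     // Dimension mappings are an assumption for illustration (NHWC -> NCHW style reordering).
     const armnn::PermutationVector mappings({ 0, 2, 3, 1 });

     armnn_driver::SwizzleAndroidNn4dTensorToArmNn(info, srcData, permuted.data(), mappings);
     // 'permuted' now holds the reordered data described by 'info'.
 }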
 
void * GetMemoryFromPool (DataLocation location, const std::vector< android::nn::RunTimePoolInfo > &memPools)
 Returns a pointer to a specific location in a pool.
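
 A sketch of resolving a request argument to a raw pointer, assuming Request::Argument exposes its DataLocation as a location member (as in the canonical NNAPI types) and that the pools were mapped beforehand into android::nn::RunTimePoolInfo objects.

 #include "CanonicalUtils.hpp"
 #include <vector>

 // Illustrative helper: look up the backing memory of a pool-based request argument.
 void* ResolveArgumentMemory(const Request::Argument& requestArg,
                             const std::vector<android::nn::RunTimePoolInfo>& memPools)
 {
     // The DataLocation names a pool index plus an offset and length within that pool.
     return armnn_driver::GetMemoryFromPool(requestArg.location, memPools);
 }

 For arguments backed by a pointer rather than a pool, GetMemoryFromPointer below appears to be the matching helper.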
 
void * GetMemoryFromPointer (const Request::Argument &requestArg)
 
armnn::TensorInfo GetTensorInfoForOperand (const Operand &operand)
 
std::string GetOperandSummary (const Operand &operand)
 
bool isQuantizedOperand (const OperandType &operandType)
 
std::string GetModelSummary (const Model &model)
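
 A sketch tying the operand and model helpers together. It assumes the canonical NNAPI types (Model, Operand, OperandType) are visible unqualified as in the signatures above, that Operand has a type member, and that GetTensorInfoForOperand signals unconvertible operands by throwing the UnsupportedOperand class listed earlier; these are assumptions about usage, not statements about the implementation.

 #include "CanonicalUtils.hpp"
 #include <iostream>

 // Illustrative helper: summarise a model and probe one of its operands.
 bool TryDescribeOperand(const Model& model, const Operand& operand, armnn::TensorInfo& outInfo)
 {
     std::cout << armnn_driver::GetModelSummary(model) << std::endl;

     try
     {
         outInfo = armnn_driver::GetTensorInfoForOperand(operand);
     }
     catch (const armnn_driver::UnsupportedOperand<OperandType>&)   // assumed failure path
     {
         std::cout << "Unsupported: " << armnn_driver::GetOperandSummary(operand) << std::endl;
         return false;
     }

     if (armnn_driver::isQuantizedOperand(operand.type))
     {
         std::cout << "Quantized: " << armnn_driver::GetOperandSummary(operand) << std::endl;
     }
     return true;
 }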
 
template<typename TensorType >
void DumpTensor (const std::string &dumpDir, const std::string &requestName, const std::string &tensorName, const TensorType &tensor)
 
void DumpJsonProfilingIfRequired (bool gpuProfilingEnabled, const std::string &dumpDir, armnn::NetworkId networkId, const armnn::IProfiler *profiler)
 
std::string ExportNetworkGraphToDotFile (const armnn::IOptimizedNetwork &optimizedNetwork, const std::string &dumpDir)
 
std::string SerializeNetwork (const armnn::INetwork &network, const std::string &dumpDir, std::vector< uint8_t > &dataCacheData, bool dataCachingActive)
 
void RenameExportedFiles (const std::string &existingSerializedFileName, const std::string &existingDotFileName, const std::string &dumpDir, const armnn::NetworkId networkId)
 
void RenameFile (const std::string &existingName, const std::string &extension, const std::string &dumpDir, const armnn::NetworkId networkId)
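
 A sketch chaining the dump and serialisation helpers the way a driver might after optimising and loading a network. The call order, the dataCachingActive = false argument, and the assumption that the returned strings are the file names the helpers wrote are all illustrative; the profiler pointer is simply taken as a parameter.

 #include "CanonicalUtils.hpp"
 #include <armnn/ArmNN.hpp>
 #include <string>
 #include <vector>

 // Illustrative helper: write out the debug artefacts for a loaded network.
 void DumpNetworkArtifacts(const armnn::INetwork& network,
                           const armnn::IOptimizedNetwork& optimizedNetwork,
                           const armnn::IProfiler* profiler,
                           armnn::NetworkId networkId,
                           const std::string& dumpDir,
                           bool gpuProfilingEnabled)
 {
     std::vector<uint8_t> dataCacheData;   // unused here; data caching disabled in this sketch
     const std::string serializedFile =
         armnn_driver::SerializeNetwork(network, dumpDir, dataCacheData, /*dataCachingActive=*/false);
     const std::string dotFile =
         armnn_driver::ExportNetworkGraphToDotFile(optimizedNetwork, dumpDir);

     // Fold the network id into the exported file names once it is known.
     armnn_driver::RenameExportedFiles(serializedFile, dotFile, dumpDir, networkId);

     // Emit JSON profiling output only when GPU profiling was requested.
     armnn_driver::DumpJsonProfilingIfRequired(gpuProfilingEnabled, dumpDir, networkId, profiler);
 }

 RenameFile appears to be the single-file variant of the same renaming step.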
 
bool IsDynamicTensor (const armnn::TensorInfo &outputInfo)
 Checks if a tensor info represents a dynamic tensor.
 
bool AreDynamicTensorsSupported (void)
 Checks for ArmNN support of dynamic tensors.
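
 A small sketch combining IsDynamicTensor and AreDynamicTensorsSupported; the policy of rejecting dynamic outputs when support is absent is an assumption about typical usage.

 #include "CanonicalUtils.hpp"

 // Illustrative helper: accept an output only if its shape is fully known,
 // or if this ArmNN build can handle dynamic tensors.
 bool OutputInfoIsUsable(const armnn::TensorInfo& outputInfo)
 {
     if (armnn_driver::IsDynamicTensor(outputInfo))
     {
         return armnn_driver::AreDynamicTensorsSupported();
     }
     return true;
 }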
 
std::string GetFileTimestamp ()
 
OutputShape ComputeShape (const armnn::TensorInfo &info)
 
void CommitPools (std::vector<::android::nn::RunTimePoolInfo > &memPools)
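
 A sketch of wrapping up a request, assuming ComputeShape produces the OutputShape reported back to the runtime and CommitPools flushes written outputs back to the shared memory pools; both are assumptions inferred from the signatures above.

 #include "CanonicalUtils.hpp"
 #include <vector>

 // Illustrative helper: report output shapes and flush the memory pools.
 std::vector<OutputShape> FinishExecution(const std::vector<armnn::TensorInfo>& outputInfos,
                                          std::vector<android::nn::RunTimePoolInfo>& memPools)
 {
     std::vector<OutputShape> outputShapes;
     outputShapes.reserve(outputInfos.size());
     for (const armnn::TensorInfo& info : outputInfos)
     {
         outputShapes.push_back(armnn_driver::ComputeShape(info));
     }

     armnn_driver::CommitPools(memPools);
     return outputShapes;
 }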